1 /* Fold a constant sub-tree into a single node for the C compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-GIMPLE code as well as
42 GIMPLE code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
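/* Illustrative sketch of the entry points described above (hypothetical
   caller, not part of the original sources):

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node,
                             build_int_cst (integer_type_node, 2),
                             build_int_cst (integer_type_node, 3));

   Here fold_build2 hands the constants to the folders and yields the
   INTEGER_CST 5 directly instead of a PLUS_EXPR node; fold performs the
   same simplification on an already-built expression.  */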
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
61 #include "langhooks.h"
64 #include "tree-flow.h"
66 /* Nonzero if we are folding constants inside an initializer; zero otherwise. */
68 int folding_initializer = 0;
70 /* The following constants represent a bit-based encoding of GCC's
71 comparison operators. This encoding simplifies transformations
72 on relational comparison operators, such as AND and OR. */
73 enum comparison_code {
92 static bool negate_mathfn_p (enum built_in_function);
93 static bool negate_expr_p (tree);
94 static tree negate_expr (tree);
95 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
96 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
97 static tree const_binop (enum tree_code, tree, tree);
98 static enum comparison_code comparison_to_compcode (enum tree_code);
99 static enum tree_code compcode_to_comparison (enum comparison_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
104 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (location_t, tree, tree,
106 HOST_WIDE_INT, HOST_WIDE_INT, int);
107 static tree optimize_bit_field_compare (location_t, enum tree_code,
109 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
111 enum machine_mode *, int *, int *,
113 static int all_ones_mask_p (const_tree, int);
114 static tree sign_bit_p (tree, const_tree);
115 static int simple_operand_p (const_tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 extern tree make_range (tree, int *, tree *, tree *, bool *);
120 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
122 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
123 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
124 static tree unextend (tree, int, int, tree);
125 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
126 static tree optimize_minmax_comparison (location_t, enum tree_code,
128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130 static tree fold_binary_op_with_conditional_arg (location_t,
131 enum tree_code, tree,
134 static tree fold_mathfn_compare (location_t,
135 enum built_in_function, enum tree_code,
137 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (const_tree, const_tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (const_tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
143 static tree fold_convert_const (enum tree_code, tree, tree);
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the addition.
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the result. */
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
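/* A hedged 8-bit illustration (not from the original sources): with
   a = 0x70 and b = 0x20, sum wraps to 0x90, so ~(a ^ b) & (a ^ sum)
   = 0xaf & 0xe0 = 0xa0 has the sign bit set and the macro reports the
   overflow; with b = -0x20 the addends differ in sign, the AND result
   has a clear sign bit, and no overflow is reported.  */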
156 /* If ARG2 divides ARG1 with zero remainder, carries out the division
157 of type CODE and returns the quotient.
158 Otherwise returns NULL_TREE. */
161 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
166 /* The signedness of the division is taken from operand two; that does the
167 correct thing for POINTER_PLUS_EXPR, where we want
168 a signed division. */
169 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
170 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
171 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
174 quo = double_int_divmod (tree_to_double_int (arg1),
175 tree_to_double_int (arg2),
178 if (double_int_zero_p (rem))
179 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
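/* For example (illustration only): with ARG1 = 12 and ARG2 = 4 the
   division is exact and an INTEGER_CST 3 is returned; with ARG1 = 13
   the remainder is nonzero and the result is NULL_TREE.  */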
184 /* This is nonzero if we should defer warnings about undefined
185 overflow. This facility exists because these warnings are a
186 special case. The code to estimate loop iterations does not want
187 to issue any warnings, since it works with expressions which do not
188 occur in user code. Various bits of cleanup code call fold(), but
189 only use the result if it has certain characteristics (e.g., is a
190 constant); that code only wants to issue a warning if the result is used. */
193 static int fold_deferring_overflow_warnings;
195 /* If a warning about undefined overflow is deferred, this is the
196 warning. Note that this may cause us to turn two warnings into
197 one, but that is fine since it is sufficient to only give one
198 warning per expression. */
200 static const char* fold_deferred_overflow_warning;
202 /* If a warning about undefined overflow is deferred, this is the
203 level at which the warning should be emitted. */
205 static enum warn_strict_overflow_code fold_deferred_overflow_code;
207 /* Start deferring overflow warnings. We could use a stack here to
208 permit nested calls, but at present it is not necessary. */
211 fold_defer_overflow_warnings (void)
213 ++fold_deferring_overflow_warnings;
216 /* Stop deferring overflow warnings. If there is a pending warning,
217 and ISSUE is true, then issue the warning if appropriate. STMT is
218 the statement with which the warning should be associated (used for
219 location information); STMT may be NULL. CODE is the level of the
220 warning--a warn_strict_overflow_code value. This function will use
221 the smaller of CODE and the deferred code when deciding whether to
222 issue the warning. CODE may be zero to mean always use the deferred code. */
226 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
231 gcc_assert (fold_deferring_overflow_warnings > 0);
232 --fold_deferring_overflow_warnings;
233 if (fold_deferring_overflow_warnings > 0)
235 if (fold_deferred_overflow_warning != NULL
237 && code < (int) fold_deferred_overflow_code)
238 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 warnmsg = fold_deferred_overflow_warning;
243 fold_deferred_overflow_warning = NULL;
245 if (!issue || warnmsg == NULL)
248 if (gimple_no_warning_p (stmt))
251 /* Use the smallest code level when deciding to issue the warning. */
253 if (code == 0 || code > (int) fold_deferred_overflow_code)
254 code = fold_deferred_overflow_code;
256 if (!issue_strict_overflow_warning (code))
260 locus = input_location;
262 locus = gimple_location (stmt);
263 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
266 /* Stop deferring overflow warnings, ignoring any deferred warnings. */
270 fold_undefer_and_ignore_overflow_warnings (void)
272 fold_undefer_overflow_warnings (false, NULL, 0);
275 /* Whether we are deferring overflow warnings. */
278 fold_deferring_overflow_warnings_p (void)
280 return fold_deferring_overflow_warnings > 0;
283 /* This is called when we fold something based on the fact that signed
284 overflow is undefined. */
287 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
289 if (fold_deferring_overflow_warnings > 0)
291 if (fold_deferred_overflow_warning == NULL
292 || wc < fold_deferred_overflow_code)
294 fold_deferred_overflow_warning = gmsgid;
295 fold_deferred_overflow_code = wc;
298 else if (issue_strict_overflow_warning (wc))
299 warning (OPT_Wstrict_overflow, gmsgid);
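/* Minimal usage sketch of the deferral API above (hypothetical caller,
   not part of this file):

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     bool keep = folded && TREE_CODE (folded) == INTEGER_CST;
     fold_undefer_overflow_warnings (keep, stmt, 0);

   Passing 0 for CODE means the deferred warning level is used, so the
   warning is only issued when the caller actually keeps the result.  */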
302 /* Return true if the built-in mathematical function specified by CODE
303 is odd, i.e. -f(x) == f(-x). */
306 negate_mathfn_p (enum built_in_function code)
310 CASE_FLT_FN (BUILT_IN_ASIN):
311 CASE_FLT_FN (BUILT_IN_ASINH):
312 CASE_FLT_FN (BUILT_IN_ATAN):
313 CASE_FLT_FN (BUILT_IN_ATANH):
314 CASE_FLT_FN (BUILT_IN_CASIN):
315 CASE_FLT_FN (BUILT_IN_CASINH):
316 CASE_FLT_FN (BUILT_IN_CATAN):
317 CASE_FLT_FN (BUILT_IN_CATANH):
318 CASE_FLT_FN (BUILT_IN_CBRT):
319 CASE_FLT_FN (BUILT_IN_CPROJ):
320 CASE_FLT_FN (BUILT_IN_CSIN):
321 CASE_FLT_FN (BUILT_IN_CSINH):
322 CASE_FLT_FN (BUILT_IN_CTAN):
323 CASE_FLT_FN (BUILT_IN_CTANH):
324 CASE_FLT_FN (BUILT_IN_ERF):
325 CASE_FLT_FN (BUILT_IN_LLROUND):
326 CASE_FLT_FN (BUILT_IN_LROUND):
327 CASE_FLT_FN (BUILT_IN_ROUND):
328 CASE_FLT_FN (BUILT_IN_SIN):
329 CASE_FLT_FN (BUILT_IN_SINH):
330 CASE_FLT_FN (BUILT_IN_TAN):
331 CASE_FLT_FN (BUILT_IN_TANH):
332 CASE_FLT_FN (BUILT_IN_TRUNC):
335 CASE_FLT_FN (BUILT_IN_LLRINT):
336 CASE_FLT_FN (BUILT_IN_LRINT):
337 CASE_FLT_FN (BUILT_IN_NEARBYINT):
338 CASE_FLT_FN (BUILT_IN_RINT):
339 return !flag_rounding_math;
347 /* Check whether we may negate an integer constant T without causing overflow. */
351 may_negate_without_overflow_p (const_tree t)
353 unsigned HOST_WIDE_INT val;
357 gcc_assert (TREE_CODE (t) == INTEGER_CST);
359 type = TREE_TYPE (t);
360 if (TYPE_UNSIGNED (type))
363 prec = TYPE_PRECISION (type);
364 if (prec > HOST_BITS_PER_WIDE_INT)
366 if (TREE_INT_CST_LOW (t) != 0)
368 prec -= HOST_BITS_PER_WIDE_INT;
369 val = TREE_INT_CST_HIGH (t);
372 val = TREE_INT_CST_LOW (t);
373 if (prec < HOST_BITS_PER_WIDE_INT)
374 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
375 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
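/* For a 32-bit signed type, for instance, the only INTEGER_CST rejected
   here is -2147483648 (INT_MIN), whose negation is not representable;
   all other values may be negated without overflow (illustration only).  */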
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
382 negate_expr_p (tree t)
389 type = TREE_TYPE (t);
392 switch (TREE_CODE (t))
395 if (TYPE_OVERFLOW_WRAPS (type))
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
409 /* We want to canonicalize to positive real constants. Pretend
410 that only negative ones can be easily negated. */
411 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
414 return negate_expr_p (TREE_REALPART (t))
415 && negate_expr_p (TREE_IMAGPART (t));
418 return negate_expr_p (TREE_OPERAND (t, 0))
419 && negate_expr_p (TREE_OPERAND (t, 1));
422 return negate_expr_p (TREE_OPERAND (t, 0));
425 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
426 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
428 /* -(A + B) -> (-B) - A. */
429 if (negate_expr_p (TREE_OPERAND (t, 1))
430 && reorder_operands_p (TREE_OPERAND (t, 0),
431 TREE_OPERAND (t, 1)))
433 /* -(A + B) -> (-A) - B. */
434 return negate_expr_p (TREE_OPERAND (t, 0));
437 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
438 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
439 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
440 && reorder_operands_p (TREE_OPERAND (t, 0),
441 TREE_OPERAND (t, 1));
444 if (TYPE_UNSIGNED (TREE_TYPE (t)))
450 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
451 return negate_expr_p (TREE_OPERAND (t, 1))
452 || negate_expr_p (TREE_OPERAND (t, 0));
460 /* In general we can't negate A / B, because if A is INT_MIN and
461 B is 1, we may turn this into INT_MIN / -1 which is undefined
462 and actually traps on some architectures. But if overflow is
463 undefined, we can negate, because - (INT_MIN / 1) is an overflow. */
465 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
466 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
468 return negate_expr_p (TREE_OPERAND (t, 1))
469 || negate_expr_p (TREE_OPERAND (t, 0));
472 /* Negate -((double)float) as (double)(-float). */
473 if (TREE_CODE (type) == REAL_TYPE)
475 tree tem = strip_float_extensions (t);
477 return negate_expr_p (tem);
482 /* Negate -f(x) as f(-x). */
483 if (negate_mathfn_p (builtin_mathfn_code (t)))
484 return negate_expr_p (CALL_EXPR_ARG (t, 0));
488 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
489 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
491 tree op1 = TREE_OPERAND (t, 1);
492 if (TREE_INT_CST_HIGH (op1) == 0
493 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
494 == TREE_INT_CST_LOW (op1))
505 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
506 simplification is possible.
507 If negate_expr_p would return true for T, NULL_TREE will never be returned. */
511 fold_negate_expr (location_t loc, tree t)
513 tree type = TREE_TYPE (t);
516 switch (TREE_CODE (t))
518 /* Convert - (~A) to A + 1. */
520 if (INTEGRAL_TYPE_P (type))
521 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
522 build_int_cst (type, 1));
526 tem = fold_negate_const (t, type);
527 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
528 || !TYPE_OVERFLOW_TRAPS (type))
533 tem = fold_negate_const (t, type);
534 /* Two's complement FP formats, such as c4x, may overflow. */
535 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
540 tem = fold_negate_const (t, type);
545 tree rpart = negate_expr (TREE_REALPART (t));
546 tree ipart = negate_expr (TREE_IMAGPART (t));
548 if ((TREE_CODE (rpart) == REAL_CST
549 && TREE_CODE (ipart) == REAL_CST)
550 || (TREE_CODE (rpart) == INTEGER_CST
551 && TREE_CODE (ipart) == INTEGER_CST))
552 return build_complex (type, rpart, ipart);
557 if (negate_expr_p (t))
558 return fold_build2_loc (loc, COMPLEX_EXPR, type,
559 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
560 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
564 if (negate_expr_p (t))
565 return fold_build1_loc (loc, CONJ_EXPR, type,
566 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
570 return TREE_OPERAND (t, 0);
573 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
574 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
576 /* -(A + B) -> (-B) - A. */
577 if (negate_expr_p (TREE_OPERAND (t, 1))
578 && reorder_operands_p (TREE_OPERAND (t, 0),
579 TREE_OPERAND (t, 1)))
581 tem = negate_expr (TREE_OPERAND (t, 1));
582 return fold_build2_loc (loc, MINUS_EXPR, type,
583 tem, TREE_OPERAND (t, 0));
586 /* -(A + B) -> (-A) - B. */
587 if (negate_expr_p (TREE_OPERAND (t, 0)))
589 tem = negate_expr (TREE_OPERAND (t, 0));
590 return fold_build2_loc (loc, MINUS_EXPR, type,
591 tem, TREE_OPERAND (t, 1));
597 /* - (A - B) -> B - A */
598 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
599 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
600 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
601 return fold_build2_loc (loc, MINUS_EXPR, type,
602 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
606 if (TYPE_UNSIGNED (type))
612 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
614 tem = TREE_OPERAND (t, 1);
615 if (negate_expr_p (tem))
616 return fold_build2_loc (loc, TREE_CODE (t), type,
617 TREE_OPERAND (t, 0), negate_expr (tem));
618 tem = TREE_OPERAND (t, 0);
619 if (negate_expr_p (tem))
620 return fold_build2_loc (loc, TREE_CODE (t), type,
621 negate_expr (tem), TREE_OPERAND (t, 1));
630 /* In general we can't negate A / B, because if A is INT_MIN and
631 B is 1, we may turn this into INT_MIN / -1 which is undefined
632 and actually traps on some architectures. But if overflow is
633 undefined, we can negate, because - (INT_MIN / 1) is an overflow. */
635 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
637 const char * const warnmsg = G_("assuming signed overflow does not "
638 "occur when negating a division");
639 tem = TREE_OPERAND (t, 1);
640 if (negate_expr_p (tem))
642 if (INTEGRAL_TYPE_P (type)
643 && (TREE_CODE (tem) != INTEGER_CST
644 || integer_onep (tem)))
645 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
646 return fold_build2_loc (loc, TREE_CODE (t), type,
647 TREE_OPERAND (t, 0), negate_expr (tem));
649 tem = TREE_OPERAND (t, 0);
650 if (negate_expr_p (tem))
652 if (INTEGRAL_TYPE_P (type)
653 && (TREE_CODE (tem) != INTEGER_CST
654 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
655 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 negate_expr (tem), TREE_OPERAND (t, 1));
663 /* Convert -((double)float) into (double)(-float). */
664 if (TREE_CODE (type) == REAL_TYPE)
666 tem = strip_float_extensions (t);
667 if (tem != t && negate_expr_p (tem))
668 return fold_convert_loc (loc, type, negate_expr (tem));
673 /* Negate -f(x) as f(-x). */
674 if (negate_mathfn_p (builtin_mathfn_code (t))
675 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
679 fndecl = get_callee_fndecl (t);
680 arg = negate_expr (CALL_EXPR_ARG (t, 0));
681 return build_call_expr_loc (loc, fndecl, 1, arg);
686 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
687 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
689 tree op1 = TREE_OPERAND (t, 1);
690 if (TREE_INT_CST_HIGH (op1) == 0
691 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
692 == TREE_INT_CST_LOW (op1))
694 tree ntype = TYPE_UNSIGNED (type)
695 ? signed_type_for (type)
696 : unsigned_type_for (type);
697 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
698 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
699 return fold_convert_loc (loc, type, temp);
711 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
712 negated in a simpler way. Also allow for T to be NULL_TREE, in which case return NULL_TREE. */
724 loc = EXPR_LOCATION (t);
725 type = TREE_TYPE (t);
728 tem = fold_negate_expr (loc, t);
731 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
732 SET_EXPR_LOCATION (tem, loc);
734 return fold_convert_loc (loc, type, tem);
737 /* Split a tree IN into constant, literal and variable parts that could be
738 combined with CODE to make IN. "constant" means an expression with
739 TREE_CONSTANT but that isn't an actual constant. CODE must be a
740 commutative arithmetic operation. Store the constant part into *CONP,
741 the literal in *LITP and return the variable part. If a part isn't
742 present, set it to null. If the tree does not decompose in this way,
743 return the entire tree as the variable part and the other parts as null.
745 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
746 case, we negate an operand that was subtracted, except if it is a
747 literal, for which we use *MINUS_LITP instead.
749 If NEGATE_P is true, we are negating all of IN, again except a literal
750 for which we use *MINUS_LITP instead.
752 If IN is itself a literal or constant, return it as appropriate.
754 Note that we do not guarantee that any of the three values will be the
755 same type as IN, but they will have the same signedness and mode. */
758 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
759 tree *minus_litp, int negate_p)
767 /* Strip any conversions that don't change the machine mode or signedness. */
768 STRIP_SIGN_NOPS (in);
770 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
771 || TREE_CODE (in) == FIXED_CST)
773 else if (TREE_CODE (in) == code
774 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
775 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
776 /* We can associate addition and subtraction together (even
777 though the C standard doesn't say so) for integers because
778 the value is not affected. For reals, the value might be
779 affected, so we can't. */
780 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
781 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
783 tree op0 = TREE_OPERAND (in, 0);
784 tree op1 = TREE_OPERAND (in, 1);
785 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
786 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
788 /* First see if either of the operands is a literal, then a constant. */
789 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
790 || TREE_CODE (op0) == FIXED_CST)
791 *litp = op0, op0 = 0;
792 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
793 || TREE_CODE (op1) == FIXED_CST)
794 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
796 if (op0 != 0 && TREE_CONSTANT (op0))
797 *conp = op0, op0 = 0;
798 else if (op1 != 0 && TREE_CONSTANT (op1))
799 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
801 /* If we haven't dealt with either operand, this is not a case we can
802 decompose. Otherwise, VAR is either of the ones remaining, if any. */
803 if (op0 != 0 && op1 != 0)
808 var = op1, neg_var_p = neg1_p;
810 /* Now do any needed negations. */
812 *minus_litp = *litp, *litp = 0;
814 *conp = negate_expr (*conp);
816 var = negate_expr (var);
818 else if (TREE_CONSTANT (in))
826 *minus_litp = *litp, *litp = 0;
827 else if (*minus_litp)
828 *litp = *minus_litp, *minus_litp = 0;
829 *conp = negate_expr (*conp);
830 var = negate_expr (var);
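/* Decomposition sketch (not from the original sources): splitting
   IN = a + 3 with CODE = PLUS_EXPR stores 3 in *LITP and returns the
   variable part a; splitting a - 3 stores the 3 in *MINUS_LITP instead,
   and a TREE_CONSTANT operand such as an ADDR_EXPR would go to *CONP.  */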
836 /* Re-associate trees split by the above function. T1 and T2 are
837 either expressions to associate or null. Return the new
838 expression, if any. LOC is the location of the new expression. If
839 we build an operation, do it in TYPE and with CODE. */
842 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
851 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
852 try to fold this since we will have infinite recursion. But do
853 deal with any NEGATE_EXPRs. */
854 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
855 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
857 if (code == PLUS_EXPR)
859 if (TREE_CODE (t1) == NEGATE_EXPR)
860 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
861 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
862 else if (TREE_CODE (t2) == NEGATE_EXPR)
863 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
864 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
865 else if (integer_zerop (t2))
866 return fold_convert_loc (loc, type, t1);
868 else if (code == MINUS_EXPR)
870 if (integer_zerop (t2))
871 return fold_convert_loc (loc, type, t1);
874 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
875 fold_convert_loc (loc, type, t2));
876 goto associate_trees_exit;
879 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
880 fold_convert_loc (loc, type, t2));
881 associate_trees_exit:
882 protected_set_expr_location (tem, loc);
886 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
887 for use in int_const_binop, size_binop and size_diffop. */
890 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
892 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
894 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
909 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
910 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
911 && TYPE_MODE (type1) == TYPE_MODE (type2);
915 /* Combine two integer constants ARG1 and ARG2 under operation CODE
916 to produce a new constant. Return NULL_TREE if we don't know how
917 to evaluate CODE at compile-time.
919 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
922 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
924 double_int op1, op2, res, tmp;
926 tree type = TREE_TYPE (arg1);
927 bool uns = TYPE_UNSIGNED (type);
929 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
930 bool overflow = false;
932 op1 = tree_to_double_int (arg1);
933 op2 = tree_to_double_int (arg2);
938 res = double_int_ior (op1, op2);
942 res = double_int_xor (op1, op2);
946 res = double_int_and (op1, op2);
950 res = double_int_rshift (op1, double_int_to_shwi (op2),
951 TYPE_PRECISION (type), !uns);
955 /* It's unclear from the C standard whether shifts can overflow.
956 The following code ignores overflow; perhaps a C standard
957 interpretation ruling is needed. */
958 res = double_int_lshift (op1, double_int_to_shwi (op2),
959 TYPE_PRECISION (type), !uns);
963 res = double_int_rrotate (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type));
968 res = double_int_lrotate (op1, double_int_to_shwi (op2),
969 TYPE_PRECISION (type));
973 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
974 &res.low, &res.high);
978 neg_double (op2.low, op2.high, &res.low, &res.high);
979 add_double (op1.low, op1.high, res.low, res.high,
980 &res.low, &res.high);
981 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
985 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
986 &res.low, &res.high);
990 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
992 /* This is a shortcut for a common special case. */
993 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
994 && !TREE_OVERFLOW (arg1)
995 && !TREE_OVERFLOW (arg2)
996 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
998 if (code == CEIL_DIV_EXPR)
999 op1.low += op2.low - 1;
1001 res.low = op1.low / op2.low, res.high = 0;
1005 /* ... fall through ... */
1007 case ROUND_DIV_EXPR:
1008 if (double_int_zero_p (op2))
1010 if (double_int_one_p (op2))
1015 if (double_int_equal_p (op1, op2)
1016 && ! double_int_zero_p (op1))
1018 res = double_int_one;
1021 overflow = div_and_round_double (code, uns,
1022 op1.low, op1.high, op2.low, op2.high,
1023 &res.low, &res.high,
1024 &tmp.low, &tmp.high);
1027 case TRUNC_MOD_EXPR:
1028 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1029 /* This is a shortcut for a common special case. */
1030 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1031 && !TREE_OVERFLOW (arg1)
1032 && !TREE_OVERFLOW (arg2)
1033 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1035 if (code == CEIL_MOD_EXPR)
1036 op1.low += op2.low - 1;
1037 res.low = op1.low % op2.low, res.high = 0;
1041 /* ... fall through ... */
1043 case ROUND_MOD_EXPR:
1044 if (double_int_zero_p (op2))
1046 overflow = div_and_round_double (code, uns,
1047 op1.low, op1.high, op2.low, op2.high,
1048 &tmp.low, &tmp.high,
1049 &res.low, &res.high);
1053 res = double_int_min (op1, op2, uns);
1057 res = double_int_max (op1, op2, uns);
1066 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1068 /* Propagate overflow flags ourselves. */
1069 if (((!uns || is_sizetype) && overflow)
1070 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1073 TREE_OVERFLOW (t) = 1;
1077 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1078 ((!uns || is_sizetype) && overflow)
1079 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
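/* Usage sketch (hypothetical): int_const_binop (PLUS_EXPR,
   build_int_cst (integer_type_node, 7),
   build_int_cst (integer_type_node, 5), 0) yields an INTEGER_CST of 12;
   the division and modulus cases return NULL_TREE for a zero divisor.  */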
1084 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1085 constant. We assume ARG1 and ARG2 have the same data type, or at least
1086 are the same kind of constant and the same machine mode. Return zero if
1087 combining the constants is not allowed in the current operating mode. */
1090 const_binop (enum tree_code code, tree arg1, tree arg2)
1092 /* Sanity check for the recursive cases. */
1099 if (TREE_CODE (arg1) == INTEGER_CST)
1100 return int_const_binop (code, arg1, arg2, 0);
1102 if (TREE_CODE (arg1) == REAL_CST)
1104 enum machine_mode mode;
1107 REAL_VALUE_TYPE value;
1108 REAL_VALUE_TYPE result;
1112 /* The following codes are handled by real_arithmetic. */
1127 d1 = TREE_REAL_CST (arg1);
1128 d2 = TREE_REAL_CST (arg2);
1130 type = TREE_TYPE (arg1);
1131 mode = TYPE_MODE (type);
1133 /* Don't perform operation if we honor signaling NaNs and
1134 either operand is a NaN. */
1135 if (HONOR_SNANS (mode)
1136 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1139 /* Don't perform operation if it would raise a division
1140 by zero exception. */
1141 if (code == RDIV_EXPR
1142 && REAL_VALUES_EQUAL (d2, dconst0)
1143 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1146 /* If either operand is a NaN, just return it. Otherwise, set up
1147 for floating-point trap; we return an overflow. */
1148 if (REAL_VALUE_ISNAN (d1))
1150 else if (REAL_VALUE_ISNAN (d2))
1153 inexact = real_arithmetic (&value, code, &d1, &d2);
1154 real_convert (&result, mode, &value);
1156 /* Don't constant fold this floating point operation if
1157 the result has overflowed and flag_trapping_math is set. */
1158 if (flag_trapping_math
1159 && MODE_HAS_INFINITIES (mode)
1160 && REAL_VALUE_ISINF (result)
1161 && !REAL_VALUE_ISINF (d1)
1162 && !REAL_VALUE_ISINF (d2))
1165 /* Don't constant fold this floating point operation if the
1166 result may depend upon the run-time rounding mode and
1167 flag_rounding_math is set, or if GCC's software emulation
1168 is unable to accurately represent the result. */
1169 if ((flag_rounding_math
1170 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1171 && (inexact || !real_identical (&result, &value)))
1174 t = build_real (type, result);
1176 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1180 if (TREE_CODE (arg1) == FIXED_CST)
1182 FIXED_VALUE_TYPE f1;
1183 FIXED_VALUE_TYPE f2;
1184 FIXED_VALUE_TYPE result;
1189 /* The following codes are handled by fixed_arithmetic. */
1195 case TRUNC_DIV_EXPR:
1196 f2 = TREE_FIXED_CST (arg2);
1201 f2.data.high = TREE_INT_CST_HIGH (arg2);
1202 f2.data.low = TREE_INT_CST_LOW (arg2);
1210 f1 = TREE_FIXED_CST (arg1);
1211 type = TREE_TYPE (arg1);
1212 sat_p = TYPE_SATURATING (type);
1213 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1214 t = build_fixed (type, result);
1215 /* Propagate overflow flags. */
1216 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1217 TREE_OVERFLOW (t) = 1;
1221 if (TREE_CODE (arg1) == COMPLEX_CST)
1223 tree type = TREE_TYPE (arg1);
1224 tree r1 = TREE_REALPART (arg1);
1225 tree i1 = TREE_IMAGPART (arg1);
1226 tree r2 = TREE_REALPART (arg2);
1227 tree i2 = TREE_IMAGPART (arg2);
1234 real = const_binop (code, r1, r2);
1235 imag = const_binop (code, i1, i2);
1239 if (COMPLEX_FLOAT_TYPE_P (type))
1240 return do_mpc_arg2 (arg1, arg2, type,
1241 /* do_nonfinite= */ folding_initializer,
1244 real = const_binop (MINUS_EXPR,
1245 const_binop (MULT_EXPR, r1, r2),
1246 const_binop (MULT_EXPR, i1, i2));
1247 imag = const_binop (PLUS_EXPR,
1248 const_binop (MULT_EXPR, r1, i2),
1249 const_binop (MULT_EXPR, i1, r2));
1253 if (COMPLEX_FLOAT_TYPE_P (type))
1254 return do_mpc_arg2 (arg1, arg2, type,
1255 /* do_nonfinite= */ folding_initializer,
1258 case TRUNC_DIV_EXPR:
1260 case FLOOR_DIV_EXPR:
1261 case ROUND_DIV_EXPR:
1262 if (flag_complex_method == 0)
1264 /* Keep this algorithm in sync with
1265 tree-complex.c:expand_complex_div_straight().
1267 Expand complex division to scalars, straightforward algorithm.
1268 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t), where t = br*br + bi*bi. */
1272 = const_binop (PLUS_EXPR,
1273 const_binop (MULT_EXPR, r2, r2),
1274 const_binop (MULT_EXPR, i2, i2));
1276 = const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r1, r2),
1278 const_binop (MULT_EXPR, i1, i2));
1280 = const_binop (MINUS_EXPR,
1281 const_binop (MULT_EXPR, i1, r2),
1282 const_binop (MULT_EXPR, r1, i2));
1284 real = const_binop (code, t1, magsquared);
1285 imag = const_binop (code, t2, magsquared);
1289 /* Keep this algorithm in sync with
1290 tree-complex.c:expand_complex_div_wide().
1292 Expand complex division to scalars, modified algorithm to minimize
1293 overflow with wide input ranges. */
1294 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1295 fold_abs_const (r2, TREE_TYPE (type)),
1296 fold_abs_const (i2, TREE_TYPE (type)));
1298 if (integer_nonzerop (compare))
1300 /* In the TRUE branch, we compute ratio = br/bi;
1302 div = (br * ratio) + bi;
1303 tr = (ar * ratio) + ai;
1304 ti = (ai * ratio) - ar; then tr /= div, ti /= div. */
1307 tree ratio = const_binop (code, r2, i2);
1308 tree div = const_binop (PLUS_EXPR, i2,
1309 const_binop (MULT_EXPR, r2, ratio));
1310 real = const_binop (MULT_EXPR, r1, ratio);
1311 real = const_binop (PLUS_EXPR, real, i1);
1312 real = const_binop (code, real, div);
1314 imag = const_binop (MULT_EXPR, i1, ratio);
1315 imag = const_binop (MINUS_EXPR, imag, r1);
1316 imag = const_binop (code, imag, div);
1320 /* In the FALSE branch, we compute ratio = d/c;
1322 divisor = (d * ratio) + c;
1323 tr = (b * ratio) + a;
1324 ti = b - (a * ratio); then tr /= divisor, ti /= divisor. */
1327 tree ratio = const_binop (code, i2, r2);
1328 tree div = const_binop (PLUS_EXPR, r2,
1329 const_binop (MULT_EXPR, i2, ratio));
1331 real = const_binop (MULT_EXPR, i1, ratio);
1332 real = const_binop (PLUS_EXPR, real, r1);
1333 real = const_binop (code, real, div);
1335 imag = const_binop (MULT_EXPR, r1, ratio);
1336 imag = const_binop (MINUS_EXPR, i1, imag);
1337 imag = const_binop (code, imag, div);
1347 return build_complex (type, real, imag);
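/* Worked example of the straightforward formula above (illustration
   only, in exact real arithmetic): for a = 1 + 2i and b = 3 + 4i,
   t = 3*3 + 4*4 = 25, so a / b = (1*3 + 2*4)/25 + i(2*3 - 1*4)/25
   = 11/25 + (2/25)i.  */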
1350 if (TREE_CODE (arg1) == VECTOR_CST)
1352 tree type = TREE_TYPE (arg1);
1353 int count = TYPE_VECTOR_SUBPARTS (type), i;
1354 tree elements1, elements2, list = NULL_TREE;
1356 if (TREE_CODE (arg2) != VECTOR_CST)
1359 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1360 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1362 for (i = 0; i < count; i++)
1364 tree elem1, elem2, elem;
1366 /* The trailing elements can be empty and should be treated as 0. */
1368 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1371 elem1 = TREE_VALUE (elements1);
1372 elements1 = TREE_CHAIN (elements1);
1376 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1379 elem2 = TREE_VALUE (elements2);
1380 elements2 = TREE_CHAIN (elements2);
1383 elem = const_binop (code, elem1, elem2);
1385 /* It is possible that const_binop cannot handle the given
1386 code and returns NULL_TREE. */
1387 if (elem == NULL_TREE)
1390 list = tree_cons (NULL_TREE, elem, list);
1392 return build_vector (type, nreverse (list));
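/* For example (illustration only): adding the VECTOR_CSTs {1,2,3,4} and
   {5,6,7,8} element-wise with PLUS_EXPR yields {6,8,10,12}; if any
   element pair cannot be folded, the whole fold gives up and returns
   NULL_TREE.  */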
1397 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1398 indicates which particular sizetype to create. */
1401 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1403 return build_int_cst (sizetype_tab[(int) kind], number);
1406 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1407 is a tree code. The type of the result is taken from the operands.
1408 Both must be equivalent integer types, ala int_binop_types_match_p.
1409 If the operands are constant, so is the result. */
1412 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1414 tree type = TREE_TYPE (arg0);
1416 if (arg0 == error_mark_node || arg1 == error_mark_node)
1417 return error_mark_node;
1419 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1422 /* Handle the special case of two integer constants faster. */
1423 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1425 /* And some specific cases even faster than that. */
1426 if (code == PLUS_EXPR)
1428 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1430 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1433 else if (code == MINUS_EXPR)
1435 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1438 else if (code == MULT_EXPR)
1440 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1444 /* Handle general case of two integer constants. */
1445 return int_const_binop (code, arg0, arg1, 0);
1448 return fold_build2_loc (loc, code, type, arg0, arg1);
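/* Usage sketch (hypothetical caller): size_binop (MULT_EXPR,
   TYPE_SIZE_UNIT (elt_type), size_int (n)) folds to a sizetype
   INTEGER_CST when both operands are constants and otherwise builds
   the MULT_EXPR via fold_build2_loc.  */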
1451 /* Given two values, either both of sizetype or both of bitsizetype,
1452 compute the difference between the two values. Return the value
1453 in signed type corresponding to the type of the operands. */
1456 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1458 tree type = TREE_TYPE (arg0);
1461 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1464 /* If the type is already signed, just do the simple thing. */
1465 if (!TYPE_UNSIGNED (type))
1466 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1468 if (type == sizetype)
1470 else if (type == bitsizetype)
1471 ctype = sbitsizetype;
1473 ctype = signed_type_for (type);
1475 /* If either operand is not a constant, do the conversions to the signed
1476 type and subtract. The hardware will do the right thing with any
1477 overflow in the subtraction. */
1478 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1479 return size_binop_loc (loc, MINUS_EXPR,
1480 fold_convert_loc (loc, ctype, arg0),
1481 fold_convert_loc (loc, ctype, arg1));
1483 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1484 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1485 overflow) and negate (which can't either). Special-case a result
1486 of zero while we're here. */
1487 if (tree_int_cst_equal (arg0, arg1))
1488 return build_int_cst (ctype, 0);
1489 else if (tree_int_cst_lt (arg1, arg0))
1490 return fold_convert_loc (loc, ctype,
1491 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1493 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1494 fold_convert_loc (loc, ctype,
1495 size_binop_loc (loc,
1500 /* A subroutine of fold_convert_const handling conversions of an
1501 INTEGER_CST to another integer type. */
1504 fold_convert_const_int_from_int (tree type, const_tree arg1)
1508 /* Given an integer constant, make new constant with new type,
1509 appropriately sign-extended or truncated. */
1510 t = force_fit_type_double (type, tree_to_double_int (arg1),
1511 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1512 (TREE_INT_CST_HIGH (arg1) < 0
1513 && (TYPE_UNSIGNED (type)
1514 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1515 | TREE_OVERFLOW (arg1));
1520 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1521 to an integer type. */
1524 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1529 /* The following code implements the floating point to integer
1530 conversion rules required by the Java Language Specification,
1531 that IEEE NaNs are mapped to zero and values that overflow
1532 the target precision saturate, i.e. values greater than
1533 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1534 are mapped to INT_MIN. These semantics are allowed by the
1535 C and C++ standards that simply state that the behavior of
1536 FP-to-integer conversion is unspecified upon overflow. */
1540 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1544 case FIX_TRUNC_EXPR:
1545 real_trunc (&r, VOIDmode, &x);
1552 /* If R is NaN, return zero and show we have an overflow. */
1553 if (REAL_VALUE_ISNAN (r))
1556 val = double_int_zero;
1559 /* See if R is less than the lower bound or greater than the upper bound. */
1564 tree lt = TYPE_MIN_VALUE (type);
1565 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1566 if (REAL_VALUES_LESS (r, l))
1569 val = tree_to_double_int (lt);
1575 tree ut = TYPE_MAX_VALUE (type);
1578 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1579 if (REAL_VALUES_LESS (u, r))
1582 val = tree_to_double_int (ut);
1588 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1590 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
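/* Under the saturating rules above (illustration only): converting the
   REAL_CST 3.5e10 to a 32-bit signed type yields TYPE_MAX_VALUE
   (2147483647) with TREE_OVERFLOW set, and converting a NaN yields 0,
   likewise flagged as an overflow.  */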
1594 /* A subroutine of fold_convert_const handling conversions of a
1595 FIXED_CST to an integer type. */
1598 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1601 double_int temp, temp_trunc;
1604 /* Right shift FIXED_CST to temp by fbit. */
1605 temp = TREE_FIXED_CST (arg1).data;
1606 mode = TREE_FIXED_CST (arg1).mode;
1607 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1609 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1610 HOST_BITS_PER_DOUBLE_INT,
1611 SIGNED_FIXED_POINT_MODE_P (mode));
1613 /* Left shift temp to temp_trunc by fbit. */
1614 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1615 HOST_BITS_PER_DOUBLE_INT,
1616 SIGNED_FIXED_POINT_MODE_P (mode));
1620 temp = double_int_zero;
1621 temp_trunc = double_int_zero;
1624 /* If FIXED_CST is negative, we need to round the value toward 0:
1625 if the fractional bits are not zero, add 1 to temp. */
1626 if (SIGNED_FIXED_POINT_MODE_P (mode)
1627 && double_int_negative_p (temp_trunc)
1628 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1629 temp = double_int_add (temp, double_int_one);
1631 /* Given a fixed-point constant, make new constant with new type,
1632 appropriately sign-extended or truncated. */
1633 t = force_fit_type_double (type, temp, -1,
1634 (double_int_negative_p (temp)
1635 && (TYPE_UNSIGNED (type)
1636 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1637 | TREE_OVERFLOW (arg1));
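/* E.g. (sketch): converting the fixed-point value -2.75 this way rounds
   toward zero and yields the integer -2, while 2.75 yields 2.  */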
1642 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1643 to another floating point type. */
1646 fold_convert_const_real_from_real (tree type, const_tree arg1)
1648 REAL_VALUE_TYPE value;
1651 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1652 t = build_real (type, value);
1654 /* If converting an infinity or NAN to a representation that doesn't
1655 have one, set the overflow bit so that we can produce some kind of
1656 error message at the appropriate point if necessary. It's not the
1657 most user-friendly message, but it's better than nothing. */
1658 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1659 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1660 TREE_OVERFLOW (t) = 1;
1661 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1662 && !MODE_HAS_NANS (TYPE_MODE (type)))
1663 TREE_OVERFLOW (t) = 1;
1664 /* Regular overflow, conversion produced an infinity in a mode that
1665 can't represent them. */
1666 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1667 && REAL_VALUE_ISINF (value)
1668 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1669 TREE_OVERFLOW (t) = 1;
1671 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1675 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1676 to a floating point type. */
1679 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1681 REAL_VALUE_TYPE value;
1684 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1685 t = build_real (type, value);
1687 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1691 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1692 to another fixed-point type. */
1695 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1697 FIXED_VALUE_TYPE value;
1701 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1702 TYPE_SATURATING (type));
1703 t = build_fixed (type, value);
1705 /* Propagate overflow flags. */
1706 if (overflow_p | TREE_OVERFLOW (arg1))
1707 TREE_OVERFLOW (t) = 1;
1711 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1712 to a fixed-point type. */
1715 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1717 FIXED_VALUE_TYPE value;
1721 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1722 TREE_INT_CST (arg1),
1723 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1724 TYPE_SATURATING (type));
1725 t = build_fixed (type, value);
1727 /* Propagate overflow flags. */
1728 if (overflow_p | TREE_OVERFLOW (arg1))
1729 TREE_OVERFLOW (t) = 1;
1733 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1734 to a fixed-point type. */
1737 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1739 FIXED_VALUE_TYPE value;
1743 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1744 &TREE_REAL_CST (arg1),
1745 TYPE_SATURATING (type));
1746 t = build_fixed (type, value);
1748 /* Propagate overflow flags. */
1749 if (overflow_p | TREE_OVERFLOW (arg1))
1750 TREE_OVERFLOW (t) = 1;
1754 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1755 type TYPE. If no simplification can be done, return NULL_TREE. */
1758 fold_convert_const (enum tree_code code, tree type, tree arg1)
1760 if (TREE_TYPE (arg1) == type)
1763 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1764 || TREE_CODE (type) == OFFSET_TYPE)
1766 if (TREE_CODE (arg1) == INTEGER_CST)
1767 return fold_convert_const_int_from_int (type, arg1);
1768 else if (TREE_CODE (arg1) == REAL_CST)
1769 return fold_convert_const_int_from_real (code, type, arg1);
1770 else if (TREE_CODE (arg1) == FIXED_CST)
1771 return fold_convert_const_int_from_fixed (type, arg1);
1773 else if (TREE_CODE (type) == REAL_TYPE)
1775 if (TREE_CODE (arg1) == INTEGER_CST)
1776 return build_real_from_int_cst (type, arg1);
1777 else if (TREE_CODE (arg1) == REAL_CST)
1778 return fold_convert_const_real_from_real (type, arg1);
1779 else if (TREE_CODE (arg1) == FIXED_CST)
1780 return fold_convert_const_real_from_fixed (type, arg1);
1782 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1784 if (TREE_CODE (arg1) == FIXED_CST)
1785 return fold_convert_const_fixed_from_fixed (type, arg1);
1786 else if (TREE_CODE (arg1) == INTEGER_CST)
1787 return fold_convert_const_fixed_from_int (type, arg1);
1788 else if (TREE_CODE (arg1) == REAL_CST)
1789 return fold_convert_const_fixed_from_real (type, arg1);
1794 /* Construct a vector of zero elements of vector type TYPE. */
1797 build_zero_vector (tree type)
1801 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1802 return build_vector_from_val (type, t);
1805 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1808 fold_convertible_p (const_tree type, const_tree arg)
1810 tree orig = TREE_TYPE (arg);
1815 if (TREE_CODE (arg) == ERROR_MARK
1816 || TREE_CODE (type) == ERROR_MARK
1817 || TREE_CODE (orig) == ERROR_MARK)
1820 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1823 switch (TREE_CODE (type))
1825 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1826 case POINTER_TYPE: case REFERENCE_TYPE:
1828 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1829 || TREE_CODE (orig) == OFFSET_TYPE)
1831 return (TREE_CODE (orig) == VECTOR_TYPE
1832 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1835 case FIXED_POINT_TYPE:
1839 return TREE_CODE (type) == TREE_CODE (orig);
1846 /* Convert expression ARG to type TYPE. Used by the middle-end for
1847 simple conversions in preference to calling the front-end's convert. */
1850 fold_convert_loc (location_t loc, tree type, tree arg)
1852 tree orig = TREE_TYPE (arg);
1858 if (TREE_CODE (arg) == ERROR_MARK
1859 || TREE_CODE (type) == ERROR_MARK
1860 || TREE_CODE (orig) == ERROR_MARK)
1861 return error_mark_node;
1863 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1864 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1866 switch (TREE_CODE (type))
1869 case REFERENCE_TYPE:
1870 /* Handle conversions between pointers to different address spaces. */
1871 if (POINTER_TYPE_P (orig)
1872 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1873 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1874 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1877 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879 if (TREE_CODE (arg) == INTEGER_CST)
1881 tem = fold_convert_const (NOP_EXPR, type, arg);
1882 if (tem != NULL_TREE)
1885 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1886 || TREE_CODE (orig) == OFFSET_TYPE)
1887 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1888 if (TREE_CODE (orig) == COMPLEX_TYPE)
1889 return fold_convert_loc (loc, type,
1890 fold_build1_loc (loc, REALPART_EXPR,
1891 TREE_TYPE (orig), arg));
1892 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1893 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1894 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1897 if (TREE_CODE (arg) == INTEGER_CST)
1899 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1900 if (tem != NULL_TREE)
1903 else if (TREE_CODE (arg) == REAL_CST)
1905 tem = fold_convert_const (NOP_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1909 else if (TREE_CODE (arg) == FIXED_CST)
1911 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1912 if (tem != NULL_TREE)
1916 switch (TREE_CODE (orig))
1919 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1920 case POINTER_TYPE: case REFERENCE_TYPE:
1921 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1924 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1926 case FIXED_POINT_TYPE:
1927 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1930 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1931 return fold_convert_loc (loc, type, tem);
1937 case FIXED_POINT_TYPE:
1938 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1939 || TREE_CODE (arg) == REAL_CST)
1941 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1943 goto fold_convert_exit;
1946 switch (TREE_CODE (orig))
1948 case FIXED_POINT_TYPE:
1953 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1956 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1957 return fold_convert_loc (loc, type, tem);
1964 switch (TREE_CODE (orig))
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1970 case FIXED_POINT_TYPE:
1971 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1972 fold_convert_loc (loc, TREE_TYPE (type), arg),
1973 fold_convert_loc (loc, TREE_TYPE (type),
1974 integer_zero_node));
1979 if (TREE_CODE (arg) == COMPLEX_EXPR)
1981 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1982 TREE_OPERAND (arg, 0));
1983 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1984 TREE_OPERAND (arg, 1));
1985 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1988 arg = save_expr (arg);
1989 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1990 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1991 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1992 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1993 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2001 if (integer_zerop (arg))
2002 return build_zero_vector (type);
2003 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2004 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2005 || TREE_CODE (orig) == VECTOR_TYPE);
2006 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2009 tem = fold_ignored_result (arg);
2010 if (TREE_CODE (tem) == MODIFY_EXPR)
2011 goto fold_convert_exit;
2012 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2018 protected_set_expr_location (tem, loc);
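/* Typical middle-end usage (sketch): fold_convert (sizetype, off) either
   folds a constant OFF via fold_convert_const or wraps it in a sizetype
   NOP_EXPR, avoiding a call into the front-end's convert.  */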
2022 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2026 maybe_lvalue_p (const_tree x)
2028 /* We only need to wrap lvalue tree codes. */
2029 switch (TREE_CODE (x))
2042 case ARRAY_RANGE_REF:
2048 case PREINCREMENT_EXPR:
2049 case PREDECREMENT_EXPR:
2051 case TRY_CATCH_EXPR:
2052 case WITH_CLEANUP_EXPR:
2061 /* Assume the worst for front-end tree codes. */
2062 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2070 /* Return an expr equal to X but certainly not valid as an lvalue. */
2073 non_lvalue_loc (location_t loc, tree x)
2075 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */
2080 if (! maybe_lvalue_p (x))
2082 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2083 SET_EXPR_LOCATION (x, loc);
2087 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2088 Zero means allow extended lvalues. */
2090 int pedantic_lvalues;
2092 /* When pedantic, return an expr equal to X but certainly not valid as a
2093 pedantic lvalue. Otherwise, return X. */
2096 pedantic_non_lvalue_loc (location_t loc, tree x)
2098 if (pedantic_lvalues)
2099 return non_lvalue_loc (loc, x);
2101 if (CAN_HAVE_LOCATION_P (x)
2102 && EXPR_LOCATION (x) != loc
2103 && !(TREE_CODE (x) == SAVE_EXPR
2104 || TREE_CODE (x) == TARGET_EXPR
2105 || TREE_CODE (x) == BIND_EXPR))
2108 SET_EXPR_LOCATION (x, loc);
2113 /* Given a tree comparison code, return the code that is the logical inverse
2114 of the given code. It is not safe to do this for floating-point
2115 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2116 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2119 invert_tree_comparison (enum tree_code code, bool honor_nans)
2121 if (honor_nans && flag_trapping_math)
2131 return honor_nans ? UNLE_EXPR : LE_EXPR;
2133 return honor_nans ? UNLT_EXPR : LT_EXPR;
2135 return honor_nans ? UNGE_EXPR : GE_EXPR;
2137 return honor_nans ? UNGT_EXPR : GT_EXPR;
2151 return UNORDERED_EXPR;
2152 case UNORDERED_EXPR:
2153 return ORDERED_EXPR;
2159 /* Similar, but return the comparison that results if the operands are
2160 swapped. This is safe for floating-point. */
2163 swap_tree_comparison (enum tree_code code)
2170 case UNORDERED_EXPR:
2196 /* Convert a comparison tree code from an enum tree_code representation
2197 into a compcode bit-based encoding. This function is the inverse of
2198 compcode_to_comparison. */
2200 static enum comparison_code
2201 comparison_to_compcode (enum tree_code code)
2218 return COMPCODE_ORD;
2219 case UNORDERED_EXPR:
2220 return COMPCODE_UNORD;
2222 return COMPCODE_UNLT;
2224 return COMPCODE_UNEQ;
2226 return COMPCODE_UNLE;
2228 return COMPCODE_UNGT;
2230 return COMPCODE_LTGT;
2232 return COMPCODE_UNGE;
2238 /* Convert a compcode bit-based encoding of a comparison operator back
2239 to GCC's enum tree_code representation. This function is the
2240 inverse of comparison_to_compcode. */
2242 static enum tree_code
2243 compcode_to_comparison (enum comparison_code code)
2260 return ORDERED_EXPR;
2261 case COMPCODE_UNORD:
2262 return UNORDERED_EXPR;
2280 /* Return a tree for the comparison which is the combination of
2281 doing the AND or OR (depending on CODE) of the two operations LCODE
2282 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2283 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2284 if this makes the transformation invalid. */
2287 combine_comparisons (location_t loc,
2288 enum tree_code code, enum tree_code lcode,
2289 enum tree_code rcode, tree truth_type,
2290 tree ll_arg, tree lr_arg)
2292 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2293 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2294 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2299 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2300 compcode = lcompcode & rcompcode;
2303 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2304 compcode = lcompcode | rcompcode;
2313 /* Eliminate unordered comparisons, as well as LTGT and ORD
2314 which are not used unless the mode has NaNs. */
2315 compcode &= ~COMPCODE_UNORD;
2316 if (compcode == COMPCODE_LTGT)
2317 compcode = COMPCODE_NE;
2318 else if (compcode == COMPCODE_ORD)
2319 compcode = COMPCODE_TRUE;
2321 else if (flag_trapping_math)
2323 /* Check that the original operation and the optimized ones will trap
2324 under the same condition. */
2325 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2326 && (lcompcode != COMPCODE_EQ)
2327 && (lcompcode != COMPCODE_ORD);
2328 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2329 && (rcompcode != COMPCODE_EQ)
2330 && (rcompcode != COMPCODE_ORD);
2331 bool trap = (compcode & COMPCODE_UNORD) == 0
2332 && (compcode != COMPCODE_EQ)
2333 && (compcode != COMPCODE_ORD);
2335 /* In a short-circuited boolean expression the LHS might be
2336 such that the RHS, if evaluated, will never trap. For
2337 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2338 if neither x nor y is NaN. (This is a mixed blessing: for
2339 example, the expression above will never trap, hence
2340 optimizing it to x < y would be invalid). */
2341 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2342 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 /* If the comparison was short-circuited, and only the RHS
2346 trapped, we may now generate a spurious trap. */
2348 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 /* If we changed the conditions that cause a trap, we lose. */
2352 if ((ltrap || rtrap) != trap)
2356 if (compcode == COMPCODE_TRUE)
2357 return constant_boolean_node (true, truth_type);
2358 else if (compcode == COMPCODE_FALSE)
2359 return constant_boolean_node (false, truth_type);
2362 enum tree_code tcode;
2364 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2365 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
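/* Worked example (an illustrative sketch, not part of the original
   sources): because the compcode encoding is bit-based, combining two
   comparisons of the same operands reduces to a bitwise AND or OR of
   their codes.  For integer operands A and B:

     (A < B) || (A == B)    becomes  A <= B   (LT | EQ == LE)
     (A < B) && (A > B)     becomes  false    (LT & GT == FALSE)
     (A <= B) && (A >= B)   becomes  A == B   (LE & GE == EQ)

   For floating-point operands the UNORD bit and the trapping checks
   above decide whether the rewrite is still allowed.  */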
2369 /* Return nonzero if two operands (typically of the same tree node)
2370 are necessarily equal. If either argument has side-effects this
2371 function returns zero. FLAGS modifies behavior as follows:
2373 If OEP_ONLY_CONST is set, only return nonzero for constants.
2374 This function tests whether the operands are indistinguishable;
2375 it does not test whether they are equal using C's == operation.
2376 The distinction is important for IEEE floating point, because
2377 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2378 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2380 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2381 even though it may hold multiple values during a function.
2382 This is because a GCC tree node guarantees that nothing else is
2383 executed between the evaluation of its "operands" (which may often
2384 be evaluated in arbitrary order). Hence if the operands themselves
2385 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2386 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2387 unset means assuming isochronic (or instantaneous) tree equivalence.
2388 Unless comparing arbitrary expression trees, such as from different
2389 statements, this flag can usually be left unset.
2391 If OEP_PURE_SAME is set, then pure functions with identical arguments
2392 are considered the same. It is used when the caller has other ways
2393 to ensure that global memory is unchanged in between. */
2396 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2398 /* If either is ERROR_MARK, they aren't equal. */
2399 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2400 || TREE_TYPE (arg0) == error_mark_node
2401 || TREE_TYPE (arg1) == error_mark_node)
2404 /* Similarly, if either does not have a type (like a released SSA name),
2405 they aren't equal. */
2406 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2409 /* Check equality of integer constants before bailing out due to
2410 precision differences. */
2411 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2412 return tree_int_cst_equal (arg0, arg1);
2414 /* If the two types don't have the same signedness, then we can't consider
2415 them equal. We must check this before the STRIP_NOPS calls
2416 because they may change the signedness of the arguments. As pointers
2417 strictly don't have a signedness, require either two pointers or
2418 two non-pointers as well. */
2419 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2420 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2423 /* We cannot consider pointers to different address spaces equal. */
2424 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2425 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2426 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2429 /* If the two types don't have the same precision, then it is not safe
2431 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2437 /* In case both args are comparisons but with different comparison
2438 code, try to swap the comparison operands of one arg to produce
2439 a match and compare that variant. */
2440 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2441 && COMPARISON_CLASS_P (arg0)
2442 && COMPARISON_CLASS_P (arg1))
2444 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2446 if (TREE_CODE (arg0) == swap_code)
2447 return operand_equal_p (TREE_OPERAND (arg0, 0),
2448 TREE_OPERAND (arg1, 1), flags)
2449 && operand_equal_p (TREE_OPERAND (arg0, 1),
2450 TREE_OPERAND (arg1, 0), flags);
2453 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2454 /* This is needed for conversions and for COMPONENT_REF.
2455 Might as well play it safe and always test this. */
2456 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2457 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2458 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2461 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2462 We don't care about side effects in that case because the SAVE_EXPR
2463 takes care of that for us. In all other cases, two expressions are
2464 equal if they have no side effects. If we have two identical
2465 expressions with side effects that should be treated the same due
2466 to the only side effects being identical SAVE_EXPR's, that will
2467 be detected in the recursive calls below. */
2468 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2469 && (TREE_CODE (arg0) == SAVE_EXPR
2470 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2473 /* Next handle constant cases, those for which we can return 1 even
2474 if ONLY_CONST is set. */
2475 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2476 switch (TREE_CODE (arg0))
2479 return tree_int_cst_equal (arg0, arg1);
2482 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2483 TREE_FIXED_CST (arg1));
2486 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2487 TREE_REAL_CST (arg1)))
2491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2493 /* If we do not distinguish between signed and unsigned zero,
2494 consider them equal. */
2495 if (real_zerop (arg0) && real_zerop (arg1))
2504 v1 = TREE_VECTOR_CST_ELTS (arg0);
2505 v2 = TREE_VECTOR_CST_ELTS (arg1);
2508 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2511 v1 = TREE_CHAIN (v1);
2512 v2 = TREE_CHAIN (v2);
2519 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2521 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2525 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2526 && ! memcmp (TREE_STRING_POINTER (arg0),
2527 TREE_STRING_POINTER (arg1),
2528 TREE_STRING_LENGTH (arg0)));
2531 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2537 if (flags & OEP_ONLY_CONST)
2540 /* Define macros to test an operand from arg0 and arg1 for equality and a
2541 variant that allows null and views null as being different from any
2542 non-null value. In the latter case, if either is null, they both
2543 must be; otherwise, do the normal comparison. */
2544 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2545 TREE_OPERAND (arg1, N), flags)
2547 #define OP_SAME_WITH_NULL(N) \
2548 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2549 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2551 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2554 /* Two conversions are equal only if signedness and modes match. */
2555 switch (TREE_CODE (arg0))
2558 case FIX_TRUNC_EXPR:
2559 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2560 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2570 case tcc_comparison:
2572 if (OP_SAME (0) && OP_SAME (1))
2575 /* For commutative ops, allow the other order. */
2576 return (commutative_tree_code (TREE_CODE (arg0))
2577 && operand_equal_p (TREE_OPERAND (arg0, 0),
2578 TREE_OPERAND (arg1, 1), flags)
2579 && operand_equal_p (TREE_OPERAND (arg0, 1),
2580 TREE_OPERAND (arg1, 0), flags));
2583 /* If either of the pointer (or reference) expressions we are
2584 dereferencing contain a side effect, these cannot be equal. */
2585 if (TREE_SIDE_EFFECTS (arg0)
2586 || TREE_SIDE_EFFECTS (arg1))
2589 switch (TREE_CODE (arg0))
2597 /* Require equal access sizes, and similar pointer types.
2598 We can have incomplete types for array references of
2599 variable-sized arrays from the Fortran frontend
2601 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2602 || (TYPE_SIZE (TREE_TYPE (arg0))
2603 && TYPE_SIZE (TREE_TYPE (arg1))
2604 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2605 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2606 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2607 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2608 && OP_SAME (0) && OP_SAME (1));
2611 case ARRAY_RANGE_REF:
2612 /* Operands 2 and 3 may be null.
2613 If the array index is constant, compare it by value first, as we
2614 may have different types but the same value here. */
2616 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2617 TREE_OPERAND (arg1, 1))
2619 && OP_SAME_WITH_NULL (2)
2620 && OP_SAME_WITH_NULL (3));
2623 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2624 may be NULL when we're called to compare MEM_EXPRs. */
2625 return OP_SAME_WITH_NULL (0)
2627 && OP_SAME_WITH_NULL (2);
2630 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 case tcc_expression:
2637 switch (TREE_CODE (arg0))
2640 case TRUTH_NOT_EXPR:
2643 case TRUTH_ANDIF_EXPR:
2644 case TRUTH_ORIF_EXPR:
2645 return OP_SAME (0) && OP_SAME (1);
2648 case WIDEN_MULT_PLUS_EXPR:
2649 case WIDEN_MULT_MINUS_EXPR:
2652 /* The multiplication operands are commutative. */
2655 case TRUTH_AND_EXPR:
2657 case TRUTH_XOR_EXPR:
2658 if (OP_SAME (0) && OP_SAME (1))
2661 /* Otherwise take into account this is a commutative operation. */
2662 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2663 TREE_OPERAND (arg1, 1), flags)
2664 && operand_equal_p (TREE_OPERAND (arg0, 1),
2665 TREE_OPERAND (arg1, 0), flags));
2670 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2677 switch (TREE_CODE (arg0))
2680 /* If the CALL_EXPRs call different functions, then they
2681 clearly cannot be equal. */
2682 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2687 unsigned int cef = call_expr_flags (arg0);
2688 if (flags & OEP_PURE_SAME)
2689 cef &= ECF_CONST | ECF_PURE;
2696 /* Now see if all the arguments are the same. */
2698 const_call_expr_arg_iterator iter0, iter1;
2700 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2701 a1 = first_const_call_expr_arg (arg1, &iter1);
2703 a0 = next_const_call_expr_arg (&iter0),
2704 a1 = next_const_call_expr_arg (&iter1))
2705 if (! operand_equal_p (a0, a1, flags))
2708 /* If we get here and both argument lists are exhausted
2709 then the CALL_EXPRs are equal. */
2710 return ! (a0 || a1);
2716 case tcc_declaration:
2717 /* Consider __builtin_sqrt equal to sqrt. */
2718 return (TREE_CODE (arg0) == FUNCTION_DECL
2719 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2720 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2721 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2728 #undef OP_SAME_WITH_NULL
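/* Worked example (an illustrative sketch, not part of the original
   sources): for side-effect-free operands A and B, the commutative
   handling above makes operand_equal_p report "A + B" and "B + A" as
   equal with FLAGS == 0, while the REAL_CST case returns 0 for the
   constants 0.0 and -0.0 whenever signed zeros are honored, even though
   they compare equal with C's == operator.  */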
2731 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2732 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2734 When in doubt, return 0. */
2737 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2739 int unsignedp1, unsignedpo;
2740 tree primarg0, primarg1, primother;
2741 unsigned int correct_width;
2743 if (operand_equal_p (arg0, arg1, 0))
2746 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2747 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2751 and see if the inner values are the same. This removes any
2752 signedness comparison, which doesn't matter here. */
2753 primarg0 = arg0, primarg1 = arg1;
2754 STRIP_NOPS (primarg0);
2755 STRIP_NOPS (primarg1);
2756 if (operand_equal_p (primarg0, primarg1, 0))
2759 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2760 actual comparison operand, ARG0.
2762 First throw away any conversions to wider types
2763 already present in the operands. */
2765 primarg1 = get_narrower (arg1, &unsignedp1);
2766 primother = get_narrower (other, &unsignedpo);
2768 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2769 if (unsignedp1 == unsignedpo
2770 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2771 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2773 tree type = TREE_TYPE (arg0);
2775 /* Make sure shorter operand is extended the right way
2776 to match the longer operand. */
2777 primarg1 = fold_convert (signed_or_unsigned_type_for
2778 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2780 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2787 /* See if ARG is an expression that is either a comparison or is performing
2788 arithmetic on comparisons. The comparisons must only be comparing
2789 two different values, which will be stored in *CVAL1 and *CVAL2; if
2790 they are nonzero it means that some operands have already been found.
2791 No variables may be used anywhere else in the expression except in the
2792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2795 If this is true, return 1. Otherwise, return zero. */
2798 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2800 enum tree_code code = TREE_CODE (arg);
2801 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2803 /* We can handle some of the tcc_expression cases here. */
2804 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2806 else if (tclass == tcc_expression
2807 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2808 || code == COMPOUND_EXPR))
2809 tclass = tcc_binary;
2811 else if (tclass == tcc_expression && code == SAVE_EXPR
2812 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2814 /* If we've already found a CVAL1 or CVAL2, this expression is
2815 too complex to handle.
2816 if (*cval1 || *cval2)
2826 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2830 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2831 cval1, cval2, save_p));
2836 case tcc_expression:
2837 if (code == COND_EXPR)
2838 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2843 cval1, cval2, save_p));
2846 case tcc_comparison:
2847 /* First see if we can handle the first operand, then the second. For
2848 the second operand, we know *CVAL1 can't be zero. It must be that
2849 one side of the comparison is each of the values; test for the
2850 case where this isn't true by failing if the two operands
2853 if (operand_equal_p (TREE_OPERAND (arg, 0),
2854 TREE_OPERAND (arg, 1), 0))
2858 *cval1 = TREE_OPERAND (arg, 0);
2859 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 0);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2868 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2870 else if (*cval2 == 0)
2871 *cval2 = TREE_OPERAND (arg, 1);
2872 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
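/* Worked example (an illustrative sketch, not part of the original
   sources): for ARG = (a < b) || (a == b) the code above records
   *CVAL1 = a and *CVAL2 = b and returns 1; for ARG = (a < b) || (a == c)
   it returns 0, because the comparisons mention three distinct values.  */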
2884 /* ARG is a tree that is known to contain just arithmetic operations and
2885 comparisons. Evaluate the operations in the tree substituting NEW0 for
2886 any occurrence of OLD0 as an operand of a comparison and likewise for
2890 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2891 tree old1, tree new1)
2893 tree type = TREE_TYPE (arg);
2894 enum tree_code code = TREE_CODE (arg);
2895 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2897 /* We can handle some of the tcc_expression cases here. */
2898 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2900 else if (tclass == tcc_expression
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2902 tclass = tcc_binary;
2907 return fold_build1_loc (loc, code, type,
2908 eval_subst (loc, TREE_OPERAND (arg, 0),
2909 old0, new0, old1, new1));
2912 return fold_build2_loc (loc, code, type,
2913 eval_subst (loc, TREE_OPERAND (arg, 0),
2914 old0, new0, old1, new1),
2915 eval_subst (loc, TREE_OPERAND (arg, 1),
2916 old0, new0, old1, new1));
2918 case tcc_expression:
2922 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2926 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2930 return fold_build3_loc (loc, code, type,
2931 eval_subst (loc, TREE_OPERAND (arg, 0),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 1),
2934 old0, new0, old1, new1),
2935 eval_subst (loc, TREE_OPERAND (arg, 2),
2936 old0, new0, old1, new1));
2940 /* Fall through - ??? */
2942 case tcc_comparison:
2944 tree arg0 = TREE_OPERAND (arg, 0);
2945 tree arg1 = TREE_OPERAND (arg, 1);
2947 /* We need to check both for exact equality and tree equality. The
2948 former will be true if the operand has a side-effect. In that
2949 case, we know the operand occurred exactly once. */
2951 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2953 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2956 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2958 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2961 return fold_build2_loc (loc, code, type, arg0, arg1);
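/* Worked example (an illustrative sketch, not part of the original
   sources): with OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y, eval_subst
   rewrites (a < b) && (b == a) into (x < y) && (y == x); only comparison
   operands are substituted, while the surrounding operators are rebuilt
   unchanged.  */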
2969 /* Return a tree for the case when the result of an expression is RESULT
2970 converted to TYPE and OMITTED was previously an operand of the expression
2971 but is now not needed (e.g., we folded OMITTED * 0).
2973 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2974 the conversion of RESULT to TYPE. */
2977 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2979 tree t = fold_convert_loc (loc, type, result);
2981 /* If the resulting operand is an empty statement, just return the omitted
2982 statement cast to void. */
2983 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2985 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2986 goto omit_one_operand_exit;
2989 if (TREE_SIDE_EFFECTS (omitted))
2991 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2992 goto omit_one_operand_exit;
2995 return non_lvalue_loc (loc, t);
2997 omit_one_operand_exit:
2998 protected_set_expr_location (t, loc);
3002 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3005 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3008 tree t = fold_convert_loc (loc, type, result);
3010 /* If the resulting operand is an empty statement, just return the omitted
3011 statement cast to void. */
3012 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3014 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3015 goto pedantic_omit_one_operand_exit;
3018 if (TREE_SIDE_EFFECTS (omitted))
3020 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3021 goto pedantic_omit_one_operand_exit;
3024 return pedantic_non_lvalue_loc (loc, t);
3026 pedantic_omit_one_operand_exit:
3027 protected_set_expr_location (t, loc);
3031 /* Return a tree for the case when the result of an expression is RESULT
3032 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3033 of the expression but are now not needed.
3035 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3036 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3037 evaluated before OMITTED2. Otherwise, if neither has side effects,
3038 just do the conversion of RESULT to TYPE. */
3041 omit_two_operands_loc (location_t loc, tree type, tree result,
3042 tree omitted1, tree omitted2)
3044 tree t = fold_convert_loc (loc, type, result);
3046 if (TREE_SIDE_EFFECTS (omitted2))
3048 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3049 SET_EXPR_LOCATION (t, loc);
3051 if (TREE_SIDE_EFFECTS (omitted1))
3053 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3054 SET_EXPR_LOCATION (t, loc);
3057 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
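/* Worked example (an illustrative sketch, not part of the original
   sources): if RESULT is the integer constant 0 and both OMITTED1 = f ()
   and OMITTED2 = g () have side effects, the tree built above has the
   shape (f (), (g (), 0)), a chain of COMPOUND_EXPRs that evaluates
   OMITTED1 before OMITTED2 and yields 0.  */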
3061 /* Return a simplified tree node for the truth-negation of ARG. This
3062 never alters ARG itself. We assume that ARG is an operation that
3063 returns a truth value (0 or 1).
3065 FIXME: one would think we would fold the result, but it causes
3066 problems with the dominator optimizer. */
3069 fold_truth_not_expr (location_t loc, tree arg)
3071 tree t, type = TREE_TYPE (arg);
3072 enum tree_code code = TREE_CODE (arg);
3073 location_t loc1, loc2;
3075 /* If this is a comparison, we can simply invert it, except for
3076 floating-point non-equality comparisons, in which case we just
3077 enclose a TRUTH_NOT_EXPR around what we have. */
3079 if (TREE_CODE_CLASS (code) == tcc_comparison)
3081 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3082 if (FLOAT_TYPE_P (op_type)
3083 && flag_trapping_math
3084 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3085 && code != NE_EXPR && code != EQ_EXPR)
3088 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3089 if (code == ERROR_MARK)
3092 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3093 SET_EXPR_LOCATION (t, loc);
3100 return constant_boolean_node (integer_zerop (arg), type);
3102 case TRUTH_AND_EXPR:
3103 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3104 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3105 if (loc1 == UNKNOWN_LOCATION)
3107 if (loc2 == UNKNOWN_LOCATION)
3109 t = build2 (TRUTH_OR_EXPR, type,
3110 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3111 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3115 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3116 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3117 if (loc1 == UNKNOWN_LOCATION)
3119 if (loc2 == UNKNOWN_LOCATION)
3121 t = build2 (TRUTH_AND_EXPR, type,
3122 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3123 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3126 case TRUTH_XOR_EXPR:
3127 /* Here we can invert either operand. We invert the first operand
3128 unless the second operand is a TRUTH_NOT_EXPR in which case our
3129 result is the XOR of the first operand with the inside of the
3130 negation of the second operand. */
3132 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3133 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3134 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3136 t = build2 (TRUTH_XOR_EXPR, type,
3137 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3138 TREE_OPERAND (arg, 1));
3141 case TRUTH_ANDIF_EXPR:
3142 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3143 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3144 if (loc1 == UNKNOWN_LOCATION)
3146 if (loc2 == UNKNOWN_LOCATION)
3148 t = build2 (TRUTH_ORIF_EXPR, type,
3149 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3150 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3153 case TRUTH_ORIF_EXPR:
3154 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3155 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3156 if (loc1 == UNKNOWN_LOCATION)
3158 if (loc2 == UNKNOWN_LOCATION)
3160 t = build2 (TRUTH_ANDIF_EXPR, type,
3161 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3162 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3165 case TRUTH_NOT_EXPR:
3166 return TREE_OPERAND (arg, 0);
3170 tree arg1 = TREE_OPERAND (arg, 1);
3171 tree arg2 = TREE_OPERAND (arg, 2);
3173 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3174 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3175 if (loc1 == UNKNOWN_LOCATION)
3177 if (loc2 == UNKNOWN_LOCATION)
3180 /* A COND_EXPR may have a throw as one operand, which
3181 then has void type. Just leave void operands
3183 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3184 VOID_TYPE_P (TREE_TYPE (arg1))
3185 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3186 VOID_TYPE_P (TREE_TYPE (arg2))
3187 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3192 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3193 if (loc1 == UNKNOWN_LOCATION)
3195 t = build2 (COMPOUND_EXPR, type,
3196 TREE_OPERAND (arg, 0),
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3200 case NON_LVALUE_EXPR:
3201 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3202 if (loc1 == UNKNOWN_LOCATION)
3204 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3207 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3209 t = build1 (TRUTH_NOT_EXPR, type, arg);
3213 /* ... fall through ... */
3216 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3217 if (loc1 == UNKNOWN_LOCATION)
3219 t = build1 (TREE_CODE (arg), type,
3220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3224 if (!integer_onep (TREE_OPERAND (arg, 1)))
3226 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3230 t = build1 (TRUTH_NOT_EXPR, type, arg);
3233 case CLEANUP_POINT_EXPR:
3234 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3235 if (loc1 == UNKNOWN_LOCATION)
3237 t = build1 (CLEANUP_POINT_EXPR, type,
3238 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3247 SET_EXPR_LOCATION (t, loc);
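/* Worked example (an illustrative sketch, not part of the original
   sources): for integer operands the comparison is simply inverted, so
   !(a < b) becomes a >= b and !(a && b) becomes !a || !b.  For
   floating-point operands under -ftrapping-math, rewriting a < b would
   change which inputs trap on NaN, so the function returns NULL_TREE and
   the caller below falls back to wrapping a TRUTH_NOT_EXPR around the
   comparison.  */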
3252 /* Return a simplified tree node for the truth-negation of ARG. This
3253 never alters ARG itself. We assume that ARG is an operation that
3254 returns a truth value (0 or 1).
3256 FIXME: one would think we would fold the result, but it causes
3257 problems with the dominator optimizer. */
3260 invert_truthvalue_loc (location_t loc, tree arg)
3264 if (TREE_CODE (arg) == ERROR_MARK)
3267 tem = fold_truth_not_expr (loc, arg);
3270 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3271 SET_EXPR_LOCATION (tem, loc);
3277 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3278 operands are another bit-wise operation with a common input. If so,
3279 distribute the bit operations to save an operation and possibly two if
3280 constants are involved. For example, convert
3281 (A | B) & (A | C) into A | (B & C)
3282 Further simplification will occur if B and C are constants.
3284 If this optimization cannot be done, 0 will be returned. */
3287 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3288 tree arg0, tree arg1)
3293 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3294 || TREE_CODE (arg0) == code
3295 || (TREE_CODE (arg0) != BIT_AND_EXPR
3296 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3299 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3301 common = TREE_OPERAND (arg0, 0);
3302 left = TREE_OPERAND (arg0, 1);
3303 right = TREE_OPERAND (arg1, 1);
3305 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3307 common = TREE_OPERAND (arg0, 0);
3308 left = TREE_OPERAND (arg0, 1);
3309 right = TREE_OPERAND (arg1, 0);
3311 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3313 common = TREE_OPERAND (arg0, 1);
3314 left = TREE_OPERAND (arg0, 0);
3315 right = TREE_OPERAND (arg1, 1);
3317 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3319 common = TREE_OPERAND (arg0, 1);
3320 left = TREE_OPERAND (arg0, 0);
3321 right = TREE_OPERAND (arg1, 0);
3326 common = fold_convert_loc (loc, type, common);
3327 left = fold_convert_loc (loc, type, left);
3328 right = fold_convert_loc (loc, type, right);
3329 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3330 fold_build2_loc (loc, code, type, left, right));
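/* Worked example (an illustrative sketch, not part of the original
   sources): factoring out the common operand lets constant folding
   finish the job:

     (a | 4) & (a | 1)   ->   a | (4 & 1)   ->   a | 0   ->   a
     (a & b) | (a & c)   ->   a & (b | c)

   The second form saves one bitwise operation even when B and C are not
   constants.  */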
3333 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3334 with code CODE. This optimization is unsafe. */
3336 distribute_real_division (location_t loc, enum tree_code code, tree type,
3337 tree arg0, tree arg1)
3339 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3340 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3342 /* (A / C) +- (B / C) -> (A +- B) / C. */
3344 && operand_equal_p (TREE_OPERAND (arg0, 1),
3345 TREE_OPERAND (arg1, 1), 0))
3346 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3347 fold_build2_loc (loc, code, type,
3348 TREE_OPERAND (arg0, 0),
3349 TREE_OPERAND (arg1, 0)),
3350 TREE_OPERAND (arg0, 1));
3352 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3353 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3354 TREE_OPERAND (arg1, 0), 0)
3355 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3356 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3358 REAL_VALUE_TYPE r0, r1;
3359 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3360 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3362 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3364 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3365 real_arithmetic (&r0, code, &r0, &r1);
3366 return fold_build2_loc (loc, MULT_EXPR, type,
3367 TREE_OPERAND (arg0, 0),
3368 build_real (type, r0));
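/* Worked example (an illustrative sketch, not part of the original
   sources): the two rewrites above correspond to

     x/3.0 + y/3.0   ->   (x + y) / 3.0
     x/2.0 + x/4.0   ->   x * (1/2.0 + 1/4.0)  ==  x * 0.75

   Both can change rounding and exception behaviour, which is the sense
   in which the transformation is unsafe.  */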
3374 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3375 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3378 make_bit_field_ref (location_t loc, tree inner, tree type,
3379 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3381 tree result, bftype;
3385 tree size = TYPE_SIZE (TREE_TYPE (inner));
3386 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3387 || POINTER_TYPE_P (TREE_TYPE (inner)))
3388 && host_integerp (size, 0)
3389 && tree_low_cst (size, 0) == bitsize)
3390 return fold_convert_loc (loc, type, inner);
3394 if (TYPE_PRECISION (bftype) != bitsize
3395 || TYPE_UNSIGNED (bftype) == !unsignedp)
3396 bftype = build_nonstandard_integer_type (bitsize, 0);
3398 result = build3 (BIT_FIELD_REF, bftype, inner,
3399 size_int (bitsize), bitsize_int (bitpos));
3400 SET_EXPR_LOCATION (result, loc);
3403 result = fold_convert_loc (loc, type, result);
3408 /* Optimize a bit-field compare.
3410 There are two cases: First is a compare against a constant and the
3411 second is a comparison of two items where the fields are at the same
3412 bit position relative to the start of a chunk (byte, halfword, word)
3413 large enough to contain it. In these cases we can avoid the shift
3414 implicit in bitfield extractions.
3416 For constants, we emit a compare of the shifted constant with the
3417 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3418 compared. For two fields at the same position, we do the ANDs with the
3419 similar mask and compare the result of the ANDs.
3421 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3422 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3423 are the left and right operands of the comparison, respectively.
3425 If the optimization described above can be done, we return the resulting
3426 tree. Otherwise we return zero. */
3429 optimize_bit_field_compare (location_t loc, enum tree_code code,
3430 tree compare_type, tree lhs, tree rhs)
3432 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3433 tree type = TREE_TYPE (lhs);
3434 tree signed_type, unsigned_type;
3435 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3436 enum machine_mode lmode, rmode, nmode;
3437 int lunsignedp, runsignedp;
3438 int lvolatilep = 0, rvolatilep = 0;
3439 tree linner, rinner = NULL_TREE;
3443 /* Get all the information about the extractions being done. If the bit size
3444 is the same as the size of the underlying object, we aren't doing an
3445 extraction at all and so can do nothing. We also don't want to
3446 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3447 then will no longer be able to replace it. */
3448 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3449 &lunsignedp, &lvolatilep, false);
3450 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3451 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3456 /* If this is not a constant, we can only do something if bit positions,
3457 sizes, and signedness are the same. */
3458 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3459 &runsignedp, &rvolatilep, false);
3461 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3462 || lunsignedp != runsignedp || offset != 0
3463 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3467 /* See if we can find a mode to refer to this field. We should be able to,
3468 but fail if we can't. */
3470 && GET_MODE_BITSIZE (lmode) > 0
3471 && flag_strict_volatile_bitfields > 0)
3474 nmode = get_best_mode (lbitsize, lbitpos,
3475 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3476 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3477 TYPE_ALIGN (TREE_TYPE (rinner))),
3478 word_mode, lvolatilep || rvolatilep);
3479 if (nmode == VOIDmode)
3482 /* Set signed and unsigned types of the precision of this mode for the
3484 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3485 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3487 /* Compute the bit position and size for the new reference and our offset
3488 within it. If the new reference is the same size as the original, we
3489 won't optimize anything, so return zero. */
3490 nbitsize = GET_MODE_BITSIZE (nmode);
3491 nbitpos = lbitpos & ~ (nbitsize - 1);
3493 if (nbitsize == lbitsize)
3496 if (BYTES_BIG_ENDIAN)
3497 lbitpos = nbitsize - lbitsize - lbitpos;
3499 /* Make the mask to be used against the extracted field. */
3500 mask = build_int_cst_type (unsigned_type, -1);
3501 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3502 mask = const_binop (RSHIFT_EXPR, mask,
3503 size_int (nbitsize - lbitsize - lbitpos));
3506 /* If not comparing with constant, just rework the comparison
3508 return fold_build2_loc (loc, code, compare_type,
3509 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3510 make_bit_field_ref (loc, linner,
3515 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3516 make_bit_field_ref (loc, rinner,
3522 /* Otherwise, we are handling the constant case. See if the constant is too
3523 big for the field. Warn and return a tree for 0 (false) if so. We do
3524 this not only for its own sake, but to avoid having to test for this
3525 error case below. If we didn't, we might generate wrong code.
3527 For unsigned fields, the constant shifted right by the field length should
3528 be all zero. For signed fields, the high-order bits should agree with
3533 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3534 fold_convert_loc (loc,
3535 unsigned_type, rhs),
3536 size_int (lbitsize))))
3538 warning (0, "comparison is always %d due to width of bit-field",
3540 return constant_boolean_node (code == NE_EXPR, compare_type);
3545 tree tem = const_binop (RSHIFT_EXPR,
3546 fold_convert_loc (loc, signed_type, rhs),
3547 size_int (lbitsize - 1));
3548 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3550 warning (0, "comparison is always %d due to width of bit-field",
3552 return constant_boolean_node (code == NE_EXPR, compare_type);
3556 /* Single-bit compares should always be against zero. */
3557 if (lbitsize == 1 && ! integer_zerop (rhs))
3559 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3560 rhs = build_int_cst (type, 0);
3563 /* Make a new bitfield reference, shift the constant over the
3564 appropriate number of bits and mask it with the computed mask
3565 (in case this was a signed field). If we changed it, make a new one. */
3566 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3569 TREE_SIDE_EFFECTS (lhs) = 1;
3570 TREE_THIS_VOLATILE (lhs) = 1;
3573 rhs = const_binop (BIT_AND_EXPR,
3574 const_binop (LSHIFT_EXPR,
3575 fold_convert_loc (loc, unsigned_type, rhs),
3576 size_int (lbitpos)),
3579 lhs = build2 (code, compare_type,
3580 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3582 SET_EXPR_LOCATION (lhs, loc);
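/* Worked example (an illustrative sketch, not part of the original
   sources): given

     struct { unsigned pad : 5; unsigned f : 3; } s;  ...  s.f == 5

   the code above loads a word-sized chunk containing the field, masks
   off the other bits and compares against the constant shifted into
   place, roughly (WORD & MASK) == (5 << BITPOS), avoiding the shift a
   plain bit-field extraction would need.  A constant that cannot fit in
   the field, say s.f == 9 for the 3-bit field, is caught earlier and
   folded to a constant result with a warning.  */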
3586 /* Subroutine for fold_truthop: decode a field reference.
3588 If EXP is a comparison reference, we return the innermost reference.
3590 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3591 set to the starting bit number.
3593 If the innermost field can be completely contained in a mode-sized
3594 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3596 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3597 otherwise it is not changed.
3599 *PUNSIGNEDP is set to the signedness of the field.
3601 *PMASK is set to the mask used. This is either contained in a
3602 BIT_AND_EXPR or derived from the width of the field.
3604 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3606 Return 0 if this is not a component reference or is one that we can't
3607 do anything with. */
3610 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3611 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3612 int *punsignedp, int *pvolatilep,
3613 tree *pmask, tree *pand_mask)
3615 tree outer_type = 0;
3617 tree mask, inner, offset;
3619 unsigned int precision;
3621 /* All the optimizations using this function assume integer fields.
3622 There are problems with FP fields since the type_for_size call
3623 below can fail for, e.g., XFmode. */
3624 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3627 /* We are interested in the bare arrangement of bits, so strip everything
3628 that doesn't affect the machine mode. However, record the type of the
3629 outermost expression if it may matter below. */
3630 if (CONVERT_EXPR_P (exp)
3631 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3632 outer_type = TREE_TYPE (exp);
3635 if (TREE_CODE (exp) == BIT_AND_EXPR)
3637 and_mask = TREE_OPERAND (exp, 1);
3638 exp = TREE_OPERAND (exp, 0);
3639 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3640 if (TREE_CODE (and_mask) != INTEGER_CST)
3644 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3645 punsignedp, pvolatilep, false);
3646 if ((inner == exp && and_mask == 0)
3647 || *pbitsize < 0 || offset != 0
3648 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3651 /* If the number of bits in the reference is the same as the bitsize of
3652 the outer type, then the outer type gives the signedness. Otherwise
3653 (in case of a small bitfield) the signedness is unchanged. */
3654 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3655 *punsignedp = TYPE_UNSIGNED (outer_type);
3657 /* Compute the mask to access the bitfield. */
3658 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3659 precision = TYPE_PRECISION (unsigned_type);
3661 mask = build_int_cst_type (unsigned_type, -1);
3663 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3664 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3666 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3668 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3669 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3672 *pand_mask = and_mask;
3676 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3680 all_ones_mask_p (const_tree mask, int size)
3682 tree type = TREE_TYPE (mask);
3683 unsigned int precision = TYPE_PRECISION (type);
3686 tmask = build_int_cst_type (signed_type_for (type), -1);
3689 tree_int_cst_equal (mask,
3690 const_binop (RSHIFT_EXPR,
3691 const_binop (LSHIFT_EXPR, tmask,
3692 size_int (precision - size)),
3693 size_int (precision - size)));
3696 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3697 represents the sign bit of EXP's type. If EXP represents a sign
3698 or zero extension, also test VAL against the unextended type.
3699 The return value is the (sub)expression whose sign bit is VAL,
3700 or NULL_TREE otherwise. */
3703 sign_bit_p (tree exp, const_tree val)
3705 unsigned HOST_WIDE_INT mask_lo, lo;
3706 HOST_WIDE_INT mask_hi, hi;
3710 /* Tree EXP must have an integral type. */
3711 t = TREE_TYPE (exp);
3712 if (! INTEGRAL_TYPE_P (t))
3715 /* Tree VAL must be an integer constant. */
3716 if (TREE_CODE (val) != INTEGER_CST
3717 || TREE_OVERFLOW (val))
3720 width = TYPE_PRECISION (t);
3721 if (width > HOST_BITS_PER_WIDE_INT)
3723 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3726 mask_hi = ((unsigned HOST_WIDE_INT) -1
3727 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3733 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3736 mask_lo = ((unsigned HOST_WIDE_INT) -1
3737 >> (HOST_BITS_PER_WIDE_INT - width));
3740 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3741 treat VAL as if it were unsigned. */
3742 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3743 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3746 /* Handle extension from a narrower type. */
3747 if (TREE_CODE (exp) == NOP_EXPR
3748 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3749 return sign_bit_p (TREE_OPERAND (exp, 0), val);
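/* Worked example (an illustrative sketch, not part of the original
   sources): for a 16-bit EXP the only accepted VAL is 0x8000 and for a
   32-bit EXP it is 0x80000000, i.e. (unsigned HOST_WIDE_INT) 1 shifted
   left by width - 1, with any bits beyond the type's precision masked
   off so that signed and unsigned constants are treated alike.  */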
3754 /* Subroutine for fold_truthop: determine if an operand is simple enough
3755 to be evaluated unconditionally. */
3758 simple_operand_p (const_tree exp)
3760 /* Strip any conversions that don't change the machine mode. */
3763 return (CONSTANT_CLASS_P (exp)
3764 || TREE_CODE (exp) == SSA_NAME
3766 && ! TREE_ADDRESSABLE (exp)
3767 && ! TREE_THIS_VOLATILE (exp)
3768 && ! DECL_NONLOCAL (exp)
3769 /* Don't regard global variables as simple. They may be
3770 allocated in ways unknown to the compiler (shared memory,
3771 #pragma weak, etc). */
3772 && ! TREE_PUBLIC (exp)
3773 && ! DECL_EXTERNAL (exp)
3774 /* Loading a static variable is unduly expensive, but global
3775 registers aren't expensive. */
3776 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3779 /* The following functions are subroutines to fold_range_test and allow it to
3780 try to change a logical combination of comparisons into a range test.
3783 X == 2 || X == 3 || X == 4 || X == 5
3787 (unsigned) (X - 2) <= 3
3789 We describe each set of comparisons as being either inside or outside
3790 a range, using a variable named like IN_P, and then describe the
3791 range with a lower and upper bound. If one of the bounds is omitted,
3792 it represents either the highest or lowest value of the type.
3794 In the comments below, we represent a range by two numbers in brackets
3795 preceded by a "+" to designate being inside that range, or a "-" to
3796 designate being outside that range, so the condition can be inverted by
3797 flipping the prefix. An omitted bound is represented by a "-". For
3798 example, "- [-, 10]" means being outside the range starting at the lowest
3799 possible value and ending at 10, in other words, being greater than 10.
3800 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3803 We set up things so that the missing bounds are handled in a consistent
3804 manner so neither a missing bound nor "true" and "false" need to be
3805 handled using a special case. */
3807 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3808 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3809 and UPPER1_P are nonzero if the respective argument is an upper bound
3810 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3811 must be specified for a comparison. ARG1 will be converted to ARG0's
3812 type if both are specified. */
3815 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3816 tree arg1, int upper1_p)
3822 /* If neither arg represents infinity, do the normal operation.
3823 Else, if not a comparison, return infinity. Else handle the special
3824 comparison rules. Note that most of the cases below won't occur, but
3825 are handled for consistency. */
3827 if (arg0 != 0 && arg1 != 0)
3829 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3830 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3832 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3835 if (TREE_CODE_CLASS (code) != tcc_comparison)
3838 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3839 for neither. In real mathematics, we cannot assume open-ended ranges are
3840 the same. But this is computer arithmetic, where numbers are finite.
3841 We can therefore replace any unbounded range with
3842 the value Z, Z being greater than any representable number. This permits
3843 us to treat unbounded ranges as equal. */
3844 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3845 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3849 result = sgn0 == sgn1;
3852 result = sgn0 != sgn1;
3855 result = sgn0 < sgn1;
3858 result = sgn0 <= sgn1;
3861 result = sgn0 > sgn1;
3864 result = sgn0 >= sgn1;
3870 return constant_boolean_node (result, type);
3873 /* Given EXP, a logical expression, set the range it is testing into
3874 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3875 actually being tested. *PLOW and *PHIGH will be made of the same
3876 type as the returned expression. If EXP is not a comparison, we
3877 will most likely not be returning a useful value and range. Set
3878 *STRICT_OVERFLOW_P to true if the return value is only valid
3879 because signed overflow is undefined; otherwise, do not change
3880 *STRICT_OVERFLOW_P. */
3883 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3884 bool *strict_overflow_p)
3886 enum tree_code code;
3887 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3888 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3890 tree low, high, n_low, n_high;
3891 location_t loc = EXPR_LOCATION (exp);
3893 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3894 and see if we can refine the range. Some of the cases below may not
3895 happen, but it doesn't seem worth worrying about this. We "continue"
3896 the outer loop when we've changed something; otherwise we "break"
3897 the switch, which will "break" the while. */
3900 low = high = build_int_cst (TREE_TYPE (exp), 0);
3904 code = TREE_CODE (exp);
3905 exp_type = TREE_TYPE (exp);
3907 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3909 if (TREE_OPERAND_LENGTH (exp) > 0)
3910 arg0 = TREE_OPERAND (exp, 0);
3911 if (TREE_CODE_CLASS (code) == tcc_comparison
3912 || TREE_CODE_CLASS (code) == tcc_unary
3913 || TREE_CODE_CLASS (code) == tcc_binary)
3914 arg0_type = TREE_TYPE (arg0);
3915 if (TREE_CODE_CLASS (code) == tcc_binary
3916 || TREE_CODE_CLASS (code) == tcc_comparison
3917 || (TREE_CODE_CLASS (code) == tcc_expression
3918 && TREE_OPERAND_LENGTH (exp) > 1))
3919 arg1 = TREE_OPERAND (exp, 1);
3924 case TRUTH_NOT_EXPR:
3925 in_p = ! in_p, exp = arg0;
3928 case EQ_EXPR: case NE_EXPR:
3929 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3930 /* We can only do something if the range is testing for zero
3931 and if the second operand is an integer constant. Note that
3932 saying something is "in" the range we make is done by
3933 complementing IN_P, since the range is initially set up for the case
3934 of being not equal to zero; "out" is leaving it alone.
3935 if (low == 0 || high == 0
3936 || ! integer_zerop (low) || ! integer_zerop (high)
3937 || TREE_CODE (arg1) != INTEGER_CST)
3942 case NE_EXPR: /* - [c, c] */
3945 case EQ_EXPR: /* + [c, c] */
3946 in_p = ! in_p, low = high = arg1;
3948 case GT_EXPR: /* - [-, c] */
3949 low = 0, high = arg1;
3951 case GE_EXPR: /* + [c, -] */
3952 in_p = ! in_p, low = arg1, high = 0;
3954 case LT_EXPR: /* - [c, -] */
3955 low = arg1, high = 0;
3957 case LE_EXPR: /* + [-, c] */
3958 in_p = ! in_p, low = 0, high = arg1;
3964 /* If this is an unsigned comparison, we also know that EXP is
3965 greater than or equal to zero. We base the range tests we make
3966 on that fact, so we record it here so we can parse existing
3967 range tests. We test arg0_type since often the return type
3968 of, e.g. EQ_EXPR, is boolean. */
3969 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3971 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3973 build_int_cst (arg0_type, 0),
3977 in_p = n_in_p, low = n_low, high = n_high;
3979 /* If the high bound is missing, but we have a nonzero low
3980 bound, reverse the range so it goes from zero to the low bound
3982 if (high == 0 && low && ! integer_zerop (low))
3985 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3986 integer_one_node, 0);
3987 low = build_int_cst (arg0_type, 0);
3995 /* (-x) IN [a,b] -> x in [-b, -a] */
3996 n_low = range_binop (MINUS_EXPR, exp_type,
3997 build_int_cst (exp_type, 0),
3999 n_high = range_binop (MINUS_EXPR, exp_type,
4000 build_int_cst (exp_type, 0),
4002 if (n_high != 0 && TREE_OVERFLOW (n_high))
4008 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4009 build_int_cst (exp_type, 1));
4010 SET_EXPR_LOCATION (exp, loc);
4013 case PLUS_EXPR: case MINUS_EXPR:
4014 if (TREE_CODE (arg1) != INTEGER_CST)
4017 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4018 move a constant to the other side. */
4019 if (!TYPE_UNSIGNED (arg0_type)
4020 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4023 /* If EXP is signed, any overflow in the computation is undefined,
4024 so we don't worry about it so long as our computations on
4025 the bounds don't overflow. For unsigned, overflow is defined
4026 and this is exactly the right thing. */
4027 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4028 arg0_type, low, 0, arg1, 0);
4029 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4030 arg0_type, high, 1, arg1, 0);
4031 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4032 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4035 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4036 *strict_overflow_p = true;
4039 /* Check for an unsigned range which has wrapped around the maximum
4040 value thus making n_high < n_low, and normalize it. */
4041 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4043 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4044 integer_one_node, 0);
4045 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4046 integer_one_node, 0);
4048 /* If the range is of the form +/- [ x+1, x ], we won't
4049 be able to normalize it. But then, it represents the
4050 whole range or the empty set, so make it
4052 if (tree_int_cst_equal (n_low, low)
4053 && tree_int_cst_equal (n_high, high))
4059 low = n_low, high = n_high;
4064 CASE_CONVERT: case NON_LVALUE_EXPR:
4065 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4068 if (! INTEGRAL_TYPE_P (arg0_type)
4069 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4070 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4073 n_low = low, n_high = high;
4076 n_low = fold_convert_loc (loc, arg0_type, n_low);
4079 n_high = fold_convert_loc (loc, arg0_type, n_high);
4082 /* If we're converting arg0 from an unsigned type, to exp,
4083 a signed type, we will be doing the comparison as unsigned.
4084 The tests above have already verified that LOW and HIGH
4087 So we have to ensure that we will handle large unsigned
4088 values the same way that the current signed bounds treat
4091 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4095 /* For fixed-point modes, we need to pass the saturating flag
4096 as the 2nd parameter. */
4097 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4098 equiv_type = lang_hooks.types.type_for_mode
4099 (TYPE_MODE (arg0_type),
4100 TYPE_SATURATING (arg0_type));
4102 equiv_type = lang_hooks.types.type_for_mode
4103 (TYPE_MODE (arg0_type), 1);
4105 /* A range without an upper bound is, naturally, unbounded.
4106 Since convert would have cropped a very large value, use
4107 the max value for the destination type. */
4109 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4110 : TYPE_MAX_VALUE (arg0_type);
4112 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4113 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4114 fold_convert_loc (loc, arg0_type,
4116 build_int_cst (arg0_type, 1));
4118 /* If the low bound is specified, "and" the range with the
4119 range for which the original unsigned value will be
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4124 1, n_low, n_high, 1,
4125 fold_convert_loc (loc, arg0_type,
4130 in_p = (n_in_p == in_p);
4134 /* Otherwise, "or" the range with the range of the input
4135 that will be interpreted as negative. */
4136 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4137 0, n_low, n_high, 1,
4138 fold_convert_loc (loc, arg0_type,
4143 in_p = (in_p != n_in_p);
4148 low = n_low, high = n_high;
4158 /* If EXP is a constant, we can evaluate whether this is true or false. */
4159 if (TREE_CODE (exp) == INTEGER_CST)
4161 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4163 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4169 *pin_p = in_p, *plow = low, *phigh = high;
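/* Worked example (an illustrative sketch, not part of the original
   sources): using the bracket notation introduced before range_binop,
   for a signed variable X make_range turns

     X > 10      into  X with the range  - [-, 10]
     X >= 2      into  X with the range  + [2, -]
     !(X == 3)   into  X with the range  - [3, 3]

   i.e. the returned expression is the bare variable and *PIN_P, *PLOW
   and *PHIGH describe exactly when the original test is true.  */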
4173 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4174 type, TYPE, return an expression to test if EXP is in (or out of, depending
4175 on IN_P) the range. Return 0 if the test couldn't be created. */
4178 build_range_check (location_t loc, tree type, tree exp, int in_p,
4179 tree low, tree high)
4181 tree etype = TREE_TYPE (exp), value;
4183 #ifdef HAVE_canonicalize_funcptr_for_compare
4184 /* Disable this optimization for function pointer expressions
4185 on targets that require function pointer canonicalization. */
4186 if (HAVE_canonicalize_funcptr_for_compare
4187 && TREE_CODE (etype) == POINTER_TYPE
4188 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4194 value = build_range_check (loc, type, exp, 1, low, high);
4196 return invert_truthvalue_loc (loc, value);
4201 if (low == 0 && high == 0)
4202 return build_int_cst (type, 1);
4205 return fold_build2_loc (loc, LE_EXPR, type, exp,
4206 fold_convert_loc (loc, etype, high));
4209 return fold_build2_loc (loc, GE_EXPR, type, exp,
4210 fold_convert_loc (loc, etype, low));
4212 if (operand_equal_p (low, high, 0))
4213 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4214 fold_convert_loc (loc, etype, low));
4216 if (integer_zerop (low))
4218 if (! TYPE_UNSIGNED (etype))
4220 etype = unsigned_type_for (etype);
4221 high = fold_convert_loc (loc, etype, high);
4222 exp = fold_convert_loc (loc, etype, exp);
4224 return build_range_check (loc, type, exp, 1, 0, high);
4227 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4228 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4230 unsigned HOST_WIDE_INT lo;
4234 prec = TYPE_PRECISION (etype);
4235 if (prec <= HOST_BITS_PER_WIDE_INT)
4238 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4242 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4243 lo = (unsigned HOST_WIDE_INT) -1;
4246 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4248 if (TYPE_UNSIGNED (etype))
4250 tree signed_etype = signed_type_for (etype);
4251 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4253 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4255 etype = signed_etype;
4256 exp = fold_convert_loc (loc, etype, exp);
4258 return fold_build2_loc (loc, GT_EXPR, type, exp,
4259 build_int_cst (etype, 0));
4263 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4264 This requires wrap-around arithmetic for the type of the expression.
4265 First make sure that arithmetic in this type is valid, then make sure
4266 that it wraps around. */
4267 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4268 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4269 TYPE_UNSIGNED (etype));
4271 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4273 tree utype, minv, maxv;
4275 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4276 for the type in question, as we rely on this here. */
4277 utype = unsigned_type_for (etype);
4278 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4279 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4280 integer_one_node, 1);
4281 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4283 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4290 high = fold_convert_loc (loc, etype, high);
4291 low = fold_convert_loc (loc, etype, low);
4292 exp = fold_convert_loc (loc, etype, exp);
4294 value = const_binop (MINUS_EXPR, high, low);
4297 if (POINTER_TYPE_P (etype))
4299 if (value != 0 && !TREE_OVERFLOW (value))
4301 low = fold_convert_loc (loc, sizetype, low);
4302 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4303 return build_range_check (loc, type,
4304 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4306 1, build_int_cst (etype, 0), value);
4311 if (value != 0 && !TREE_OVERFLOW (value))
4312 return build_range_check (loc, type,
4313 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4314 1, build_int_cst (etype, 0), value);
4319 /* Return the predecessor of VAL in its type, handling the infinite case. */
4322 range_predecessor (tree val)
4324 tree type = TREE_TYPE (val);
4326 if (INTEGRAL_TYPE_P (type)
4327 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4330 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4333 /* Return the successor of VAL in its type, handling the infinite case. */
4336 range_successor (tree val)
4338 tree type = TREE_TYPE (val);
4340 if (INTEGRAL_TYPE_P (type)
4341 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4344 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4347 /* Given two ranges, see if we can merge them into one. Return 1 if we
4348 can, 0 if we can't. Set the output range into the specified parameters. */
4351 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4352 tree high0, int in1_p, tree low1, tree high1)
4360 int lowequal = ((low0 == 0 && low1 == 0)
4361 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4362 low0, 0, low1, 0)));
4363 int highequal = ((high0 == 0 && high1 == 0)
4364 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4365 high0, 1, high1, 1)));
4367 /* Make range 0 be the range that starts first, or ends last if they
4368 start at the same value. Swap them if that is not already the case. */
4369 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4372 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4373 high1, 1, high0, 1))))
4375 temp = in0_p, in0_p = in1_p, in1_p = temp;
4376 tem = low0, low0 = low1, low1 = tem;
4377 tem = high0, high0 = high1, high1 = tem;
4380 /* Now flag two cases, whether the ranges are disjoint or whether the
4381 second range is totally subsumed in the first. Note that the tests
4382 below are simplified by the ones above. */
4383 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4384 high0, 1, low1, 0));
4385 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4386 high1, 1, high0, 1));
4388 /* We now have four cases, depending on whether we are including or
4389 excluding the two ranges. */
4392 /* If they don't overlap, the result is false. If the second range
4393 is a subset it is the result. Otherwise, the range is from the start
4394 of the second to the end of the first. */
4396 in_p = 0, low = high = 0;
4398 in_p = 1, low = low1, high = high1;
4400 in_p = 1, low = low1, high = high0;
4403 else if (in0_p && ! in1_p)
4405 /* If they don't overlap, the result is the first range. If they are
4406 equal, the result is false. If the second range is a subset of the
4407 first, and the ranges begin at the same place, we go from just after
4408 the end of the second range to the end of the first. If the second
4409 range is not a subset of the first, or if it is a subset and both
4410 ranges end at the same place, the range starts at the start of the
4411 first range and ends just before the second range.
4412 Otherwise, we can't describe this as a single range. */
4414 in_p = 1, low = low0, high = high0;
4415 else if (lowequal && highequal)
4416 in_p = 0, low = high = 0;
4417 else if (subset && lowequal)
4419 low = range_successor (high1);
4424 /* We are in the weird situation where high0 > high1 but
4425 high1 has no successor. Punt. */
4429 else if (! subset || highequal)
4432 high = range_predecessor (low1);
4436 /* low0 < low1 but low1 has no predecessor. Punt. */
4444 else if (! in0_p && in1_p)
4446 /* If they don't overlap, the result is the second range. If the second
4447 is a subset of the first, the result is false. Otherwise,
4448 the range starts just after the first range and ends at the
4449 end of the second. */
4451 in_p = 1, low = low1, high = high1;
4452 else if (subset || highequal)
4453 in_p = 0, low = high = 0;
4456 low = range_successor (high0);
4461 /* high1 > high0 but high0 has no successor. Punt. */
4469 /* The case where we are excluding both ranges. Here the complex case
4470 is if they don't overlap. In that case, the only time we have a
4471 range is if they are adjacent. If the second is a subset of the
4472 first, the result is the first. Otherwise, the range to exclude
4473 starts at the beginning of the first range and ends at the end of the second. */
4477 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4478 range_successor (high0),
4480 in_p = 0, low = low0, high = high1;
4483 /* Canonicalize - [min, x] into - [-, x]. */
4484 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4485 switch (TREE_CODE (TREE_TYPE (low0)))
4488 if (TYPE_PRECISION (TREE_TYPE (low0))
4489 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4493 if (tree_int_cst_equal (low0,
4494 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4498 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4499 && integer_zerop (low0))
4506 /* Canonicalize - [x, max] into - [x, -]. */
4507 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4508 switch (TREE_CODE (TREE_TYPE (high1)))
4511 if (TYPE_PRECISION (TREE_TYPE (high1))
4512 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4516 if (tree_int_cst_equal (high1,
4517 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4521 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4522 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4524 integer_one_node, 1)))
4531 /* The ranges might also be adjacent between the maximum and
4532 minimum values of the given type. For
4533 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4534 return + [x + 1, y - 1]. */
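      /* E.g. combining - [-, 3] with - [8, -] gives + [4, 7], i.e. the
         conjunction "> 3 and < 8".  */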
4535 if (low0 == 0 && high1 == 0)
4537 low = range_successor (high0);
4538 high = range_predecessor (low1);
4539 if (low == 0 || high == 0)
4549 in_p = 0, low = low0, high = high0;
4551 in_p = 0, low = low0, high = high1;
4554 *pin_p = in_p, *plow = low, *phigh = high;
4559 /* Subroutine of fold, looking inside expressions of the form
4560 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4561 of the COND_EXPR. This function is also used to optimize
4562 A op B ? C : A, by reversing the comparison first.
4564 Return a folded expression whose code is not a COND_EXPR
4565 anymore, or NULL_TREE if no folding opportunity is found. */
4568 fold_cond_expr_with_comparison (location_t loc, tree type,
4569 tree arg0, tree arg1, tree arg2)
4571 enum tree_code comp_code = TREE_CODE (arg0);
4572 tree arg00 = TREE_OPERAND (arg0, 0);
4573 tree arg01 = TREE_OPERAND (arg0, 1);
4574 tree arg1_type = TREE_TYPE (arg1);
4580 /* If we have A op 0 ? A : -A, consider applying the following
4583 A == 0? A : -A same as -A
4584 A != 0? A : -A same as A
4585 A >= 0? A : -A same as abs (A)
4586 A > 0? A : -A same as abs (A)
4587 A <= 0? A : -A same as -abs (A)
4588 A < 0? A : -A same as -abs (A)
4590 None of these transformations work for modes with signed
4591 zeros. If A is +/-0, the first two transformations will
4592 change the sign of the result (from +0 to -0, or vice
4593 versa). The last four will fix the sign of the result,
4594 even though the original expressions could be positive or
4595 negative, depending on the sign of A.
4597 Note that all these transformations are correct if A is
4598 NaN, since the two alternatives (A and -A) are also NaNs. */
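      /* For instance, when signed zeros need not be honored, a COND_EXPR
         such as x > 0.0 ? x : -x can be folded to ABS_EXPR <x>, and
         x <= 0.0 ? x : -x to the negation of that ABS_EXPR, subject to the
         checks below.  */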
4599 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4600 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4601 ? real_zerop (arg01)
4602 : integer_zerop (arg01))
4603 && ((TREE_CODE (arg2) == NEGATE_EXPR
4604 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4605 /* In the case that A is of the form X-Y, '-A' (arg2) may
4606 have already been folded to Y-X, check for that. */
4607 || (TREE_CODE (arg1) == MINUS_EXPR
4608 && TREE_CODE (arg2) == MINUS_EXPR
4609 && operand_equal_p (TREE_OPERAND (arg1, 0),
4610 TREE_OPERAND (arg2, 1), 0)
4611 && operand_equal_p (TREE_OPERAND (arg1, 1),
4612 TREE_OPERAND (arg2, 0), 0))))
4617 tem = fold_convert_loc (loc, arg1_type, arg1);
4618 return pedantic_non_lvalue_loc (loc,
4619 fold_convert_loc (loc, type,
4620 negate_expr (tem)));
4623 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4626 if (flag_trapping_math)
4631 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4632 arg1 = fold_convert_loc (loc, signed_type_for
4633 (TREE_TYPE (arg1)), arg1);
4634 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4635 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4638 if (flag_trapping_math)
4642 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4643 arg1 = fold_convert_loc (loc, signed_type_for
4644 (TREE_TYPE (arg1)), arg1);
4645 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4646 return negate_expr (fold_convert_loc (loc, type, tem));
4648 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4652 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4653 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4654 both transformations are correct when A is NaN: A != 0
4655 is then true, and A == 0 is false. */
4657 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4658 && integer_zerop (arg01) && integer_zerop (arg2))
4660 if (comp_code == NE_EXPR)
4661 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4662 else if (comp_code == EQ_EXPR)
4663 return build_int_cst (type, 0);
4666 /* Try some transformations of A op B ? A : B.
4668 A == B? A : B same as B
4669 A != B? A : B same as A
4670 A >= B? A : B same as max (A, B)
4671 A > B? A : B same as max (B, A)
4672 A <= B? A : B same as min (A, B)
4673 A < B? A : B same as min (B, A)
4675 As above, these transformations don't work in the presence
4676 of signed zeros. For example, if A and B are zeros of
4677 opposite sign, the first two transformations will change
4678 the sign of the result. In the last four, the original
4679 expressions give different results for (A=+0, B=-0) and
4680 (A=-0, B=+0), but the transformed expressions do not.
4682 The first two transformations are correct if either A or B
4683 is a NaN. In the first transformation, the condition will
4684 be false, and B will indeed be chosen. In the case of the
4685 second transformation, the condition A != B will be true,
4686 and A will be chosen.
4688 The conversions to max() and min() are not correct if B is
4689 a number and A is not. The conditions in the original
4690 expressions will be false, so all four give B. The min()
4691 and max() versions would give a NaN instead. */
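      /* For instance, when neither NaNs nor signed zeros matter,
         x <= y ? x : y becomes MIN_EXPR <x, y> and x > y ? x : y becomes
         MAX_EXPR <y, x>, per the table above.  */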
4692 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4693 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4694 /* Avoid these transformations if the COND_EXPR may be used
4695 as an lvalue in the C++ front-end. PR c++/19199. */
4697 || (strcmp (lang_hooks.name, "GNU C++") != 0
4698 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4699 || ! maybe_lvalue_p (arg1)
4700 || ! maybe_lvalue_p (arg2)))
4702 tree comp_op0 = arg00;
4703 tree comp_op1 = arg01;
4704 tree comp_type = TREE_TYPE (comp_op0);
4706 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4707 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4717 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4719 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4724 /* In C++ a ?: expression can be an lvalue, so put the
4725 operand which will be used if they are equal first
4726 so that we can convert this back to the
4727 corresponding COND_EXPR. */
4728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4730 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4731 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4732 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4733 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4734 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4735 comp_op1, comp_op0);
4736 return pedantic_non_lvalue_loc (loc,
4737 fold_convert_loc (loc, type, tem));
4744 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4746 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4747 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4748 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4749 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4750 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4751 comp_op1, comp_op0);
4752 return pedantic_non_lvalue_loc (loc,
4753 fold_convert_loc (loc, type, tem));
4757 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4758 return pedantic_non_lvalue_loc (loc,
4759 fold_convert_loc (loc, type, arg2));
4762 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4763 return pedantic_non_lvalue_loc (loc,
4764 fold_convert_loc (loc, type, arg1));
4767 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4772 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4773 we might still be able to simplify this. For example,
4774 if C1 is one less or one more than C2, this might have started
4775 out as a MIN or MAX and been transformed by this function.
4776 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
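      /* For instance, x < 4 ? x : 3 has C1 == C2 + 1 and can become
         MIN_EXPR <x, 3>, while x > 5 ? x : 6 has C1 == C2 - 1 and can
         become MAX_EXPR <x, 6>.  */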
4778 if (INTEGRAL_TYPE_P (type)
4779 && TREE_CODE (arg01) == INTEGER_CST
4780 && TREE_CODE (arg2) == INTEGER_CST)
4784 if (TREE_CODE (arg1) == INTEGER_CST)
4786 /* We can replace A with C1 in this case. */
4787 arg1 = fold_convert_loc (loc, type, arg01);
4788 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4791 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4792 MIN_EXPR, to preserve the signedness of the comparison. */
4793 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4795 && operand_equal_p (arg01,
4796 const_binop (PLUS_EXPR, arg2,
4797 build_int_cst (type, 1)),
4800 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4801 fold_convert_loc (loc, TREE_TYPE (arg00),
4803 return pedantic_non_lvalue_loc (loc,
4804 fold_convert_loc (loc, type, tem));
4809 /* If C1 is C2 - 1, this is min(A, C2), with the same care as above. */
4811 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4813 && operand_equal_p (arg01,
4814 const_binop (MINUS_EXPR, arg2,
4815 build_int_cst (type, 1)),
4818 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4819 fold_convert_loc (loc, TREE_TYPE (arg00),
4821 return pedantic_non_lvalue_loc (loc,
4822 fold_convert_loc (loc, type, tem));
4827 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4828 MAX_EXPR, to preserve the signedness of the comparison. */
4829 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4831 && operand_equal_p (arg01,
4832 const_binop (MINUS_EXPR, arg2,
4833 build_int_cst (type, 1)),
4836 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4837 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4844 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4845 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4847 && operand_equal_p (arg01,
4848 const_binop (PLUS_EXPR, arg2,
4849 build_int_cst (type, 1)),
4852 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4853 fold_convert_loc (loc, TREE_TYPE (arg00),
4855 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4869 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4870 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4871 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4875 /* EXP is some logical combination of boolean tests. See if we can
4876 merge it into some range test. Return the new tree if so. */
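/* For instance, for a signed int I the test I >= 0 && I <= 9 can usually
   be rewritten as the single range check (unsigned int) I <= 9.  */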
4879 fold_range_test (location_t loc, enum tree_code code, tree type,
4882 int or_op = (code == TRUTH_ORIF_EXPR
4883 || code == TRUTH_OR_EXPR);
4884 int in0_p, in1_p, in_p;
4885 tree low0, low1, low, high0, high1, high;
4886 bool strict_overflow_p = false;
4887 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4888 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4890 const char * const warnmsg = G_("assuming signed overflow does not occur "
4891 "when simplifying range test");
4893 /* If this is an OR operation, invert both sides; we will invert
4894 again at the end. */
4896 in0_p = ! in0_p, in1_p = ! in1_p;
4898 /* If both expressions are the same, if we can merge the ranges, and we
4899 can build the range test, return it or its inversion. If one of the
4900 ranges is always true or always false, consider it to be the same
4901 expression as the other. */
4902 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4903 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4905 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4907 : rhs != 0 ? rhs : integer_zero_node,
4910 if (strict_overflow_p)
4911 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4912 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4915 /* On machines where branches are expensive, if this is a
4916 short-circuited branch and the underlying object on both sides
4917 is the same, make a non-short-circuit operation. */
4918 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4919 && lhs != 0 && rhs != 0
4920 && (code == TRUTH_ANDIF_EXPR
4921 || code == TRUTH_ORIF_EXPR)
4922 && operand_equal_p (lhs, rhs, 0))
4924 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4925 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4926 which cases we can't do this. */
4927 if (simple_operand_p (lhs))
4929 tem = build2 (code == TRUTH_ANDIF_EXPR
4930 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4932 SET_EXPR_LOCATION (tem, loc);
4936 else if (lang_hooks.decls.global_bindings_p () == 0
4937 && ! CONTAINS_PLACEHOLDER_P (lhs))
4939 tree common = save_expr (lhs);
4941 if (0 != (lhs = build_range_check (loc, type, common,
4942 or_op ? ! in0_p : in0_p,
4944 && (0 != (rhs = build_range_check (loc, type, common,
4945 or_op ? ! in1_p : in1_p,
4948 if (strict_overflow_p)
4949 fold_overflow_warning (warnmsg,
4950 WARN_STRICT_OVERFLOW_COMPARISON);
4951 tem = build2 (code == TRUTH_ANDIF_EXPR
4952 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4954 SET_EXPR_LOCATION (tem, loc);
4963 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4964 bit value. Arrange things so the extra bits will be set to zero if and
4965 only if C is sign-extended to its full width. If MASK is nonzero,
4966 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4969 unextend (tree c, int p, int unsignedp, tree mask)
4971 tree type = TREE_TYPE (c);
4972 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4975 if (p == modesize || unsignedp)
4978 /* We work by getting just the sign bit into the low-order bit, then
4979 into the high-order bit, then sign-extend. We then XOR that value with C. */
4981 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4982 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4984 /* We must use a signed type in order to get an arithmetic right shift.
4985 However, we must also avoid introducing accidental overflows, so that
4986 a subsequent call to integer_zerop will work. Hence we must
4987 do the type conversion here. At this point, the constant is either
4988 zero or one, and the conversion to a signed type can never overflow.
4989 We could get an overflow if this conversion is done anywhere else. */
4990 if (TYPE_UNSIGNED (type))
4991 temp = fold_convert (signed_type_for (type), temp);
4993 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4994 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4996 temp = const_binop (BIT_AND_EXPR, temp,
4997 fold_convert (TREE_TYPE (c), mask));
4998 /* If necessary, convert the type back to match the type of C. */
4999 if (TYPE_UNSIGNED (type))
5000 temp = fold_convert (type, temp);
5002 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5005 /* For an expression that has the form
     (A && B) || !B
     or
     (A || B) && !B,
5009 we can drop one of the inner expressions and simplify to
     A || !B
     or
     A && !B, respectively.
5013 LOC is the location of the resulting expression. OP is the inner
5014 logical operation; the left-hand side in the examples above, while CMPOP
5015 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5016 removing a condition that guards another, as in
5017 (A != NULL && A->...) || A == NULL
5018 which we must not transform. If RHS_ONLY is true, only eliminate the
5019 right-most operand of the inner logical operation. */
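/* For instance, in (a > 1 && b) || a <= 1 the inner a > 1 term is the
   inverse of the a <= 1 comparison on the right, so it can be dropped,
   leaving b || a <= 1.  */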
5022 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5025 tree type = TREE_TYPE (cmpop);
5026 enum tree_code code = TREE_CODE (cmpop);
5027 enum tree_code truthop_code = TREE_CODE (op);
5028 tree lhs = TREE_OPERAND (op, 0);
5029 tree rhs = TREE_OPERAND (op, 1);
5030 tree orig_lhs = lhs, orig_rhs = rhs;
5031 enum tree_code rhs_code = TREE_CODE (rhs);
5032 enum tree_code lhs_code = TREE_CODE (lhs);
5033 enum tree_code inv_code;
5035 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5038 if (TREE_CODE_CLASS (code) != tcc_comparison)
5041 if (rhs_code == truthop_code)
5043 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5044 if (newrhs != NULL_TREE)
5047 rhs_code = TREE_CODE (rhs);
5050 if (lhs_code == truthop_code && !rhs_only)
5052 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5053 if (newlhs != NULL_TREE)
5056 lhs_code = TREE_CODE (lhs);
5060 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5061 if (inv_code == rhs_code
5062 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5063 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5065 if (!rhs_only && inv_code == lhs_code
5066 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5067 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5069 if (rhs != orig_rhs || lhs != orig_lhs)
5070 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5075 /* Find ways of folding logical expressions of LHS and RHS:
5076 Try to merge two comparisons to the same innermost item.
5077 Look for range tests like "ch >= '0' && ch <= '9'".
5078 Look for combinations of simple terms on machines with expensive branches
5079 and evaluate the RHS unconditionally.
5081 For example, if we have p->a == 2 && p->b == 4 and we can make an
5082 object large enough to span both A and B, we can do this with a comparison
5083 against the object ANDed with a mask.
5085 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5086 operations to do this with one comparison.
5088 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5089 function and the one above.
5091 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5092 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5094 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
5097 We return the simplified tree or 0 if no optimization is possible. */
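/* For instance, if s.a and s.b are narrow bit-fields that share a machine
   word, a test like s.a == 1 && s.b == 3 may be rewritten as a single load
   of the containing word, masked and compared against the combined
   constant.  */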
5100 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5103 /* If this is the "or" of two comparisons, we can do something if
5104 the comparisons are NE_EXPR. If this is the "and", we can do something
5105 if the comparisons are EQ_EXPR. I.e.,
5106 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5108 WANTED_CODE is this operation code. For single bit fields, we can
5109 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5110 comparison for one-bit fields. */
5112 enum tree_code wanted_code;
5113 enum tree_code lcode, rcode;
5114 tree ll_arg, lr_arg, rl_arg, rr_arg;
5115 tree ll_inner, lr_inner, rl_inner, rr_inner;
5116 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5117 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5118 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5119 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5120 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5121 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5122 enum machine_mode lnmode, rnmode;
5123 tree ll_mask, lr_mask, rl_mask, rr_mask;
5124 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5125 tree l_const, r_const;
5126 tree lntype, rntype, result;
5127 HOST_WIDE_INT first_bit, end_bit;
5129 tree orig_lhs = lhs, orig_rhs = rhs;
5130 enum tree_code orig_code = code;
5132 /* Start by getting the comparison codes. Fail if anything is volatile.
5133 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5134 it were surrounded with a NE_EXPR. */
5136 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5139 lcode = TREE_CODE (lhs);
5140 rcode = TREE_CODE (rhs);
5142 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5144 lhs = build2 (NE_EXPR, truth_type, lhs,
5145 build_int_cst (TREE_TYPE (lhs), 0));
5149 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5151 rhs = build2 (NE_EXPR, truth_type, rhs,
5152 build_int_cst (TREE_TYPE (rhs), 0));
5156 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5157 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5160 ll_arg = TREE_OPERAND (lhs, 0);
5161 lr_arg = TREE_OPERAND (lhs, 1);
5162 rl_arg = TREE_OPERAND (rhs, 0);
5163 rr_arg = TREE_OPERAND (rhs, 1);
5165 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5166 if (simple_operand_p (ll_arg)
5167 && simple_operand_p (lr_arg))
5170 if (operand_equal_p (ll_arg, rl_arg, 0)
5171 && operand_equal_p (lr_arg, rr_arg, 0))
5173 result = combine_comparisons (loc, code, lcode, rcode,
5174 truth_type, ll_arg, lr_arg);
5178 else if (operand_equal_p (ll_arg, rr_arg, 0)
5179 && operand_equal_p (lr_arg, rl_arg, 0))
5181 result = combine_comparisons (loc, code, lcode,
5182 swap_tree_comparison (rcode),
5183 truth_type, ll_arg, lr_arg);
5189 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5190 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5192 /* If the RHS can be evaluated unconditionally and its operands are
5193 simple, it wins to evaluate the RHS unconditionally on machines
5194 with expensive branches. In this case, this isn't a comparison
5195 that can be merged. Avoid doing this if the RHS is a floating-point
5196 comparison since those can trap. */
5198 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5200 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5201 && simple_operand_p (rl_arg)
5202 && simple_operand_p (rr_arg))
5204 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5205 if (code == TRUTH_OR_EXPR
5206 && lcode == NE_EXPR && integer_zerop (lr_arg)
5207 && rcode == NE_EXPR && integer_zerop (rr_arg)
5208 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5209 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5211 result = build2 (NE_EXPR, truth_type,
5212 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5214 build_int_cst (TREE_TYPE (ll_arg), 0));
5215 goto fold_truthop_exit;
5218 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5219 if (code == TRUTH_AND_EXPR
5220 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5221 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5222 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5223 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5225 result = build2 (EQ_EXPR, truth_type,
5226 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5228 build_int_cst (TREE_TYPE (ll_arg), 0));
5229 goto fold_truthop_exit;
5232 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5234 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5236 result = build2 (code, truth_type, lhs, rhs);
5237 goto fold_truthop_exit;
5243 /* See if the comparisons can be merged. Then get all the parameters for each side. */
5246 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5247 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5251 ll_inner = decode_field_reference (loc, ll_arg,
5252 &ll_bitsize, &ll_bitpos, &ll_mode,
5253 &ll_unsignedp, &volatilep, &ll_mask,
5255 lr_inner = decode_field_reference (loc, lr_arg,
5256 &lr_bitsize, &lr_bitpos, &lr_mode,
5257 &lr_unsignedp, &volatilep, &lr_mask,
5259 rl_inner = decode_field_reference (loc, rl_arg,
5260 &rl_bitsize, &rl_bitpos, &rl_mode,
5261 &rl_unsignedp, &volatilep, &rl_mask,
5263 rr_inner = decode_field_reference (loc, rr_arg,
5264 &rr_bitsize, &rr_bitpos, &rr_mode,
5265 &rr_unsignedp, &volatilep, &rr_mask,
5268 /* The inner operation on the lhs of each comparison must be the same
5269 if we are to be able to do anything.
5270 Then see if we have constants. If not, the same must be true for the rhs operands. */
5272 if (volatilep || ll_inner == 0 || rl_inner == 0
5273 || ! operand_equal_p (ll_inner, rl_inner, 0))
5276 if (TREE_CODE (lr_arg) == INTEGER_CST
5277 && TREE_CODE (rr_arg) == INTEGER_CST)
5278 l_const = lr_arg, r_const = rr_arg;
5279 else if (lr_inner == 0 || rr_inner == 0
5280 || ! operand_equal_p (lr_inner, rr_inner, 0))
5283 l_const = r_const = 0;
5285 /* If either comparison code is not correct for our logical operation,
5286 fail. However, we can convert a one-bit comparison against zero into
5287 the opposite comparison against that bit being set in the field. */
5289 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5290 if (lcode != wanted_code)
5292 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5294 /* Make the left operand unsigned, since we are only interested
5295 in the value of one bit. Otherwise we are doing the wrong thing below. */
5304 /* This is analogous to the code for l_const above. */
5305 if (rcode != wanted_code)
5307 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5316 /* See if we can find a mode that contains both fields being compared on
5317 the left. If we can't, fail. Otherwise, update all constants and masks
5318 to be relative to a field of that size. */
5319 first_bit = MIN (ll_bitpos, rl_bitpos);
5320 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5321 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5322 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5324 if (lnmode == VOIDmode)
5327 lnbitsize = GET_MODE_BITSIZE (lnmode);
5328 lnbitpos = first_bit & ~ (lnbitsize - 1);
5329 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5330 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5332 if (BYTES_BIG_ENDIAN)
5334 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5335 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5338 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5339 size_int (xll_bitpos));
5340 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5341 size_int (xrl_bitpos));
5345 l_const = fold_convert_loc (loc, lntype, l_const);
5346 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5347 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5348 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5349 fold_build1_loc (loc, BIT_NOT_EXPR,
5352 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5354 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5359 r_const = fold_convert_loc (loc, lntype, r_const);
5360 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5361 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5362 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5363 fold_build1_loc (loc, BIT_NOT_EXPR,
5366 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5368 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5372 /* If the right sides are not constant, do the same for them. Also,
5373 disallow this optimization if a size or signedness mismatch occurs
5374 between the left and right sides. */
5377 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5378 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5379 /* Make sure the two fields on the right
5380 correspond to the left without being swapped. */
5381 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5384 first_bit = MIN (lr_bitpos, rr_bitpos);
5385 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5386 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5387 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5389 if (rnmode == VOIDmode)
5392 rnbitsize = GET_MODE_BITSIZE (rnmode);
5393 rnbitpos = first_bit & ~ (rnbitsize - 1);
5394 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5395 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5397 if (BYTES_BIG_ENDIAN)
5399 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5400 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5403 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5405 size_int (xlr_bitpos));
5406 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5408 size_int (xrr_bitpos));
5410 /* Make a mask that corresponds to both fields being compared.
5411 Do this for both items being compared. If the operands are the
5412 same size and the bits being compared are in the same position
5413 then we can do this by masking both and comparing the masked results. */
5415 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5416 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5417 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5419 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5420 ll_unsignedp || rl_unsignedp);
5421 if (! all_ones_mask_p (ll_mask, lnbitsize))
5422 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5424 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5425 lr_unsignedp || rr_unsignedp);
5426 if (! all_ones_mask_p (lr_mask, rnbitsize))
5427 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5429 result = build2 (wanted_code, truth_type, lhs, rhs);
5430 goto fold_truthop_exit;
5433 /* There is still another way we can do something: If both pairs of
5434 fields being compared are adjacent, we may be able to make a wider
5435 field containing them both.
5437 Note that we still must mask the lhs/rhs expressions. Furthermore,
5438 the mask must be shifted to account for the shift done by
5439 make_bit_field_ref. */
5440 if ((ll_bitsize + ll_bitpos == rl_bitpos
5441 && lr_bitsize + lr_bitpos == rr_bitpos)
5442 || (ll_bitpos == rl_bitpos + rl_bitsize
5443 && lr_bitpos == rr_bitpos + rr_bitsize))
5447 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5448 ll_bitsize + rl_bitsize,
5449 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5450 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5451 lr_bitsize + rr_bitsize,
5452 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5454 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5455 size_int (MIN (xll_bitpos, xrl_bitpos)));
5456 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5457 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5459 /* Convert to the smaller type before masking out unwanted bits. */
5461 if (lntype != rntype)
5463 if (lnbitsize > rnbitsize)
5465 lhs = fold_convert_loc (loc, rntype, lhs);
5466 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5469 else if (lnbitsize < rnbitsize)
5471 rhs = fold_convert_loc (loc, lntype, rhs);
5472 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5477 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5478 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5480 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5481 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5483 result = build2 (wanted_code, truth_type, lhs, rhs);
5484 goto fold_truthop_exit;
5490 /* Handle the case of comparisons with constants. If there is something in
5491 common between the masks, those bits of the constants must be the same.
5492 If not, the condition is always false. Test for this to avoid generating
5493 incorrect code below. */
5494 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5495 if (! integer_zerop (result)
5496 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5497 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5499 if (wanted_code == NE_EXPR)
5501 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5502 return constant_boolean_node (true, truth_type);
5506 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5507 return constant_boolean_node (false, truth_type);
5511 /* Construct the expression we will return. First get the component
5512 reference we will make. Unless the mask is all ones the width of
5513 that field, perform the mask operation. Then compare with the merged constant. */
5515 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5516 ll_unsignedp || rl_unsignedp);
5518 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5519 if (! all_ones_mask_p (ll_mask, lnbitsize))
5521 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5522 SET_EXPR_LOCATION (result, loc);
5525 result = build2 (wanted_code, truth_type, result,
5526 const_binop (BIT_IOR_EXPR, l_const, r_const));
5529 SET_EXPR_LOCATION (result, loc);
5533 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5537 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5541 enum tree_code op_code;
5544 int consts_equal, consts_lt;
5547 STRIP_SIGN_NOPS (arg0);
5549 op_code = TREE_CODE (arg0);
5550 minmax_const = TREE_OPERAND (arg0, 1);
5551 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5552 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5553 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5554 inner = TREE_OPERAND (arg0, 0);
5556 /* If something does not permit us to optimize, return the original tree. */
5557 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5558 || TREE_CODE (comp_const) != INTEGER_CST
5559 || TREE_OVERFLOW (comp_const)
5560 || TREE_CODE (minmax_const) != INTEGER_CST
5561 || TREE_OVERFLOW (minmax_const))
5564 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5565 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5569 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5572 = optimize_minmax_comparison (loc,
5573 invert_tree_comparison (code, false),
5576 return invert_truthvalue_loc (loc, tem);
5582 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5583 optimize_minmax_comparison
5584 (loc, EQ_EXPR, type, arg0, comp_const),
5585 optimize_minmax_comparison
5586 (loc, GT_EXPR, type, arg0, comp_const));
5589 if (op_code == MAX_EXPR && consts_equal)
5590 /* MAX (X, 0) == 0 -> X <= 0 */
5591 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5593 else if (op_code == MAX_EXPR && consts_lt)
5594 /* MAX (X, 0) == 5 -> X == 5 */
5595 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5597 else if (op_code == MAX_EXPR)
5598 /* MAX (X, 0) == -1 -> false */
5599 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5601 else if (consts_equal)
5602 /* MIN (X, 0) == 0 -> X >= 0 */
5603 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5606 /* MIN (X, 0) == 5 -> false */
5607 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5610 /* MIN (X, 0) == -1 -> X == -1 */
5611 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5614 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5615 /* MAX (X, 0) > 0 -> X > 0
5616 MAX (X, 0) > 5 -> X > 5 */
5617 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5619 else if (op_code == MAX_EXPR)
5620 /* MAX (X, 0) > -1 -> true */
5621 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5623 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5624 /* MIN (X, 0) > 0 -> false
5625 MIN (X, 0) > 5 -> false */
5626 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5629 /* MIN (X, 0) > -1 -> X > -1 */
5630 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5637 /* T is an integer expression that is being multiplied by, divided by, or
5638 reduced modulo a constant C (CODE says which operation and what kind of
5639 division or modulus). See if we can eliminate that operation by folding it with
5640 other operations already in T. WIDE_TYPE, if non-null, is a type that
5641 should be used for the computation if wider than our type.
5643 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5644 (X * 2) + (Y * 4). We must, however, be assured that either the original
5645 expression would not overflow or that overflow is undefined for the type
5646 in the language in question.
5648 If we return a non-null expression, it is an equivalent form of the
5649 original computation, but need not be in the original type.
5651 We set *STRICT_OVERFLOW_P to true if the return value depends on
5652 signed overflow being undefined. Otherwise we do not change
5653 *STRICT_OVERFLOW_P. */
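/* For instance, (X * 12) / 6 can become X * 2 when overflow in the type is
   undefined; *STRICT_OVERFLOW_P is then set, since the two forms differ if
   the multiplication wraps.  */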
5656 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5657 bool *strict_overflow_p)
5659 /* To avoid exponential search depth, refuse to allow recursion past
5660 three levels. Beyond that (1) it's highly unlikely that we'll find
5661 something interesting and (2) we've probably processed it before
5662 when we built the inner expression. */
5671 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5678 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5679 bool *strict_overflow_p)
5681 tree type = TREE_TYPE (t);
5682 enum tree_code tcode = TREE_CODE (t);
5683 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5684 > GET_MODE_SIZE (TYPE_MODE (type)))
5685 ? wide_type : type);
5687 int same_p = tcode == code;
5688 tree op0 = NULL_TREE, op1 = NULL_TREE;
5689 bool sub_strict_overflow_p;
5691 /* Don't deal with constants of zero here; they confuse the code below. */
5692 if (integer_zerop (c))
5695 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5696 op0 = TREE_OPERAND (t, 0);
5698 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5699 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5701 /* Note that we need not handle conditional operations here since fold
5702 already handles those cases. So just do arithmetic here. */
5706 /* For a constant, we can always simplify if we are a multiply
5707 or (for divide and modulus) if it is a multiple of our constant. */
5708 if (code == MULT_EXPR
5709 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5710 return const_binop (code, fold_convert (ctype, t),
5711 fold_convert (ctype, c));
5714 CASE_CONVERT: case NON_LVALUE_EXPR:
5715 /* If op0 is an expression ... */
5716 if ((COMPARISON_CLASS_P (op0)
5717 || UNARY_CLASS_P (op0)
5718 || BINARY_CLASS_P (op0)
5719 || VL_EXP_CLASS_P (op0)
5720 || EXPRESSION_CLASS_P (op0))
5721 /* ... and has wrapping overflow, and its type is smaller
5722 than ctype, then we cannot pass through as widening. */
5723 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5724 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5725 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5726 && (TYPE_PRECISION (ctype)
5727 > TYPE_PRECISION (TREE_TYPE (op0))))
5728 /* ... or this is a truncation (t is narrower than op0),
5729 then we cannot pass through this narrowing. */
5730 || (TYPE_PRECISION (type)
5731 < TYPE_PRECISION (TREE_TYPE (op0)))
5732 /* ... or signedness changes for division or modulus,
5733 then we cannot pass through this conversion. */
5734 || (code != MULT_EXPR
5735 && (TYPE_UNSIGNED (ctype)
5736 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5737 /* ... or has undefined overflow while the type being converted
5738 to does not, we cannot do the operation in the inner type
5739 as that would introduce undefined overflow. */
5740 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5741 && !TYPE_OVERFLOW_UNDEFINED (type))))
5744 /* Pass the constant down and see if we can make a simplification. If
5745 we can, replace this expression with the inner simplification for
5746 possible later conversion to our or some other type. */
5747 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5748 && TREE_CODE (t2) == INTEGER_CST
5749 && !TREE_OVERFLOW (t2)
5750 && (0 != (t1 = extract_muldiv (op0, t2, code,
5752 ? ctype : NULL_TREE,
5753 strict_overflow_p))))
5758 /* If widening the type changes it from signed to unsigned, then we
5759 must avoid building ABS_EXPR itself as unsigned. */
5760 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5762 tree cstype = (*signed_type_for) (ctype);
5763 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5766 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5767 return fold_convert (ctype, t1);
5771 /* If the constant is negative, we cannot simplify this. */
5772 if (tree_int_cst_sgn (c) == -1)
5776 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5778 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5781 case MIN_EXPR: case MAX_EXPR:
5782 /* If widening the type changes the signedness, then we can't perform
5783 this optimization as that changes the result. */
5784 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5787 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5788 sub_strict_overflow_p = false;
5789 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5790 &sub_strict_overflow_p)) != 0
5791 && (t2 = extract_muldiv (op1, c, code, wide_type,
5792 &sub_strict_overflow_p)) != 0)
5794 if (tree_int_cst_sgn (c) < 0)
5795 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5796 if (sub_strict_overflow_p)
5797 *strict_overflow_p = true;
5798 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5799 fold_convert (ctype, t2));
5803 case LSHIFT_EXPR: case RSHIFT_EXPR:
5804 /* If the second operand is constant, this is a multiplication
5805 or floor division by a power of two, so we can treat it that
5806 way unless the multiplier or divisor overflows. Signed
5807 left-shift overflow is implementation-defined rather than
5808 undefined in C90, so do not convert signed left shift into multiplication. */
5810 if (TREE_CODE (op1) == INTEGER_CST
5811 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5812 /* const_binop may not detect overflow correctly,
5813 so check for it explicitly here. */
5814 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5815 && TREE_INT_CST_HIGH (op1) == 0
5816 && 0 != (t1 = fold_convert (ctype,
5817 const_binop (LSHIFT_EXPR,
5820 && !TREE_OVERFLOW (t1))
5821 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5822 ? MULT_EXPR : FLOOR_DIV_EXPR,
5824 fold_convert (ctype, op0),
5826 c, code, wide_type, strict_overflow_p);
5829 case PLUS_EXPR: case MINUS_EXPR:
5830 /* See if we can eliminate the operation on both sides. If we can, we
5831 can return a new PLUS or MINUS. If we can't, the only remaining
5832 cases where we can do anything are if the second operand is a constant. */
5834 sub_strict_overflow_p = false;
5835 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5836 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5837 if (t1 != 0 && t2 != 0
5838 && (code == MULT_EXPR
5839 /* If not multiplication, we can only do this if both operands
5840 are divisible by c. */
5841 || (multiple_of_p (ctype, op0, c)
5842 && multiple_of_p (ctype, op1, c))))
5844 if (sub_strict_overflow_p)
5845 *strict_overflow_p = true;
5846 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5847 fold_convert (ctype, t2));
5850 /* If this was a subtraction, negate OP1 and set it to be an addition.
5851 This simplifies the logic below. */
5852 if (tcode == MINUS_EXPR)
5854 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5855 /* If OP1 was not easily negatable, the constant may be OP0. */
5856 if (TREE_CODE (op0) == INTEGER_CST)
5867 if (TREE_CODE (op1) != INTEGER_CST)
5870 /* If either OP1 or C is negative, this optimization is not safe for
5871 some of the division and remainder types while for others we need
5872 to change the code. */
5873 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5875 if (code == CEIL_DIV_EXPR)
5876 code = FLOOR_DIV_EXPR;
5877 else if (code == FLOOR_DIV_EXPR)
5878 code = CEIL_DIV_EXPR;
5879 else if (code != MULT_EXPR
5880 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5884 /* If it's a multiply or a division/modulus operation of a multiple
5885 of our constant, do the operation and verify it doesn't overflow. */
5886 if (code == MULT_EXPR
5887 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5889 op1 = const_binop (code, fold_convert (ctype, op1),
5890 fold_convert (ctype, c));
5891 /* We allow the constant to overflow with wrapping semantics. */
5893 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5899 /* If we have an unsigned type that is not a sizetype, we cannot widen
5900 the operation since it will change the result if the original
5901 computation overflowed. */
5902 if (TYPE_UNSIGNED (ctype)
5903 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5907 /* If we were able to eliminate our operation from the first side,
5908 apply our operation to the second side and reform the PLUS. */
5909 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5910 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5912 /* The last case is if we are a multiply. In that case, we can
5913 apply the distributive law to commute the multiply and addition
5914 if the multiplication of the constants doesn't overflow. */
5915 if (code == MULT_EXPR)
5916 return fold_build2 (tcode, ctype,
5917 fold_build2 (code, ctype,
5918 fold_convert (ctype, op0),
5919 fold_convert (ctype, c)),
5925 /* We have a special case here if we are doing something like
5926 (C * 8) % 4 since we know that's zero. */
5927 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5928 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5929 /* If the multiplication can overflow we cannot optimize this.
5930 ??? Until we can properly mark individual operations as
5931 not overflowing we need to treat sizetype specially here as
5932 stor-layout relies on this optimization to make
5933 DECL_FIELD_BIT_OFFSET always a constant. */
5934 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5935 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5936 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5937 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5938 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5940 *strict_overflow_p = true;
5941 return omit_one_operand (type, integer_zero_node, op0);
5944 /* ... fall through ... */
5946 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5947 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5948 /* If we can extract our operation from the LHS, do so and return a
5949 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5950 do something only if the second operand is a constant. */
5952 && (t1 = extract_muldiv (op0, c, code, wide_type,
5953 strict_overflow_p)) != 0)
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5955 fold_convert (ctype, op1));
5956 else if (tcode == MULT_EXPR && code == MULT_EXPR
5957 && (t1 = extract_muldiv (op1, c, code, wide_type,
5958 strict_overflow_p)) != 0)
5959 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5960 fold_convert (ctype, t1));
5961 else if (TREE_CODE (op1) != INTEGER_CST)
5964 /* If these are the same operation types, we can associate them
5965 assuming no overflow. */
5967 && 0 != (t1 = int_const_binop (MULT_EXPR,
5968 fold_convert (ctype, op1),
5969 fold_convert (ctype, c), 1))
5970 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5971 (TYPE_UNSIGNED (ctype)
5972 && tcode != MULT_EXPR) ? -1 : 1,
5973 TREE_OVERFLOW (t1)))
5974 && !TREE_OVERFLOW (t1))
5975 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5977 /* If these operations "cancel" each other, we have the main
5978 optimizations of this pass, which occur when either constant is a
5979 multiple of the other, in which case we replace this with an
5980 operation of either CODE or TCODE.
5982 If we have an unsigned type that is not a sizetype, we cannot do
5983 this since it will change the result if the original computation overflowed. */
5985 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5986 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5987 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5988 || (tcode == MULT_EXPR
5989 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5990 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5991 && code != MULT_EXPR)))
5993 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5995 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5996 *strict_overflow_p = true;
5997 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5998 fold_convert (ctype,
5999 const_binop (TRUNC_DIV_EXPR,
6002 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6004 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6005 *strict_overflow_p = true;
6006 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6007 fold_convert (ctype,
6008 const_binop (TRUNC_DIV_EXPR,
6021 /* Return a node which has the indicated constant VALUE (either 0 or
6022 1), and is of the indicated TYPE. */
6025 constant_boolean_node (int value, tree type)
6027 if (type == integer_type_node)
6028 return value ? integer_one_node : integer_zero_node;
6029 else if (type == boolean_type_node)
6030 return value ? boolean_true_node : boolean_false_node;
6032 return build_int_cst (type, value);
6036 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6037 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6038 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6039 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6040 COND is the first argument to CODE; otherwise (as in the example
6041 given here), it is the second argument. TYPE is the type of the
6042 original expression. Return NULL_TREE if no simplification is possible. */
6046 fold_binary_op_with_conditional_arg (location_t loc,
6047 enum tree_code code,
6048 tree type, tree op0, tree op1,
6049 tree cond, tree arg, int cond_first_p)
6051 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6052 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6053 tree test, true_value, false_value;
6054 tree lhs = NULL_TREE;
6055 tree rhs = NULL_TREE;
6057 if (TREE_CODE (cond) == COND_EXPR)
6059 test = TREE_OPERAND (cond, 0);
6060 true_value = TREE_OPERAND (cond, 1);
6061 false_value = TREE_OPERAND (cond, 2);
6062 /* If this operand throws an expression, then it does not make
6063 sense to try to perform a logical or arithmetic operation involving it. */
6065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6072 tree testtype = TREE_TYPE (cond);
6074 true_value = constant_boolean_node (true, testtype);
6075 false_value = constant_boolean_node (false, testtype);
6078 /* This transformation is only worthwhile if we don't have to wrap ARG
6079 in a SAVE_EXPR and the operation can be simplified on at least one
6080 of the branches once it is pushed inside the COND_EXPR. */
6081 if (!TREE_CONSTANT (arg)
6082 && (TREE_SIDE_EFFECTS (arg)
6083 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6086 arg = fold_convert_loc (loc, arg_type, arg);
6089 true_value = fold_convert_loc (loc, cond_type, true_value);
6091 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6093 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6097 false_value = fold_convert_loc (loc, cond_type, false_value);
6099 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6101 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6104 /* Check that we have simplified at least one of the branches. */
6105 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6108 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6112 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6114 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6115 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6116 ADDEND is the same as X.
6118 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6119 and finite. The problematic cases are when X is zero, and its mode
6120 has signed zeros. In the case of rounding towards -infinity,
6121 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6122 modes, X + 0 is not the same as X because -0 + 0 is 0. */
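/* For instance, absent signaling-NaN concerns, X - 0.0 folds to X whenever
   sign-dependent rounding need not be honored, while X + 0.0 folds to X
   only when signed zeros are ignored (for X == -0.0, X + 0.0 is +0.0).  */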
6125 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6127 if (!real_zerop (addend))
6130 /* Don't allow the fold with -fsignaling-nans. */
6131 if (HONOR_SNANS (TYPE_MODE (type)))
6134 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6135 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6138 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6139 if (TREE_CODE (addend) == REAL_CST
6140 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6143 /* The mode has signed zeros, and we have to honor their sign.
6144 In this situation, there is only one case we can return true for.
6145 X - 0 is the same as X unless rounding towards -infinity is in effect. */
6147 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6150 /* Subroutine of fold() that checks comparisons of built-in math
6151 functions against real constants.
6153 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6154 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6155 is the type of the result and ARG0 and ARG1 are the operands of the
6156 comparison. ARG1 must be a TREE_REAL_CST.
6158 The function returns the constant folded tree if a simplification
6159 can be made, and NULL_TREE otherwise. */
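/* For instance, a comparison like sqrt (x) > 3.0 can become x > 9.0 when
   9.0 is exactly representable in the mode, per the logic below.  */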
6162 fold_mathfn_compare (location_t loc,
6163 enum built_in_function fcode, enum tree_code code,
6164 tree type, tree arg0, tree arg1)
6168 if (BUILTIN_SQRT_P (fcode))
6170 tree arg = CALL_EXPR_ARG (arg0, 0);
6171 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6173 c = TREE_REAL_CST (arg1);
6174 if (REAL_VALUE_NEGATIVE (c))
6176 /* sqrt(x) < y is always false, if y is negative. */
6177 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6178 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6180 /* sqrt(x) > y is always true, if y is negative and we
6181 don't care about NaNs, i.e. negative values of x. */
6182 if (code == NE_EXPR || !HONOR_NANS (mode))
6183 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6185 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6186 return fold_build2_loc (loc, GE_EXPR, type, arg,
6187 build_real (TREE_TYPE (arg), dconst0));
6189 else if (code == GT_EXPR || code == GE_EXPR)
6193 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6194 real_convert (&c2, mode, &c2);
6196 if (REAL_VALUE_ISINF (c2))
6198 /* sqrt(x) > y is x == +Inf, when y is very large. */
6199 if (HONOR_INFINITIES (mode))
6200 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6201 build_real (TREE_TYPE (arg), c2));
6203 /* sqrt(x) > y is always false, when y is very large
6204 and we don't care about infinities. */
6205 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6208 /* sqrt(x) > c is the same as x > c*c. */
6209 return fold_build2_loc (loc, code, type, arg,
6210 build_real (TREE_TYPE (arg), c2));
6212 else if (code == LT_EXPR || code == LE_EXPR)
6216 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6217 real_convert (&c2, mode, &c2);
6219 if (REAL_VALUE_ISINF (c2))
6221 /* sqrt(x) < y is always true, when y is a very large
6222 value and we don't care about NaNs or Infinities. */
6223 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6224 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6226 /* sqrt(x) < y is x != +Inf when y is very large and we
6227 don't care about NaNs. */
6228 if (! HONOR_NANS (mode))
6229 return fold_build2_loc (loc, NE_EXPR, type, arg,
6230 build_real (TREE_TYPE (arg), c2));
6232 /* sqrt(x) < y is x >= 0 when y is very large and we
6233 don't care about Infinities. */
6234 if (! HONOR_INFINITIES (mode))
6235 return fold_build2_loc (loc, GE_EXPR, type, arg,
6236 build_real (TREE_TYPE (arg), dconst0));
6238 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6239 if (lang_hooks.decls.global_bindings_p () != 0
6240 || CONTAINS_PLACEHOLDER_P (arg))
6243 arg = save_expr (arg);
6244 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6245 fold_build2_loc (loc, GE_EXPR, type, arg,
6246 build_real (TREE_TYPE (arg),
6248 fold_build2_loc (loc, NE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg),
6253 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6254 if (! HONOR_NANS (mode))
6255 return fold_build2_loc (loc, code, type, arg,
6256 build_real (TREE_TYPE (arg), c2));
6258 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6259 if (lang_hooks.decls.global_bindings_p () == 0
6260 && ! CONTAINS_PLACEHOLDER_P (arg))
6262 arg = save_expr (arg);
6263 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6264 fold_build2_loc (loc, GE_EXPR, type, arg,
6265 build_real (TREE_TYPE (arg),
6267 fold_build2_loc (loc, code, type, arg,
6268 build_real (TREE_TYPE (arg),
6277 /* Subroutine of fold() that optimizes comparisons against Infinities,
6278 either +Inf or -Inf.
6280 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6281 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6282 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6284 The function returns the constant folded tree if a simplification
6285 can be made, and NULL_TREE otherwise. */
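/* For example, x < +Inf becomes x <= DBL_MAX (the maximum value of the
   operand's mode), x >= +Inf becomes x > DBL_MAX, and x <= +Inf becomes
   the NaN test x == x when NaNs must be honored. */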
6288 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6289 tree arg0, tree arg1)
6291 enum machine_mode mode;
6292 REAL_VALUE_TYPE max;
6296 mode = TYPE_MODE (TREE_TYPE (arg0));
6298 /* For negative infinity swap the sense of the comparison. */
6299 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6301 code = swap_tree_comparison (code);
6306 /* x > +Inf is always false, if we ignore sNaNs. */
6307 if (HONOR_SNANS (mode))
6309 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6312 /* x <= +Inf is always true, if we don't care about NaNs. */
6313 if (! HONOR_NANS (mode))
6314 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6316 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6317 if (lang_hooks.decls.global_bindings_p () == 0
6318 && ! CONTAINS_PLACEHOLDER_P (arg0))
6320 arg0 = save_expr (arg0);
6321 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6327 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6328 real_maxval (&max, neg, mode);
6329 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6333 /* x < +Inf is always equal to x <= DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6339 /* x != +Inf is always equal to !(x > DBL_MAX). */
6340 real_maxval (&max, neg, mode);
6341 if (! HONOR_NANS (mode))
6342 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6343 arg0, build_real (TREE_TYPE (arg0), max));
6345 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6346 arg0, build_real (TREE_TYPE (arg0), max));
6347 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6356 /* Subroutine of fold() that optimizes comparisons of a division by
6357 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
6360 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6361 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6362 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6364 The function returns the constant folded tree if a simplification
6365 can be made, and NULL_TREE otherwise. */
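/* For example, with a positive divisor and truncating division,
   X / 4 == 3 becomes the range check 12 <= X && X <= 15, and
   X / 4 > 2 becomes X > 11. */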
6368 fold_div_compare (location_t loc,
6369 enum tree_code code, tree type, tree arg0, tree arg1)
6371 tree prod, tmp, hi, lo;
6372 tree arg00 = TREE_OPERAND (arg0, 0);
6373 tree arg01 = TREE_OPERAND (arg0, 1);
6375 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6379 /* We have to do this the hard way to detect unsigned overflow.
6380 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6381 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6382 TREE_INT_CST_HIGH (arg01),
6383 TREE_INT_CST_LOW (arg1),
6384 TREE_INT_CST_HIGH (arg1),
6385 &val.low, &val.high, unsigned_p);
6386 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6387 neg_overflow = false;
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1), 0);
6395 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6396 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6397 TREE_INT_CST_HIGH (prod),
6398 TREE_INT_CST_LOW (tmp),
6399 TREE_INT_CST_HIGH (tmp),
6400 &val.low, &val.high, unsigned_p);
6401 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6402 -1, overflow | TREE_OVERFLOW (prod));
6404 else if (tree_int_cst_sgn (arg01) >= 0)
6406 tmp = int_const_binop (MINUS_EXPR, arg01,
6407 build_int_cst (TREE_TYPE (arg01), 1), 0);
6408 switch (tree_int_cst_sgn (arg1))
6411 neg_overflow = true;
6412 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6417 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6422 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6432 /* A negative divisor reverses the relational operators. */
6433 code = swap_tree_comparison (code);
6435 tmp = int_const_binop (PLUS_EXPR, arg01,
6436 build_int_cst (TREE_TYPE (arg01), 1), 0);
6437 switch (tree_int_cst_sgn (arg1))
6440 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6445 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6450 neg_overflow = true;
6451 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6463 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6464 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6465 if (TREE_OVERFLOW (hi))
6466 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6467 if (TREE_OVERFLOW (lo))
6468 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6469 return build_range_check (loc, type, arg00, 1, lo, hi);
6472 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6473 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6474 if (TREE_OVERFLOW (hi))
6475 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6476 if (TREE_OVERFLOW (lo))
6477 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6478 return build_range_check (loc, type, arg00, 0, lo, hi);
6481 if (TREE_OVERFLOW (lo))
6483 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6484 return omit_one_operand_loc (loc, type, tmp, arg00);
6486 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6489 if (TREE_OVERFLOW (hi))
6491 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6492 return omit_one_operand_loc (loc, type, tmp, arg00);
6494 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6497 if (TREE_OVERFLOW (hi))
6499 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6500 return omit_one_operand_loc (loc, type, tmp, arg00);
6502 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6505 if (TREE_OVERFLOW (lo))
6507 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6508 return omit_one_operand_loc (loc, type, tmp, arg00);
6510 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6520 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6521 equality/inequality test, then return a simplified form of the test
6522 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
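/* For example, if A has a 32-bit unsigned type, (A & 0x80000000) != 0
   tests the sign bit and can become (signed) A < 0, while
   (A & 0x80000000) == 0 becomes (signed) A >= 0. */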
6526 fold_single_bit_test_into_sign_test (location_t loc,
6527 enum tree_code code, tree arg0, tree arg1,
6530 /* If this is testing a single bit, we can optimize the test. */
6531 if ((code == NE_EXPR || code == EQ_EXPR)
6532 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6533 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6535 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6536 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6537 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6539 if (arg00 != NULL_TREE
6540 /* This is only a win if casting to a signed type is cheap,
6541 i.e. when arg00's type is not a partial mode. */
6542 && TYPE_PRECISION (TREE_TYPE (arg00))
6543 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6545 tree stype = signed_type_for (TREE_TYPE (arg00));
6546 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6548 fold_convert_loc (loc, stype, arg00),
6549 build_int_cst (stype, 0));
6556 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6557 equality/inequality test, then return a simplified form of
6558 the test using shifts and logical operations. Otherwise return
6559 NULL. TYPE is the desired result type. */
6562 fold_single_bit_test (location_t loc, enum tree_code code,
6563 tree arg0, tree arg1, tree result_type)
6565 /* If this is testing a single bit, we can optimize the test. */
6566 if ((code == NE_EXPR || code == EQ_EXPR)
6567 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6568 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6570 tree inner = TREE_OPERAND (arg0, 0);
6571 tree type = TREE_TYPE (arg0);
6572 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6573 enum machine_mode operand_mode = TYPE_MODE (type);
6575 tree signed_type, unsigned_type, intermediate_type;
6578 /* First, see if we can fold the single bit test into a sign-bit test. */
6580 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6585 /* Otherwise we have (A & C) != 0 where C is a single bit,
6586 convert that into ((A >> C2) & 1), where C2 = log2(C).
6587 Similarly for (A & C) == 0. */
6589 /* If INNER is a right shift of a constant and it plus BITNUM does
6590 not overflow, adjust BITNUM and INNER. */
6591 if (TREE_CODE (inner) == RSHIFT_EXPR
6592 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6593 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6594 && bitnum < TYPE_PRECISION (type)
6595 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6596 bitnum - TYPE_PRECISION (type)))
6598 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6599 inner = TREE_OPERAND (inner, 0);
6602 /* If we are going to be able to omit the AND below, we must do our
6603 operations as unsigned. If we must use the AND, we have a choice.
6604 Normally unsigned is faster, but for some machines signed is. */
6605 #ifdef LOAD_EXTEND_OP
6606 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6607 && !flag_syntax_only) ? 0 : 1;
6612 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6613 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6614 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6615 inner = fold_convert_loc (loc, intermediate_type, inner);
6618 inner = build2 (RSHIFT_EXPR, intermediate_type,
6619 inner, size_int (bitnum));
6621 one = build_int_cst (intermediate_type, 1);
6623 if (code == EQ_EXPR)
6624 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6626 /* Put the AND last so it can combine with more things. */
6627 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6629 /* Make sure to return the proper type. */
6630 inner = fold_convert_loc (loc, result_type, inner);
6637 /* Check whether we are allowed to reorder operands arg0 and arg1,
6638 such that the evaluation of arg1 occurs before arg0. */
6641 reorder_operands_p (const_tree arg0, const_tree arg1)
6643 if (! flag_evaluation_order)
6645 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6647 return ! TREE_SIDE_EFFECTS (arg0)
6648 && ! TREE_SIDE_EFFECTS (arg1);
6651 /* Test whether it is preferable to swap two operands, ARG0 and
6652 ARG1, for example because ARG0 is an integer constant and ARG1
6653 isn't. If REORDER is true, only recommend swapping if we can
6654 evaluate the operands in reverse order. */
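/* For example, constants sort after variables, so callers use this
   predicate to canonicalize 5 < x as x > 5 and CST + x as x + CST. */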
6657 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6659 STRIP_SIGN_NOPS (arg0);
6660 STRIP_SIGN_NOPS (arg1);
6662 if (TREE_CODE (arg1) == INTEGER_CST)
6664 if (TREE_CODE (arg0) == INTEGER_CST)
6667 if (TREE_CODE (arg1) == REAL_CST)
6669 if (TREE_CODE (arg0) == REAL_CST)
6672 if (TREE_CODE (arg1) == FIXED_CST)
6674 if (TREE_CODE (arg0) == FIXED_CST)
6677 if (TREE_CODE (arg1) == COMPLEX_CST)
6679 if (TREE_CODE (arg0) == COMPLEX_CST)
6682 if (TREE_CONSTANT (arg1))
6684 if (TREE_CONSTANT (arg0))
6687 if (optimize_function_for_size_p (cfun))
6690 if (reorder && flag_evaluation_order
6691 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6694 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6695 for commutative and comparison operators. Ensuring a canonical
6696 form allows the optimizers to find additional redundancies without
6697 having to explicitly check for both orderings. */
6698 if (TREE_CODE (arg0) == SSA_NAME
6699 && TREE_CODE (arg1) == SSA_NAME
6700 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6703 /* Put SSA_NAMEs last. */
6704 if (TREE_CODE (arg1) == SSA_NAME)
6706 if (TREE_CODE (arg0) == SSA_NAME)
6709 /* Put variables last. */
6718 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6719 ARG0 is extended to a wider type. */
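/* For example, if S has type short, (int) S == 70000 folds to constant
   0 because 70000 does not fit in short, while (int) S == 17 folds to a
   comparison carried out directly in the narrower type. */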
6722 fold_widened_comparison (location_t loc, enum tree_code code,
6723 tree type, tree arg0, tree arg1)
6725 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6727 tree shorter_type, outer_type;
6731 if (arg0_unw == arg0)
6733 shorter_type = TREE_TYPE (arg0_unw);
6735 #ifdef HAVE_canonicalize_funcptr_for_compare
6736 /* Disable this optimization if we're casting a function pointer
6737 type on targets that require function pointer canonicalization. */
6738 if (HAVE_canonicalize_funcptr_for_compare
6739 && TREE_CODE (shorter_type) == POINTER_TYPE
6740 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6744 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6747 arg1_unw = get_unwidened (arg1, NULL_TREE);
6749 /* If possible, express the comparison in the shorter mode. */
6750 if ((code == EQ_EXPR || code == NE_EXPR
6751 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6752 && (TREE_TYPE (arg1_unw) == shorter_type
6753 || ((TYPE_PRECISION (shorter_type)
6754 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6755 && (TYPE_UNSIGNED (shorter_type)
6756 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6757 || (TREE_CODE (arg1_unw) == INTEGER_CST
6758 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6759 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6760 && int_fits_type_p (arg1_unw, shorter_type))))
6761 return fold_build2_loc (loc, code, type, arg0_unw,
6762 fold_convert_loc (loc, shorter_type, arg1_unw));
6764 if (TREE_CODE (arg1_unw) != INTEGER_CST
6765 || TREE_CODE (shorter_type) != INTEGER_TYPE
6766 || !int_fits_type_p (arg1_unw, shorter_type))
6769 /* If we are comparing with an integer that does not fit into the range
6770 of the shorter type, the result is known. */
6771 outer_type = TREE_TYPE (arg1_unw);
6772 min = lower_bound_in_type (outer_type, shorter_type);
6773 max = upper_bound_in_type (outer_type, shorter_type);
6775 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6777 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6784 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6795 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6797 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6802 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6804 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6813 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6814 ARG0 just the signedness is changed. */
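/* For example, with unsigned int U, (int) U == 5 folds to U == 5U:
   only the signedness of the operand type changes, and equality does
   not depend on it. */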
6817 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6818 tree arg0, tree arg1)
6821 tree inner_type, outer_type;
6823 if (!CONVERT_EXPR_P (arg0))
6826 outer_type = TREE_TYPE (arg0);
6827 arg0_inner = TREE_OPERAND (arg0, 0);
6828 inner_type = TREE_TYPE (arg0_inner);
6830 #ifdef HAVE_canonicalize_funcptr_for_compare
6831 /* Disable this optimization if we're casting a function pointer
6832 type on targets that require function pointer canonicalization. */
6833 if (HAVE_canonicalize_funcptr_for_compare
6834 && TREE_CODE (inner_type) == POINTER_TYPE
6835 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6839 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6842 if (TREE_CODE (arg1) != INTEGER_CST
6843 && !(CONVERT_EXPR_P (arg1)
6844 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6847 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6848 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6853 if (TREE_CODE (arg1) == INTEGER_CST)
6854 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6855 0, TREE_OVERFLOW (arg1));
6857 arg1 = fold_convert_loc (loc, inner_type, arg1);
6859 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6862 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6863 the step of the array. Reconstructs s and delta in the case of s *
6864 delta being an integer constant (and thus already folded). ADDR is
6865 the address. MULT is the multiplicative expression. If the
6866 function succeeds, the new address expression is returned.
6867 Otherwise NULL_TREE is returned. LOC is the location of the
6868 resulting expression. */
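/* For example, if A is an array of 4-byte ints, &A[i] p+ d * 4 can be
   rewritten as &A[i + d], because 4 is the step of the array. */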
6871 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6873 tree s, delta, step;
6874 tree ref = TREE_OPERAND (addr, 0), pref;
6879 /* Strip the nops that might be added when converting op1 to sizetype. */
6882 /* Canonicalize op1 into a possibly non-constant delta
6883 and an INTEGER_CST s. */
6884 if (TREE_CODE (op1) == MULT_EXPR)
6886 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6891 if (TREE_CODE (arg0) == INTEGER_CST)
6896 else if (TREE_CODE (arg1) == INTEGER_CST)
6904 else if (TREE_CODE (op1) == INTEGER_CST)
6911 /* Treat op1 as delta * 1. */
6913 s = integer_one_node;
6916 for (;; ref = TREE_OPERAND (ref, 0))
6918 if (TREE_CODE (ref) == ARRAY_REF)
6922 /* Remember if this was a multi-dimensional array. */
6923 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6926 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6929 itype = TREE_TYPE (domain);
6931 step = array_ref_element_size (ref);
6932 if (TREE_CODE (step) != INTEGER_CST)
6937 if (! tree_int_cst_equal (step, s))
6942 /* Check whether delta is a multiple of step. */
6943 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6949 /* Only fold here if we can verify we do not overflow one
6950 dimension of a multi-dimensional array. */
6955 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6956 || !TYPE_MAX_VALUE (domain)
6957 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6960 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6961 fold_convert_loc (loc, itype,
6962 TREE_OPERAND (ref, 1)),
6963 fold_convert_loc (loc, itype, delta));
6965 || TREE_CODE (tmp) != INTEGER_CST
6966 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6975 if (!handled_component_p (ref))
6979 /* We found a suitable array reference. So copy everything up to it,
6980 and replace the index. */
6982 pref = TREE_OPERAND (addr, 0);
6983 ret = copy_node (pref);
6984 SET_EXPR_LOCATION (ret, loc);
6989 pref = TREE_OPERAND (pref, 0);
6990 TREE_OPERAND (pos, 0) = copy_node (pref);
6991 pos = TREE_OPERAND (pos, 0);
6994 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6995 fold_convert_loc (loc, itype,
6996 TREE_OPERAND (pos, 1)),
6997 fold_convert_loc (loc, itype, delta));
6999 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7003 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7004 means A >= Y && A != MAX, but in this case we know that
7005 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7008 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7010 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7012 if (TREE_CODE (bound) == LT_EXPR)
7013 a = TREE_OPERAND (bound, 0);
7014 else if (TREE_CODE (bound) == GT_EXPR)
7015 a = TREE_OPERAND (bound, 1);
7019 typea = TREE_TYPE (a);
7020 if (!INTEGRAL_TYPE_P (typea)
7021 && !POINTER_TYPE_P (typea))
7024 if (TREE_CODE (ineq) == LT_EXPR)
7026 a1 = TREE_OPERAND (ineq, 1);
7027 y = TREE_OPERAND (ineq, 0);
7029 else if (TREE_CODE (ineq) == GT_EXPR)
7031 a1 = TREE_OPERAND (ineq, 0);
7032 y = TREE_OPERAND (ineq, 1);
7037 if (TREE_TYPE (a1) != typea)
7040 if (POINTER_TYPE_P (typea))
7042 /* Convert the pointers to integers before taking the difference. */
7043 tree ta = fold_convert_loc (loc, ssizetype, a);
7044 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7045 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7048 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7050 if (!diff || !integer_onep (diff))
7053 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7056 /* Fold a sum or difference of at least one multiplication.
7057 Returns the folded tree or NULL if no simplification could be made. */
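/* For example, A * C + B * C becomes (A + B) * C, A * C - A becomes
   A * (C - 1), and i * 12 + j * 4 becomes (i * 3 + j) * 4 by factoring
   out the common power-of-two factor. */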
7060 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7061 tree arg0, tree arg1)
7063 tree arg00, arg01, arg10, arg11;
7064 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7066 /* (A * C) +- (B * C) -> (A+-B) * C.
7067 (A * C) +- A -> A * (C+-1).
7068 We are most concerned about the case where C is a constant,
7069 but other combinations show up during loop reduction. Since
7070 it is not difficult, try all four possibilities. */
7072 if (TREE_CODE (arg0) == MULT_EXPR)
7074 arg00 = TREE_OPERAND (arg0, 0);
7075 arg01 = TREE_OPERAND (arg0, 1);
7077 else if (TREE_CODE (arg0) == INTEGER_CST)
7079 arg00 = build_one_cst (type);
7084 /* We cannot generate constant 1 for fract. */
7085 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7088 arg01 = build_one_cst (type);
7090 if (TREE_CODE (arg1) == MULT_EXPR)
7092 arg10 = TREE_OPERAND (arg1, 0);
7093 arg11 = TREE_OPERAND (arg1, 1);
7095 else if (TREE_CODE (arg1) == INTEGER_CST)
7097 arg10 = build_one_cst (type);
7098 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7099 the purpose of this canonicalization. */
7100 if (TREE_INT_CST_HIGH (arg1) == -1
7101 && negate_expr_p (arg1)
7102 && code == PLUS_EXPR)
7104 arg11 = negate_expr (arg1);
7112 /* We cannot generate constant 1 for fract. */
7113 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7116 arg11 = build_one_cst (type);
7120 if (operand_equal_p (arg01, arg11, 0))
7121 same = arg01, alt0 = arg00, alt1 = arg10;
7122 else if (operand_equal_p (arg00, arg10, 0))
7123 same = arg00, alt0 = arg01, alt1 = arg11;
7124 else if (operand_equal_p (arg00, arg11, 0))
7125 same = arg00, alt0 = arg01, alt1 = arg10;
7126 else if (operand_equal_p (arg01, arg10, 0))
7127 same = arg01, alt0 = arg00, alt1 = arg11;
7129 /* No identical multiplicands; see if we can find a common
7130 power-of-two factor in non-power-of-two multiplies. This
7131 can help in multi-dimensional array access. */
7132 else if (host_integerp (arg01, 0)
7133 && host_integerp (arg11, 0))
7135 HOST_WIDE_INT int01, int11, tmp;
7138 int01 = TREE_INT_CST_LOW (arg01);
7139 int11 = TREE_INT_CST_LOW (arg11);
7141 /* Move min of absolute values to int11. */
7142 if ((int01 >= 0 ? int01 : -int01)
7143 < (int11 >= 0 ? int11 : -int11))
7145 tmp = int01, int01 = int11, int11 = tmp;
7146 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7153 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7154 /* The remainder should not be a constant, otherwise we
7155 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7156 increased the number of multiplications necessary. */
7157 && TREE_CODE (arg10) != INTEGER_CST)
7159 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7160 build_int_cst (TREE_TYPE (arg00),
7165 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7170 return fold_build2_loc (loc, MULT_EXPR, type,
7171 fold_build2_loc (loc, code, type,
7172 fold_convert_loc (loc, type, alt0),
7173 fold_convert_loc (loc, type, alt1)),
7174 fold_convert_loc (loc, type, same));
7179 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7180 specified by EXPR into the buffer PTR of length LEN bytes.
7181 Return the number of bytes placed in the buffer, or zero upon failure. */
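/* For example, the 32-bit constant 0x11223344 is encoded as the bytes
   44 33 22 11 on a little-endian target and 11 22 33 44 on a
   big-endian one. */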
7185 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7187 tree type = TREE_TYPE (expr);
7188 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7189 int byte, offset, word, words;
7190 unsigned char value;
7192 if (total_bytes > len)
7194 words = total_bytes / UNITS_PER_WORD;
7196 for (byte = 0; byte < total_bytes; byte++)
7198 int bitpos = byte * BITS_PER_UNIT;
7199 if (bitpos < HOST_BITS_PER_WIDE_INT)
7200 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7202 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7203 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7205 if (total_bytes > UNITS_PER_WORD)
7207 word = byte / UNITS_PER_WORD;
7208 if (WORDS_BIG_ENDIAN)
7209 word = (words - 1) - word;
7210 offset = word * UNITS_PER_WORD;
7211 if (BYTES_BIG_ENDIAN)
7212 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7214 offset += byte % UNITS_PER_WORD;
7217 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7218 ptr[offset] = value;
7224 /* Subroutine of native_encode_expr. Encode the REAL_CST
7225 specified by EXPR into the buffer PTR of length LEN bytes.
7226 Return the number of bytes placed in the buffer, or zero upon failure. */
7230 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7232 tree type = TREE_TYPE (expr);
7233 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7234 int byte, offset, word, words, bitpos;
7235 unsigned char value;
7237 /* There are always 32 bits in each long, no matter the size of
7238 the host's long. We handle floating point representations with
7242 if (total_bytes > len)
7244 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7246 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7248 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7249 bitpos += BITS_PER_UNIT)
7251 byte = (bitpos / BITS_PER_UNIT) & 3;
7252 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7254 if (UNITS_PER_WORD < 4)
7256 word = byte / UNITS_PER_WORD;
7257 if (WORDS_BIG_ENDIAN)
7258 word = (words - 1) - word;
7259 offset = word * UNITS_PER_WORD;
7260 if (BYTES_BIG_ENDIAN)
7261 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7263 offset += byte % UNITS_PER_WORD;
7266 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7267 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7272 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7273 specified by EXPR into the buffer PTR of length LEN bytes.
7274 Return the number of bytes placed in the buffer, or zero upon failure. */
7278 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7283 part = TREE_REALPART (expr);
7284 rsize = native_encode_expr (part, ptr, len);
7287 part = TREE_IMAGPART (expr);
7288 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7291 return rsize + isize;
7295 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7296 specified by EXPR into the buffer PTR of length LEN bytes.
7297 Return the number of bytes placed in the buffer, or zero upon failure. */
7301 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7303 int i, size, offset, count;
7304 tree itype, elem, elements;
7307 elements = TREE_VECTOR_CST_ELTS (expr);
7308 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7309 itype = TREE_TYPE (TREE_TYPE (expr));
7310 size = GET_MODE_SIZE (TYPE_MODE (itype));
7311 for (i = 0; i < count; i++)
7315 elem = TREE_VALUE (elements);
7316 elements = TREE_CHAIN (elements);
7323 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7328 if (offset + size > len)
7330 memset (ptr+offset, 0, size);
7338 /* Subroutine of native_encode_expr. Encode the STRING_CST
7339 specified by EXPR into the buffer PTR of length LEN bytes.
7340 Return the number of bytes placed in the buffer, or zero upon failure. */
7344 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7346 tree type = TREE_TYPE (expr);
7347 HOST_WIDE_INT total_bytes;
7349 if (TREE_CODE (type) != ARRAY_TYPE
7350 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7351 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7352 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7354 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7355 if (total_bytes > len)
7357 if (TREE_STRING_LENGTH (expr) < total_bytes)
7359 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7360 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7361 total_bytes - TREE_STRING_LENGTH (expr));
7364 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7369 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7370 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7371 buffer PTR of length LEN bytes. Return the number of bytes
7372 placed in the buffer, or zero upon failure. */
7375 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7377 switch (TREE_CODE (expr))
7380 return native_encode_int (expr, ptr, len);
7383 return native_encode_real (expr, ptr, len);
7386 return native_encode_complex (expr, ptr, len);
7389 return native_encode_vector (expr, ptr, len);
7392 return native_encode_string (expr, ptr, len);
7400 /* Subroutine of native_interpret_expr. Interpret the contents of
7401 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7402 If the buffer cannot be interpreted, return NULL_TREE. */
7405 native_interpret_int (tree type, const unsigned char *ptr, int len)
7407 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7408 int byte, offset, word, words;
7409 unsigned char value;
7412 if (total_bytes > len)
7414 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7417 result = double_int_zero;
7418 words = total_bytes / UNITS_PER_WORD;
7420 for (byte = 0; byte < total_bytes; byte++)
7422 int bitpos = byte * BITS_PER_UNIT;
7423 if (total_bytes > UNITS_PER_WORD)
7425 word = byte / UNITS_PER_WORD;
7426 if (WORDS_BIG_ENDIAN)
7427 word = (words - 1) - word;
7428 offset = word * UNITS_PER_WORD;
7429 if (BYTES_BIG_ENDIAN)
7430 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7432 offset += byte % UNITS_PER_WORD;
7435 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7436 value = ptr[offset];
7438 if (bitpos < HOST_BITS_PER_WIDE_INT)
7439 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7441 result.high |= (unsigned HOST_WIDE_INT) value
7442 << (bitpos - HOST_BITS_PER_WIDE_INT);
7445 return double_int_to_tree (type, result);
7449 /* Subroutine of native_interpret_expr. Interpret the contents of
7450 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7451 If the buffer cannot be interpreted, return NULL_TREE. */
7454 native_interpret_real (tree type, const unsigned char *ptr, int len)
7456 enum machine_mode mode = TYPE_MODE (type);
7457 int total_bytes = GET_MODE_SIZE (mode);
7458 int byte, offset, word, words, bitpos;
7459 unsigned char value;
7460 /* There are always 32 bits in each long, no matter the size of
7461 the host's long. We handle floating point representations with
7466 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7467 if (total_bytes > len || total_bytes > 24)
7469 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7471 memset (tmp, 0, sizeof (tmp));
7472 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7473 bitpos += BITS_PER_UNIT)
7475 byte = (bitpos / BITS_PER_UNIT) & 3;
7476 if (UNITS_PER_WORD < 4)
7478 word = byte / UNITS_PER_WORD;
7479 if (WORDS_BIG_ENDIAN)
7480 word = (words - 1) - word;
7481 offset = word * UNITS_PER_WORD;
7482 if (BYTES_BIG_ENDIAN)
7483 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7485 offset += byte % UNITS_PER_WORD;
7488 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7489 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7491 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7494 real_from_target (&r, tmp, mode);
7495 return build_real (type, r);
7499 /* Subroutine of native_interpret_expr. Interpret the contents of
7500 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7501 If the buffer cannot be interpreted, return NULL_TREE. */
7504 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7506 tree etype, rpart, ipart;
7509 etype = TREE_TYPE (type);
7510 size = GET_MODE_SIZE (TYPE_MODE (etype));
7513 rpart = native_interpret_expr (etype, ptr, size);
7516 ipart = native_interpret_expr (etype, ptr+size, size);
7519 return build_complex (type, rpart, ipart);
7523 /* Subroutine of native_interpret_expr. Interpret the contents of
7524 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7525 If the buffer cannot be interpreted, return NULL_TREE. */
7528 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7530 tree etype, elem, elements;
7533 etype = TREE_TYPE (type);
7534 size = GET_MODE_SIZE (TYPE_MODE (etype));
7535 count = TYPE_VECTOR_SUBPARTS (type);
7536 if (size * count > len)
7539 elements = NULL_TREE;
7540 for (i = count - 1; i >= 0; i--)
7542 elem = native_interpret_expr (etype, ptr+(i*size), size);
7545 elements = tree_cons (NULL_TREE, elem, elements);
7547 return build_vector (type, elements);
7551 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7552 the buffer PTR of length LEN as a constant of type TYPE. For
7553 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7554 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7555 return NULL_TREE. */
7558 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7560 switch (TREE_CODE (type))
7565 return native_interpret_int (type, ptr, len);
7568 return native_interpret_real (type, ptr, len);
7571 return native_interpret_complex (type, ptr, len);
7574 return native_interpret_vector (type, ptr, len);
7582 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7583 TYPE at compile-time. If we're unable to perform the conversion
7584 return NULL_TREE. */
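/* For example, on a target whose float is the 4-byte IEEE single
   format, VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216
   (0x3f800000), the bit pattern of 1.0f read back as an int. */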
7587 fold_view_convert_expr (tree type, tree expr)
7589 /* We support up to 512-bit values (for V8DFmode). */
7590 unsigned char buffer[64];
7593 /* Check that the host and target are sane. */
7594 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7597 len = native_encode_expr (expr, buffer, sizeof (buffer));
7601 return native_interpret_expr (type, buffer, len);
7604 /* Build an expression for the address of T. Folds away INDIRECT_REF
7605 to avoid confusing the gimplify process. */
7608 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7610 /* The size of the object is not relevant when talking about its address. */
7611 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7612 t = TREE_OPERAND (t, 0);
7614 if (TREE_CODE (t) == INDIRECT_REF)
7616 t = TREE_OPERAND (t, 0);
7618 if (TREE_TYPE (t) != ptrtype)
7620 t = build1 (NOP_EXPR, ptrtype, t);
7621 SET_EXPR_LOCATION (t, loc);
7624 else if (TREE_CODE (t) == MEM_REF
7625 && integer_zerop (TREE_OPERAND (t, 1)))
7626 return TREE_OPERAND (t, 0);
7627 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7629 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7631 if (TREE_TYPE (t) != ptrtype)
7632 t = fold_convert_loc (loc, ptrtype, t);
7636 t = build1 (ADDR_EXPR, ptrtype, t);
7637 SET_EXPR_LOCATION (t, loc);
7643 /* Build an expression for the address of T. */
7646 build_fold_addr_expr_loc (location_t loc, tree t)
7648 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7650 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7653 /* Fold a unary expression of code CODE and type TYPE with operand
7654 OP0. Return the folded expression if folding is successful.
7655 Otherwise, return NULL_TREE. */
7658 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7662 enum tree_code_class kind = TREE_CODE_CLASS (code);
7664 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7665 && TREE_CODE_LENGTH (code) == 1);
7670 if (CONVERT_EXPR_CODE_P (code)
7671 || code == FLOAT_EXPR || code == ABS_EXPR)
7673 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7675 STRIP_SIGN_NOPS (arg0);
7679 /* Strip any conversions that don't change the mode. This
7680 is safe for every expression, except for a comparison
7681 expression because its signedness is derived from its
7684 Note that this is done as an internal manipulation within
7685 the constant folder, in order to find the simplest
7686 representation of the arguments so that their form can be
7687 studied. In any case, the appropriate type conversions
7688 should be put back in the tree that will get out of the
7694 if (TREE_CODE_CLASS (code) == tcc_unary)
7696 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7697 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7698 fold_build1_loc (loc, code, type,
7699 fold_convert_loc (loc, TREE_TYPE (op0),
7700 TREE_OPERAND (arg0, 1))));
7701 else if (TREE_CODE (arg0) == COND_EXPR)
7703 tree arg01 = TREE_OPERAND (arg0, 1);
7704 tree arg02 = TREE_OPERAND (arg0, 2);
7705 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7706 arg01 = fold_build1_loc (loc, code, type,
7707 fold_convert_loc (loc,
7708 TREE_TYPE (op0), arg01));
7709 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7710 arg02 = fold_build1_loc (loc, code, type,
7711 fold_convert_loc (loc,
7712 TREE_TYPE (op0), arg02));
7713 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7716 /* If this was a conversion, and all we did was to move it
7717 inside the COND_EXPR, bring it back out. But leave it if
7718 it is a conversion from integer to integer and the
7719 result precision is no wider than a word since such a
7720 conversion is cheap and may be optimized away by combine,
7721 while it couldn't if it were outside the COND_EXPR. Then return
7722 so we don't get into an infinite recursion loop taking the
7723 conversion out and then back in. */
7725 if ((CONVERT_EXPR_CODE_P (code)
7726 || code == NON_LVALUE_EXPR)
7727 && TREE_CODE (tem) == COND_EXPR
7728 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7729 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7730 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7731 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7732 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7733 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7734 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7736 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7737 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7738 || flag_syntax_only))
7740 tem = build1 (code, type,
7742 TREE_TYPE (TREE_OPERAND
7743 (TREE_OPERAND (tem, 1), 0)),
7744 TREE_OPERAND (tem, 0),
7745 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7746 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7747 SET_EXPR_LOCATION (tem, loc);
7751 else if (COMPARISON_CLASS_P (arg0))
7753 if (TREE_CODE (type) == BOOLEAN_TYPE)
7755 arg0 = copy_node (arg0);
7756 TREE_TYPE (arg0) = type;
7759 else if (TREE_CODE (type) != INTEGER_TYPE)
7760 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7761 fold_build1_loc (loc, code, type,
7763 fold_build1_loc (loc, code, type,
7764 integer_zero_node));
7771 /* Re-association barriers around constants and other re-association
7772 barriers can be removed. */
7773 if (CONSTANT_CLASS_P (op0)
7774 || TREE_CODE (op0) == PAREN_EXPR)
7775 return fold_convert_loc (loc, type, op0);
7780 case FIX_TRUNC_EXPR:
7781 if (TREE_TYPE (op0) == type)
7784 /* If we have (type) (a CMP b) and type is an integral type, return
7785 new expression involving the new type. */
7786 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7787 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7788 TREE_OPERAND (op0, 1));
7790 /* Handle cases of two conversions in a row. */
7791 if (CONVERT_EXPR_P (op0))
7793 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7794 tree inter_type = TREE_TYPE (op0);
7795 int inside_int = INTEGRAL_TYPE_P (inside_type);
7796 int inside_ptr = POINTER_TYPE_P (inside_type);
7797 int inside_float = FLOAT_TYPE_P (inside_type);
7798 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7799 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7800 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7801 int inter_int = INTEGRAL_TYPE_P (inter_type);
7802 int inter_ptr = POINTER_TYPE_P (inter_type);
7803 int inter_float = FLOAT_TYPE_P (inter_type);
7804 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7805 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7806 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7807 int final_int = INTEGRAL_TYPE_P (type);
7808 int final_ptr = POINTER_TYPE_P (type);
7809 int final_float = FLOAT_TYPE_P (type);
7810 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7811 unsigned int final_prec = TYPE_PRECISION (type);
7812 int final_unsignedp = TYPE_UNSIGNED (type);
7814 /* In addition to the cases of two conversions in a row
7815 handled below, if we are converting something to its own
7816 type via an object of identical or wider precision, neither
7817 conversion is needed. */
7818 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7819 && (((inter_int || inter_ptr) && final_int)
7820 || (inter_float && final_float))
7821 && inter_prec >= final_prec)
7822 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7824 /* Likewise, if the intermediate and initial types are either both
7825 float or both integer, we don't need the middle conversion if the
7826 former is wider than the latter and doesn't change the signedness
7827 (for integers). Avoid this if the final type is a pointer since
7828 then we sometimes need the middle conversion. Likewise if the
7829 final type has a precision not equal to the size of its mode. */
7830 if (((inter_int && inside_int)
7831 || (inter_float && inside_float)
7832 || (inter_vec && inside_vec))
7833 && inter_prec >= inside_prec
7834 && (inter_float || inter_vec
7835 || inter_unsignedp == inside_unsignedp)
7836 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7837 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7839 && (! final_vec || inter_prec == inside_prec))
7840 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7842 /* If we have a sign-extension of a zero-extended value, we can
7843 replace that by a single zero-extension. */
7844 if (inside_int && inter_int && final_int
7845 && inside_prec < inter_prec && inter_prec < final_prec
7846 && inside_unsignedp && !inter_unsignedp)
7847 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7849 /* Two conversions in a row are not needed unless:
7850 - some conversion is floating-point (overstrict for now), or
7851 - some conversion is a vector (overstrict for now), or
7852 - the intermediate type is narrower than both initial and final, or
7854 - the intermediate type and innermost type differ in signedness,
7855 and the outermost type is wider than the intermediate, or
7856 - the initial type is a pointer type and the precisions of the
7857 intermediate and final types differ, or
7858 - the final type is a pointer type and the precisions of the
7859 initial and intermediate types differ. */
7860 if (! inside_float && ! inter_float && ! final_float
7861 && ! inside_vec && ! inter_vec && ! final_vec
7862 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7863 && ! (inside_int && inter_int
7864 && inter_unsignedp != inside_unsignedp
7865 && inter_prec < final_prec)
7866 && ((inter_unsignedp && inter_prec > inside_prec)
7867 == (final_unsignedp && final_prec > inter_prec))
7868 && ! (inside_ptr && inter_prec != final_prec)
7869 && ! (final_ptr && inside_prec != inter_prec)
7870 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7871 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7872 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7875 /* Handle (T *)&A.B.C for A being of type T and B and C
7876 living at offset zero. This occurs frequently in
7877 C++ upcasting and then accessing the base. */
7878 if (TREE_CODE (op0) == ADDR_EXPR
7879 && POINTER_TYPE_P (type)
7880 && handled_component_p (TREE_OPERAND (op0, 0)))
7882 HOST_WIDE_INT bitsize, bitpos;
7884 enum machine_mode mode;
7885 int unsignedp, volatilep;
7886 tree base = TREE_OPERAND (op0, 0);
7887 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7888 &mode, &unsignedp, &volatilep, false);
7889 /* If the reference was to a (constant) zero offset, we can use
7890 the address of the base if it has the same base type
7891 as the result type and the pointer type is unqualified. */
7892 if (! offset && bitpos == 0
7893 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7894 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7895 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7896 return fold_convert_loc (loc, type,
7897 build_fold_addr_expr_loc (loc, base));
7900 if (TREE_CODE (op0) == MODIFY_EXPR
7901 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7902 /* Detect assigning a bitfield. */
7903 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7905 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7907 /* Don't leave an assignment inside a conversion
7908 unless assigning a bitfield. */
7909 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7910 /* First do the assignment, then return converted constant. */
7911 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7912 TREE_NO_WARNING (tem) = 1;
7913 TREE_USED (tem) = 1;
7914 SET_EXPR_LOCATION (tem, loc);
7918 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7919 constants (if x has signed type, the sign bit cannot be set
7920 in c). This folds extension into the BIT_AND_EXPR.
7921 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7922 very likely don't have maximal range for their precision and this
7923 transformation effectively doesn't preserve non-maximal ranges. */
7924 if (TREE_CODE (type) == INTEGER_TYPE
7925 && TREE_CODE (op0) == BIT_AND_EXPR
7926 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7928 tree and_expr = op0;
7929 tree and0 = TREE_OPERAND (and_expr, 0);
7930 tree and1 = TREE_OPERAND (and_expr, 1);
7933 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7934 || (TYPE_PRECISION (type)
7935 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7937 else if (TYPE_PRECISION (TREE_TYPE (and1))
7938 <= HOST_BITS_PER_WIDE_INT
7939 && host_integerp (and1, 1))
7941 unsigned HOST_WIDE_INT cst;
7943 cst = tree_low_cst (and1, 1);
7944 cst &= (HOST_WIDE_INT) -1
7945 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7946 change = (cst == 0);
7947 #ifdef LOAD_EXTEND_OP
7949 && !flag_syntax_only
7950 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7953 tree uns = unsigned_type_for (TREE_TYPE (and0));
7954 and0 = fold_convert_loc (loc, uns, and0);
7955 and1 = fold_convert_loc (loc, uns, and1);
7961 tem = force_fit_type_double (type, tree_to_double_int (and1),
7962 0, TREE_OVERFLOW (and1));
7963 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7964 fold_convert_loc (loc, type, and0), tem);
7968 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7969 when one of the new casts will fold away. Conservatively we assume
7970 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7971 if (POINTER_TYPE_P (type)
7972 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7973 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7974 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7975 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7977 tree arg00 = TREE_OPERAND (arg0, 0);
7978 tree arg01 = TREE_OPERAND (arg0, 1);
7980 return fold_build2_loc (loc,
7981 TREE_CODE (arg0), type,
7982 fold_convert_loc (loc, type, arg00),
7983 fold_convert_loc (loc, sizetype, arg01));
7986 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7987 of the same precision, and X is an integer type not narrower than
7988 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7989 if (INTEGRAL_TYPE_P (type)
7990 && TREE_CODE (op0) == BIT_NOT_EXPR
7991 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7992 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7993 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7995 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7996 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7997 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7998 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7999 fold_convert_loc (loc, type, tem));
8002 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8003 type of X and Y (integer types only). */
8004 if (INTEGRAL_TYPE_P (type)
8005 && TREE_CODE (op0) == MULT_EXPR
8006 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8007 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8009 /* Be careful not to introduce new overflows. */
8011 if (TYPE_OVERFLOW_WRAPS (type))
8014 mult_type = unsigned_type_for (type);
8016 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8018 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8019 fold_convert_loc (loc, mult_type,
8020 TREE_OPERAND (op0, 0)),
8021 fold_convert_loc (loc, mult_type,
8022 TREE_OPERAND (op0, 1)));
8023 return fold_convert_loc (loc, type, tem);
8027 tem = fold_convert_const (code, type, op0);
8028 return tem ? tem : NULL_TREE;
8030 case ADDR_SPACE_CONVERT_EXPR:
8031 if (integer_zerop (arg0))
8032 return fold_convert_const (code, type, arg0);
8035 case FIXED_CONVERT_EXPR:
8036 tem = fold_convert_const (code, type, arg0);
8037 return tem ? tem : NULL_TREE;
8039 case VIEW_CONVERT_EXPR:
8040 if (TREE_TYPE (op0) == type)
8042 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8043 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8044 type, TREE_OPERAND (op0, 0));
8045 if (TREE_CODE (op0) == MEM_REF)
8046 return fold_build2_loc (loc, MEM_REF, type,
8047 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8049 /* For integral conversions with the same precision or pointer
8050 conversions use a NOP_EXPR instead. */
8051 if ((INTEGRAL_TYPE_P (type)
8052 || POINTER_TYPE_P (type))
8053 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8054 || POINTER_TYPE_P (TREE_TYPE (op0)))
8055 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8056 return fold_convert_loc (loc, type, op0);
8058 /* Strip inner integral conversions that do not change the precision. */
8059 if (CONVERT_EXPR_P (op0)
8060 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8061 || POINTER_TYPE_P (TREE_TYPE (op0)))
8062 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8063 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8064 && (TYPE_PRECISION (TREE_TYPE (op0))
8065 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8066 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8067 type, TREE_OPERAND (op0, 0));
8069 return fold_view_convert_expr (type, op0);
8072 tem = fold_negate_expr (loc, arg0);
8074 return fold_convert_loc (loc, type, tem);
8078 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8079 return fold_abs_const (arg0, type);
8080 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8081 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8082 /* Convert fabs((double)float) into (double)fabsf(float). */
8083 else if (TREE_CODE (arg0) == NOP_EXPR
8084 && TREE_CODE (type) == REAL_TYPE)
8086 tree targ0 = strip_float_extensions (arg0);
8088 return fold_convert_loc (loc, type,
8089 fold_build1_loc (loc, ABS_EXPR,
8093 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8094 else if (TREE_CODE (arg0) == ABS_EXPR)
8096 else if (tree_expr_nonnegative_p (arg0))
8099 /* Strip sign ops from argument. */
8100 if (TREE_CODE (type) == REAL_TYPE)
8102 tem = fold_strip_sign_ops (arg0);
8104 return fold_build1_loc (loc, ABS_EXPR, type,
8105 fold_convert_loc (loc, type, tem));
8110 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8111 return fold_convert_loc (loc, type, arg0);
8112 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8114 tree itype = TREE_TYPE (type);
8115 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8116 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8117 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8118 negate_expr (ipart));
8120 if (TREE_CODE (arg0) == COMPLEX_CST)
8122 tree itype = TREE_TYPE (type);
8123 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8124 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8125 return build_complex (type, rpart, negate_expr (ipart));
8127 if (TREE_CODE (arg0) == CONJ_EXPR)
8128 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8132 if (TREE_CODE (arg0) == INTEGER_CST)
8133 return fold_not_const (arg0, type);
8134 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8135 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8136 /* Convert ~ (-A) to A - 1. */
8137 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8138 return fold_build2_loc (loc, MINUS_EXPR, type,
8139 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8140 build_int_cst (type, 1));
8141 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8142 else if (INTEGRAL_TYPE_P (type)
8143 && ((TREE_CODE (arg0) == MINUS_EXPR
8144 && integer_onep (TREE_OPERAND (arg0, 1)))
8145 || (TREE_CODE (arg0) == PLUS_EXPR
8146 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8147 return fold_build1_loc (loc, NEGATE_EXPR, type,
8148 fold_convert_loc (loc, type,
8149 TREE_OPERAND (arg0, 0)));
8150 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8151 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8152 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8153 fold_convert_loc (loc, type,
8154 TREE_OPERAND (arg0, 0)))))
8155 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8156 fold_convert_loc (loc, type,
8157 TREE_OPERAND (arg0, 1)));
8158 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8159 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8160 fold_convert_loc (loc, type,
8161 TREE_OPERAND (arg0, 1)))))
8162 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8163 fold_convert_loc (loc, type,
8164 TREE_OPERAND (arg0, 0)), tem);
8165 /* Perform BIT_NOT_EXPR on each element individually. */
8166 else if (TREE_CODE (arg0) == VECTOR_CST)
8168 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8169 int count = TYPE_VECTOR_SUBPARTS (type), i;
8171 for (i = 0; i < count; i++)
8175 elem = TREE_VALUE (elements);
8176 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8177 if (elem == NULL_TREE)
8179 elements = TREE_CHAIN (elements);
8182 elem = build_int_cst (TREE_TYPE (type), -1);
8183 list = tree_cons (NULL_TREE, elem, list);
8186 return build_vector (type, nreverse (list));
8191 case TRUTH_NOT_EXPR:
8192 /* The argument to invert_truthvalue must have Boolean type. */
8193 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8194 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8196 /* Note that the operand of this must be an int
8197 and its values must be 0 or 1.
8198 ("true" is a fixed value perhaps depending on the language,
8199 but we don't handle values other than 1 correctly yet.) */
8200 tem = fold_truth_not_expr (loc, arg0);
8203 return fold_convert_loc (loc, type, tem);
8206 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8207 return fold_convert_loc (loc, type, arg0);
8208 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8209 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8210 TREE_OPERAND (arg0, 1));
8211 if (TREE_CODE (arg0) == COMPLEX_CST)
8212 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8213 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8215 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8216 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8217 fold_build1_loc (loc, REALPART_EXPR, itype,
8218 TREE_OPERAND (arg0, 0)),
8219 fold_build1_loc (loc, REALPART_EXPR, itype,
8220 TREE_OPERAND (arg0, 1)));
8221 return fold_convert_loc (loc, type, tem);
8223 if (TREE_CODE (arg0) == CONJ_EXPR)
8225 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8226 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8227 TREE_OPERAND (arg0, 0));
8228 return fold_convert_loc (loc, type, tem);
8230 if (TREE_CODE (arg0) == CALL_EXPR)
8232 tree fn = get_callee_fndecl (arg0);
8233 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8234 switch (DECL_FUNCTION_CODE (fn))
8236 CASE_FLT_FN (BUILT_IN_CEXPI):
8237 fn = mathfn_built_in (type, BUILT_IN_COS);
8239 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
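/* cexpi (x) computes cos (x) + i*sin (x), so its real part is simply
   cos (x); the call built above rewrites REALPART_EXPR of the cexpi
   call into the corresponding cos builtin.  */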
8249 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8250 return build_zero_cst (type);
8251 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8252 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8253 TREE_OPERAND (arg0, 0));
8254 if (TREE_CODE (arg0) == COMPLEX_CST)
8255 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8256 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8258 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8259 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8260 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8261 TREE_OPERAND (arg0, 0)),
8262 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8263 TREE_OPERAND (arg0, 1)));
8264 return fold_convert_loc (loc, type, tem);
8266 if (TREE_CODE (arg0) == CONJ_EXPR)
8268 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8269 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8270 return fold_convert_loc (loc, type, negate_expr (tem));
8272 if (TREE_CODE (arg0) == CALL_EXPR)
8274 tree fn = get_callee_fndecl (arg0);
8275 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8276 switch (DECL_FUNCTION_CODE (fn))
8278 CASE_FLT_FN (BUILT_IN_CEXPI):
8279 fn = mathfn_built_in (type, BUILT_IN_SIN);
8281 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8291 /* Fold *&X to X if X is an lvalue. */
8292 if (TREE_CODE (op0) == ADDR_EXPR)
8294 tree op00 = TREE_OPERAND (op0, 0);
8295 if ((TREE_CODE (op00) == VAR_DECL
8296 || TREE_CODE (op00) == PARM_DECL
8297 || TREE_CODE (op00) == RESULT_DECL)
8298 && !TREE_READONLY (op00))
8305 } /* switch (code) */
8309 /* If the operation was a conversion, do _not_ mark a resulting constant
8310 with TREE_OVERFLOW if the original constant was not. These conversions
8311 have implementation defined behavior and retaining the TREE_OVERFLOW
8312 flag here would confuse later passes such as VRP. */
8314 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8315 tree type, tree op0)
8317 tree res = fold_unary_loc (loc, code, type, op0);
8319 && TREE_CODE (res) == INTEGER_CST
8320 && TREE_CODE (op0) == INTEGER_CST
8321 && CONVERT_EXPR_CODE_P (code))
8322 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
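/* For example, folding (signed char) 200 yields -56 on the usual
   two's-complement targets; that conversion is implementation-defined
   rather than undefined behavior, so the result must not be flagged
   with TREE_OVERFLOW even though the value changed.  */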
8327 /* Fold a binary expression of code CODE and type TYPE with operands
8328 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8329 Return the folded expression if folding is successful. Otherwise,
8330 return NULL_TREE. */
8333 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8335 enum tree_code compl_code;
8337 if (code == MIN_EXPR)
8338 compl_code = MAX_EXPR;
8339 else if (code == MAX_EXPR)
8340 compl_code = MIN_EXPR;
8344 /* MIN (MAX (a, b), b) == b. */
8345 if (TREE_CODE (op0) == compl_code
8346 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8347 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8349 /* MIN (MAX (b, a), b) == b. */
8350 if (TREE_CODE (op0) == compl_code
8351 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8352 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8353 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8355 /* MIN (a, MAX (a, b)) == a. */
8356 if (TREE_CODE (op1) == compl_code
8357 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8358 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8359 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8361 /* MIN (a, MAX (b, a)) == a. */
8362 if (TREE_CODE (op1) == compl_code
8363 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8364 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8365 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
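/* A quick sanity check of the identities above: for MIN (MAX (a, b), b),
   if a <= b then MAX (a, b) == b and MIN (b, b) == b; if a > b then
   MAX (a, b) == a and MIN (a, b) == b.  Either way the result is b,
   which is why the MAX operand can be dropped entirely.  */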
8370 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8371 by changing CODE to reduce the magnitude of constants involved in
8372 ARG0 of the comparison.
8373 Returns a canonicalized comparison tree if a simplification was
8374 possible, otherwise returns NULL_TREE.
8375 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8376 valid if signed overflow is undefined. */
8379 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8380 tree arg0, tree arg1,
8381 bool *strict_overflow_p)
8383 enum tree_code code0 = TREE_CODE (arg0);
8384 tree t, cst0 = NULL_TREE;
8388 /* Match A +- CST code arg1 and CST code arg1. We can change the
8389 first form only if overflow is undefined. */
8390 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8391 /* In principle pointers also have undefined overflow behavior,
8392 but that causes problems elsewhere. */
8393 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8394 && (code0 == MINUS_EXPR
8395 || code0 == PLUS_EXPR)
8396 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8397 || code0 == INTEGER_CST))
8400 /* Identify the constant in arg0 and its sign. */
8401 if (code0 == INTEGER_CST)
8404 cst0 = TREE_OPERAND (arg0, 1);
8405 sgn0 = tree_int_cst_sgn (cst0);
8407 /* Overflowed constants and zero will cause problems. */
8408 if (integer_zerop (cst0)
8409 || TREE_OVERFLOW (cst0))
8412 /* See if we can reduce the magnitude of the constant in
8413 arg0 by changing the comparison code. */
8414 if (code0 == INTEGER_CST)
8416 /* CST <= arg1 -> CST-1 < arg1. */
8417 if (code == LE_EXPR && sgn0 == 1)
8419 /* -CST < arg1 -> -CST-1 <= arg1. */
8420 else if (code == LT_EXPR && sgn0 == -1)
8422 /* CST > arg1 -> CST-1 >= arg1. */
8423 else if (code == GT_EXPR && sgn0 == 1)
8425 /* -CST >= arg1 -> -CST-1 > arg1. */
8426 else if (code == GE_EXPR && sgn0 == -1)
8430 /* arg1 code' CST' might be more canonical. */
8435 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8437 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8439 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8440 else if (code == GT_EXPR
8441 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8443 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8444 else if (code == LE_EXPR
8445 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8447 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8448 else if (code == GE_EXPR
8449 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
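/* For instance, with undefined signed overflow "a + 4 <= b" can be
   rewritten as "a + 3 < b": the constant shrinks in magnitude, which is
   the canonical direction chosen here.  */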
8453 *strict_overflow_p = true;
8456 /* Now build the constant reduced in magnitude. But not if that
8457 would produce one outside of its type's range.
8458 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8460 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8461 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8463 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8464 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8465 /* We cannot swap the comparison here as that would cause us to
8466 endlessly recurse. */
8469 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8470 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8471 if (code0 != INTEGER_CST)
8472 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8474 /* If swapping might yield a more canonical form, do so. */
8476 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8478 return fold_build2_loc (loc, code, type, t, arg1);
8481 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8482 overflow further. Try to decrease the magnitude of constants involved
8483 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8484 and put sole constants at the second argument position.
8485 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8488 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8489 tree arg0, tree arg1)
8492 bool strict_overflow_p;
8493 const char * const warnmsg = G_("assuming signed overflow does not occur "
8494 "when reducing constant in comparison");
8496 /* Try canonicalization by simplifying arg0. */
8497 strict_overflow_p = false;
8498 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8499 &strict_overflow_p);
8502 if (strict_overflow_p)
8503 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8507 /* Try canonicalization by simplifying arg1 using the swapped
8509 code = swap_tree_comparison (code);
8510 strict_overflow_p = false;
8511 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8512 &strict_overflow_p);
8513 if (t && strict_overflow_p)
8514 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8518 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8519 space. This is used to avoid issuing overflow warnings for
8520 expressions like &p->x which cannot wrap. */
8523 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8525 unsigned HOST_WIDE_INT offset_low, total_low;
8526 HOST_WIDE_INT size, offset_high, total_high;
8528 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8534 if (offset == NULL_TREE)
8539 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8543 offset_low = TREE_INT_CST_LOW (offset);
8544 offset_high = TREE_INT_CST_HIGH (offset);
8547 if (add_double_with_sign (offset_low, offset_high,
8548 bitpos / BITS_PER_UNIT, 0,
8549 &total_low, &total_high,
8553 if (total_high != 0)
8556 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8560 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8562 if (TREE_CODE (base) == ADDR_EXPR)
8564 HOST_WIDE_INT base_size;
8566 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8567 if (base_size > 0 && size < base_size)
8571 return total_low > (unsigned HOST_WIDE_INT) size;
8574 /* Subroutine of fold_binary. This routine performs all of the
8575 transformations that are common to the equality/inequality
8576 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8577 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8578 fold_binary should go through fold_binary rather than calling this routine directly. Fold a comparison with
8579 tree code CODE and type TYPE with operands OP0 and OP1. Return
8580 the folded comparison or NULL_TREE. */
8583 fold_comparison (location_t loc, enum tree_code code, tree type,
8586 tree arg0, arg1, tem;
8591 STRIP_SIGN_NOPS (arg0);
8592 STRIP_SIGN_NOPS (arg1);
8594 tem = fold_relational_const (code, type, arg0, arg1);
8595 if (tem != NULL_TREE)
8598 /* If one arg is a real or integer constant, put it last. */
8599 if (tree_swap_operands_p (arg0, arg1, true))
8600 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8602 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8603 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8604 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8605 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8606 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8607 && (TREE_CODE (arg1) == INTEGER_CST
8608 && !TREE_OVERFLOW (arg1)))
8610 tree const1 = TREE_OPERAND (arg0, 1);
8612 tree variable = TREE_OPERAND (arg0, 0);
8615 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8617 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8618 TREE_TYPE (arg1), const2, const1);
8620 /* If the constant operation overflowed this can be
8621 simplified as a comparison against INT_MAX/INT_MIN. */
8622 if (TREE_CODE (lhs) == INTEGER_CST
8623 && TREE_OVERFLOW (lhs))
8625 int const1_sgn = tree_int_cst_sgn (const1);
8626 enum tree_code code2 = code;
8628 /* Get the sign of the constant on the lhs if the
8629 operation were VARIABLE + CONST1. */
8630 if (TREE_CODE (arg0) == MINUS_EXPR)
8631 const1_sgn = -const1_sgn;
8633 /* The sign of the constant determines if we overflowed
8634 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8635 Canonicalize to the INT_MIN overflow by swapping the comparison
8637 if (const1_sgn == -1)
8638 code2 = swap_tree_comparison (code);
8640 /* We now can look at the canonicalized case
8641 VARIABLE + 1 CODE2 INT_MIN
8642 and decide on the result. */
8643 if (code2 == LT_EXPR
8645 || code2 == EQ_EXPR)
8646 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8647 else if (code2 == NE_EXPR
8649 || code2 == GT_EXPR)
8650 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8653 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8654 && (TREE_CODE (lhs) != INTEGER_CST
8655 || !TREE_OVERFLOW (lhs)))
8657 fold_overflow_warning ("assuming signed overflow does not occur "
8658 "when changing X +- C1 cmp C2 to "
8660 WARN_STRICT_OVERFLOW_COMPARISON);
8661 return fold_build2_loc (loc, code, type, variable, lhs);
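/* As a concrete example, "x + 10 < 20" becomes "x < 10" here: the
   constant moves to the right-hand side, which is safe only because
   signed overflow is assumed not to happen (hence the
   -Wstrict-overflow warning issued above).  */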
8665 /* For comparisons of pointers we can decompose it to a compile time
8666 comparison of the base objects and the offsets into the object.
8667 This requires at least one operand being an ADDR_EXPR or a
8668 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8669 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8670 && (TREE_CODE (arg0) == ADDR_EXPR
8671 || TREE_CODE (arg1) == ADDR_EXPR
8672 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8673 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8675 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8676 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8677 enum machine_mode mode;
8678 int volatilep, unsignedp;
8679 bool indirect_base0 = false, indirect_base1 = false;
8681 /* Get base and offset for the access. Strip ADDR_EXPR for
8682 get_inner_reference, but put it back by stripping INDIRECT_REF
8683 off the base object if possible. indirect_baseN will be true
8684 if baseN is not an address but refers to the object itself. */
8686 if (TREE_CODE (arg0) == ADDR_EXPR)
8688 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8689 &bitsize, &bitpos0, &offset0, &mode,
8690 &unsignedp, &volatilep, false);
8691 if (TREE_CODE (base0) == INDIRECT_REF)
8692 base0 = TREE_OPERAND (base0, 0);
8694 indirect_base0 = true;
8696 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8698 base0 = TREE_OPERAND (arg0, 0);
8699 STRIP_SIGN_NOPS (base0);
8700 if (TREE_CODE (base0) == ADDR_EXPR)
8702 base0 = TREE_OPERAND (base0, 0);
8703 indirect_base0 = true;
8705 offset0 = TREE_OPERAND (arg0, 1);
8709 if (TREE_CODE (arg1) == ADDR_EXPR)
8711 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8712 &bitsize, &bitpos1, &offset1, &mode,
8713 &unsignedp, &volatilep, false);
8714 if (TREE_CODE (base1) == INDIRECT_REF)
8715 base1 = TREE_OPERAND (base1, 0);
8717 indirect_base1 = true;
8719 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8721 base1 = TREE_OPERAND (arg1, 0);
8722 STRIP_SIGN_NOPS (base1);
8723 if (TREE_CODE (base1) == ADDR_EXPR)
8725 base1 = TREE_OPERAND (base1, 0);
8726 indirect_base1 = true;
8728 offset1 = TREE_OPERAND (arg1, 1);
8731 /* A local variable can never be pointed to by
8732 the default SSA name of an incoming parameter. */
8733 if ((TREE_CODE (arg0) == ADDR_EXPR
8735 && TREE_CODE (base0) == VAR_DECL
8736 && auto_var_in_fn_p (base0, current_function_decl)
8738 && TREE_CODE (base1) == SSA_NAME
8739 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8740 && SSA_NAME_IS_DEFAULT_DEF (base1))
8741 || (TREE_CODE (arg1) == ADDR_EXPR
8743 && TREE_CODE (base1) == VAR_DECL
8744 && auto_var_in_fn_p (base1, current_function_decl)
8746 && TREE_CODE (base0) == SSA_NAME
8747 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8748 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8750 if (code == NE_EXPR)
8751 return constant_boolean_node (1, type);
8752 else if (code == EQ_EXPR)
8753 return constant_boolean_node (0, type);
8755 /* If we have equivalent bases we might be able to simplify. */
8756 else if (indirect_base0 == indirect_base1
8757 && operand_equal_p (base0, base1, 0))
8759 /* We can fold this expression to a constant if the non-constant
8760 offset parts are equal. */
8761 if ((offset0 == offset1
8762 || (offset0 && offset1
8763 && operand_equal_p (offset0, offset1, 0)))
8766 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8771 && bitpos0 != bitpos1
8772 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8773 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8774 fold_overflow_warning (("assuming pointer wraparound does not "
8775 "occur when comparing P +- C1 with "
8777 WARN_STRICT_OVERFLOW_CONDITIONAL);
8782 return constant_boolean_node (bitpos0 == bitpos1, type);
8784 return constant_boolean_node (bitpos0 != bitpos1, type);
8786 return constant_boolean_node (bitpos0 < bitpos1, type);
8788 return constant_boolean_node (bitpos0 <= bitpos1, type);
8790 return constant_boolean_node (bitpos0 >= bitpos1, type);
8792 return constant_boolean_node (bitpos0 > bitpos1, type);
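/* Example of the decomposition above: with "int a[4];", the comparison
   "&a[1] != &a[2]" has a common base and constant byte offsets 4 and 8
   (assuming a 4-byte int), so it folds to true without ever
   materializing the addresses.  */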
8796 /* We can simplify the comparison to a comparison of the variable
8797 offset parts if the constant offset parts are equal.
8798 Be careful to use signed size type here because otherwise we
8799 mess with array offsets in the wrong way. This is possible
8800 because pointer arithmetic is restricted to remain within an
8801 object and overflow on pointer differences is undefined as of
8802 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8803 else if (bitpos0 == bitpos1
8804 && ((code == EQ_EXPR || code == NE_EXPR)
8805 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8807 /* By converting to signed size type we cover middle-end pointer
8808 arithmetic which operates on unsigned pointer types of size
8809 type size and ARRAY_REF offsets which are properly sign or
8810 zero extended from their type in case it is narrower than
8812 if (offset0 == NULL_TREE)
8813 offset0 = build_int_cst (ssizetype, 0);
8815 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8816 if (offset1 == NULL_TREE)
8817 offset1 = build_int_cst (ssizetype, 0);
8819 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8823 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8824 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8825 fold_overflow_warning (("assuming pointer wraparound does not "
8826 "occur when comparing P +- C1 with "
8828 WARN_STRICT_OVERFLOW_COMPARISON);
8830 return fold_build2_loc (loc, code, type, offset0, offset1);
8833 /* For non-equal bases we can simplify if they are addresses
8834 of local binding decls or constants. */
8835 else if (indirect_base0 && indirect_base1
8836 /* We know that !operand_equal_p (base0, base1, 0)
8837 because the if condition was false. But make
8838 sure two decls are not the same. */
8840 && TREE_CODE (arg0) == ADDR_EXPR
8841 && TREE_CODE (arg1) == ADDR_EXPR
8842 && (((TREE_CODE (base0) == VAR_DECL
8843 || TREE_CODE (base0) == PARM_DECL)
8844 && (targetm.binds_local_p (base0)
8845 || CONSTANT_CLASS_P (base1)))
8846 || CONSTANT_CLASS_P (base0))
8847 && (((TREE_CODE (base1) == VAR_DECL
8848 || TREE_CODE (base1) == PARM_DECL)
8849 && (targetm.binds_local_p (base1)
8850 || CONSTANT_CLASS_P (base0)))
8851 || CONSTANT_CLASS_P (base1)))
8853 if (code == EQ_EXPR)
8854 return omit_two_operands_loc (loc, type, boolean_false_node,
8856 else if (code == NE_EXPR)
8857 return omit_two_operands_loc (loc, type, boolean_true_node,
8860 /* For equal offsets we can simplify to a comparison of the
8862 else if (bitpos0 == bitpos1
8864 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8866 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8867 && ((offset0 == offset1)
8868 || (offset0 && offset1
8869 && operand_equal_p (offset0, offset1, 0))))
8872 base0 = build_fold_addr_expr_loc (loc, base0);
8874 base1 = build_fold_addr_expr_loc (loc, base1);
8875 return fold_build2_loc (loc, code, type, base0, base1);
8879 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8880 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8881 the resulting offset is smaller in absolute value than the
8883 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8884 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8885 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8886 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8887 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8888 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8889 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8891 tree const1 = TREE_OPERAND (arg0, 1);
8892 tree const2 = TREE_OPERAND (arg1, 1);
8893 tree variable1 = TREE_OPERAND (arg0, 0);
8894 tree variable2 = TREE_OPERAND (arg1, 0);
8896 const char * const warnmsg = G_("assuming signed overflow does not "
8897 "occur when combining constants around "
8900 /* Put the constant on the side where it doesn't overflow and is
8901 of lower absolute value than before. */
8902 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8903 ? MINUS_EXPR : PLUS_EXPR,
8905 if (!TREE_OVERFLOW (cst)
8906 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8908 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8909 return fold_build2_loc (loc, code, type,
8911 fold_build2_loc (loc,
8912 TREE_CODE (arg1), TREE_TYPE (arg1),
8916 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8917 ? MINUS_EXPR : PLUS_EXPR,
8919 if (!TREE_OVERFLOW (cst)
8920 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8922 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8923 return fold_build2_loc (loc, code, type,
8924 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
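/* For example, "x + 9 < y + 2" can be rewritten as "x < y + -7"
   (equivalently "x + 7 < y"): the two constants are combined on one
   side, again relying on signed overflow being undefined.  */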
8930 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8931 signed arithmetic case. That form is created by the compiler
8932 often enough for folding it to be of value. One example is in
8933 computing loop trip counts after Operator Strength Reduction. */
8934 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8935 && TREE_CODE (arg0) == MULT_EXPR
8936 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8937 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8938 && integer_zerop (arg1))
8940 tree const1 = TREE_OPERAND (arg0, 1);
8941 tree const2 = arg1; /* zero */
8942 tree variable1 = TREE_OPERAND (arg0, 0);
8943 enum tree_code cmp_code = code;
8945 /* Handle unfolded multiplication by zero. */
8946 if (integer_zerop (const1))
8947 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8949 fold_overflow_warning (("assuming signed overflow does not occur when "
8950 "eliminating multiplication in comparison "
8952 WARN_STRICT_OVERFLOW_COMPARISON);
8954 /* If const1 is negative we swap the sense of the comparison. */
8955 if (tree_int_cst_sgn (const1) < 0)
8956 cmp_code = swap_tree_comparison (cmp_code);
8958 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
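/* E.g. "x * 4 > 0" folds to "x > 0", and "x * -4 > 0" to "x < 0",
   because a negative multiplier flips the sense of the comparison.  */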
8961 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8965 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8967 tree targ0 = strip_float_extensions (arg0);
8968 tree targ1 = strip_float_extensions (arg1);
8969 tree newtype = TREE_TYPE (targ0);
8971 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8972 newtype = TREE_TYPE (targ1);
8974 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8975 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8976 return fold_build2_loc (loc, code, type,
8977 fold_convert_loc (loc, newtype, targ0),
8978 fold_convert_loc (loc, newtype, targ1));
8980 /* (-a) CMP (-b) -> b CMP a */
8981 if (TREE_CODE (arg0) == NEGATE_EXPR
8982 && TREE_CODE (arg1) == NEGATE_EXPR)
8983 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8984 TREE_OPERAND (arg0, 0));
8986 if (TREE_CODE (arg1) == REAL_CST)
8988 REAL_VALUE_TYPE cst;
8989 cst = TREE_REAL_CST (arg1);
8991 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8992 if (TREE_CODE (arg0) == NEGATE_EXPR)
8993 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8994 TREE_OPERAND (arg0, 0),
8995 build_real (TREE_TYPE (arg1),
8996 real_value_negate (&cst)));
8998 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8999 /* a CMP (-0) -> a CMP 0 */
9000 if (REAL_VALUE_MINUS_ZERO (cst))
9001 return fold_build2_loc (loc, code, type, arg0,
9002 build_real (TREE_TYPE (arg1), dconst0));
9004 /* x != NaN is always true, other ops are always false. */
9005 if (REAL_VALUE_ISNAN (cst)
9006 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9008 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9009 return omit_one_operand_loc (loc, type, tem, arg0);
9012 /* Fold comparisons against infinity. */
9013 if (REAL_VALUE_ISINF (cst)
9014 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9016 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9017 if (tem != NULL_TREE)
9022 /* If this is a comparison of a real constant with a PLUS_EXPR
9023 or a MINUS_EXPR of a real constant, we can convert it into a
9024 comparison with a revised real constant as long as no overflow
9025 occurs when unsafe_math_optimizations are enabled. */
9026 if (flag_unsafe_math_optimizations
9027 && TREE_CODE (arg1) == REAL_CST
9028 && (TREE_CODE (arg0) == PLUS_EXPR
9029 || TREE_CODE (arg0) == MINUS_EXPR)
9030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9031 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9032 ? MINUS_EXPR : PLUS_EXPR,
9033 arg1, TREE_OPERAND (arg0, 1)))
9034 && !TREE_OVERFLOW (tem))
9035 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9037 /* Likewise, we can simplify a comparison of a real constant with
9038 a MINUS_EXPR whose first operand is also a real constant, i.e.
9039 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9040 floating-point types only if -fassociative-math is set. */
9041 if (flag_associative_math
9042 && TREE_CODE (arg1) == REAL_CST
9043 && TREE_CODE (arg0) == MINUS_EXPR
9044 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9045 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9047 && !TREE_OVERFLOW (tem))
9048 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9049 TREE_OPERAND (arg0, 1), tem);
9051 /* Fold comparisons against built-in math functions. */
9052 if (TREE_CODE (arg1) == REAL_CST
9053 && flag_unsafe_math_optimizations
9054 && ! flag_errno_math)
9056 enum built_in_function fcode = builtin_mathfn_code (arg0);
9058 if (fcode != END_BUILTINS)
9060 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9061 if (tem != NULL_TREE)
9067 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9068 && CONVERT_EXPR_P (arg0))
9070 /* If we are widening one operand of an integer comparison,
9071 see if the other operand is similarly being widened. Perhaps we
9072 can do the comparison in the narrower type. */
9073 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9077 /* Or if we are changing signedness. */
9078 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9083 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9084 constant, we can simplify it. */
9085 if (TREE_CODE (arg1) == INTEGER_CST
9086 && (TREE_CODE (arg0) == MIN_EXPR
9087 || TREE_CODE (arg0) == MAX_EXPR)
9088 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9090 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9095 /* Simplify comparison of something with itself. (For IEEE
9096 floating-point, we can only do some of these simplifications.) */
9097 if (operand_equal_p (arg0, arg1, 0))
9102 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9103 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9104 return constant_boolean_node (1, type);
9109 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9110 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9111 return constant_boolean_node (1, type);
9112 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9115 /* For NE, we can only do this simplification if integer
9116 or we don't honor IEEE floating point NaNs. */
9117 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9118 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9120 /* ... fall through ... */
9123 return constant_boolean_node (0, type);
9129 /* If we are comparing an expression that just has comparisons
9130 of two integer values, arithmetic expressions of those comparisons,
9131 and constants, we can simplify it. There are only three cases
9132 to check: the two values can either be equal, the first can be
9133 greater, or the second can be greater. Fold the expression for
9134 those three values. Since each value must be 0 or 1, we have
9135 eight possibilities, each of which corresponds to the constant 0
9136 or 1 or one of the six possible comparisons.
9138 This handles common cases like (a > b) == 0 but also handles
9139 expressions like ((x > y) - (y > x)) > 0, which supposedly
9140 occur in macroized code. */
9142 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9144 tree cval1 = 0, cval2 = 0;
9147 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9148 /* Don't handle degenerate cases here; they should already
9149 have been handled anyway. */
9150 && cval1 != 0 && cval2 != 0
9151 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9152 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9153 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9154 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9155 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9156 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9157 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9159 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9160 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9162 /* We can't just pass T to eval_subst in case cval1 or cval2
9163 was the same as ARG1. */
9166 = fold_build2_loc (loc, code, type,
9167 eval_subst (loc, arg0, cval1, maxval,
9171 = fold_build2_loc (loc, code, type,
9172 eval_subst (loc, arg0, cval1, maxval,
9176 = fold_build2_loc (loc, code, type,
9177 eval_subst (loc, arg0, cval1, minval,
9181 /* All three of these results should be 0 or 1. Confirm they are.
9182 Then use those values to select the proper code to use. */
9184 if (TREE_CODE (high_result) == INTEGER_CST
9185 && TREE_CODE (equal_result) == INTEGER_CST
9186 && TREE_CODE (low_result) == INTEGER_CST)
9188 /* Make a 3-bit mask with the high-order bit being the
9189 value for `>', the next for `=', and the low for `<'. */
9190 switch ((integer_onep (high_result) * 4)
9191 + (integer_onep (equal_result) * 2)
9192 + integer_onep (low_result))
9196 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9217 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9222 tem = save_expr (build2 (code, type, cval1, cval2));
9223 SET_EXPR_LOCATION (tem, loc);
9226 return fold_build2_loc (loc, code, type, cval1, cval2);
9231 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9232 into a single range test. */
9233 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9234 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9235 && TREE_CODE (arg1) == INTEGER_CST
9236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9237 && !integer_zerop (TREE_OPERAND (arg0, 1))
9238 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9239 && !TREE_OVERFLOW (arg1))
9241 tem = fold_div_compare (loc, code, type, arg0, arg1);
9242 if (tem != NULL_TREE)
9246 /* Fold ~X op ~Y as Y op X. */
9247 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9248 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9250 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9251 return fold_build2_loc (loc, code, type,
9252 fold_convert_loc (loc, cmp_type,
9253 TREE_OPERAND (arg1, 0)),
9254 TREE_OPERAND (arg0, 0));
9257 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9258 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9259 && TREE_CODE (arg1) == INTEGER_CST)
9261 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9262 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9263 TREE_OPERAND (arg0, 0),
9264 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9265 fold_convert_loc (loc, cmp_type, arg1)));
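/* Example: "~x < 5" becomes "x > ~5", i.e. "x > -6", since
   ~x == -x - 1 and the comparison direction swaps.  */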
9272 /* Subroutine of fold_binary. Optimize complex multiplications of the
9273 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9274 argument EXPR represents the expression "z" of type TYPE. */
9277 fold_mult_zconjz (location_t loc, tree type, tree expr)
9279 tree itype = TREE_TYPE (type);
9280 tree rpart, ipart, tem;
9282 if (TREE_CODE (expr) == COMPLEX_EXPR)
9284 rpart = TREE_OPERAND (expr, 0);
9285 ipart = TREE_OPERAND (expr, 1);
9287 else if (TREE_CODE (expr) == COMPLEX_CST)
9289 rpart = TREE_REALPART (expr);
9290 ipart = TREE_IMAGPART (expr);
9294 expr = save_expr (expr);
9295 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9296 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9299 rpart = save_expr (rpart);
9300 ipart = save_expr (ipart);
9301 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9302 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9303 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9304 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9305 build_zero_cst (itype));
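/* The identity used above: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part, which is exactly the COMPLEX_EXPR built at the end
   of this function.  */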
9309 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9310 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9311 guarantees that P and N have the same least significant log2(M) bits.
9312 N is not otherwise constrained. In particular, N is not normalized to
9313 0 <= N < M as is common. In general, the precise value of P is unknown.
9314 M is chosen as large as possible such that constant N can be determined.
9316 Returns M and sets *RESIDUE to N.
9318 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9319 account. This is not always possible due to PR 35705.
9322 static unsigned HOST_WIDE_INT
9323 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9324 bool allow_func_align)
9326 enum tree_code code;
9330 code = TREE_CODE (expr);
9331 if (code == ADDR_EXPR)
9333 expr = TREE_OPERAND (expr, 0);
9334 if (handled_component_p (expr))
9336 HOST_WIDE_INT bitsize, bitpos;
9338 enum machine_mode mode;
9339 int unsignedp, volatilep;
9341 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9342 &mode, &unsignedp, &volatilep, false);
9343 *residue = bitpos / BITS_PER_UNIT;
9346 if (TREE_CODE (offset) == INTEGER_CST)
9347 *residue += TREE_INT_CST_LOW (offset);
9349 /* We don't handle more complicated offset expressions. */
9355 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9356 return DECL_ALIGN_UNIT (expr);
9358 else if (code == POINTER_PLUS_EXPR)
9361 unsigned HOST_WIDE_INT modulus;
9362 enum tree_code inner_code;
9364 op0 = TREE_OPERAND (expr, 0);
9366 modulus = get_pointer_modulus_and_residue (op0, residue,
9369 op1 = TREE_OPERAND (expr, 1);
9371 inner_code = TREE_CODE (op1);
9372 if (inner_code == INTEGER_CST)
9374 *residue += TREE_INT_CST_LOW (op1);
9377 else if (inner_code == MULT_EXPR)
9379 op1 = TREE_OPERAND (op1, 1);
9380 if (TREE_CODE (op1) == INTEGER_CST)
9382 unsigned HOST_WIDE_INT align;
9384 /* Compute the greatest power-of-2 divisor of op1. */
9385 align = TREE_INT_CST_LOW (op1);
9388 /* If align is non-zero and less than *modulus, replace
9389 *modulus with align. If align is 0, then either op1 is 0
9390 or the greatest power-of-2 divisor of op1 doesn't fit in an
9391 unsigned HOST_WIDE_INT. In either case, no additional
9392 constraint is imposed. */
9394 modulus = MIN (modulus, align);
9401 /* If we get here, we were unable to determine anything useful about the
9407 /* Fold a binary expression of code CODE and type TYPE with operands
9408 OP0 and OP1. LOC is the location of the resulting expression.
9409 Return the folded expression if folding is successful. Otherwise,
9410 return NULL_TREE. */
9413 fold_binary_loc (location_t loc,
9414 enum tree_code code, tree type, tree op0, tree op1)
9416 enum tree_code_class kind = TREE_CODE_CLASS (code);
9417 tree arg0, arg1, tem;
9418 tree t1 = NULL_TREE;
9419 bool strict_overflow_p;
9421 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9422 && TREE_CODE_LENGTH (code) == 2
9424 && op1 != NULL_TREE);
9429 /* Strip any conversions that don't change the mode. This is
9430 safe for every expression, except for a comparison expression
9431 because its signedness is derived from its operands. So, in
9432 the latter case, only strip conversions that don't change the
9433 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9436 Note that this is done as an internal manipulation within the
9437 constant folder, in order to find the simplest representation
9438 of the arguments so that their form can be studied. In any
9439 case, the appropriate type conversions should be put back in
9440 the tree that will get out of the constant folder. */
9442 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9444 STRIP_SIGN_NOPS (arg0);
9445 STRIP_SIGN_NOPS (arg1);
9453 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9454 constant but we can't do arithmetic on them. */
9455 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9456 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9457 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9458 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9459 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9460 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9462 if (kind == tcc_binary)
9464 /* Make sure type and arg0 have the same saturating flag. */
9465 gcc_assert (TYPE_SATURATING (type)
9466 == TYPE_SATURATING (TREE_TYPE (arg0)));
9467 tem = const_binop (code, arg0, arg1);
9469 else if (kind == tcc_comparison)
9470 tem = fold_relational_const (code, type, arg0, arg1);
9474 if (tem != NULL_TREE)
9476 if (TREE_TYPE (tem) != type)
9477 tem = fold_convert_loc (loc, type, tem);
9482 /* If this is a commutative operation, and ARG0 is a constant, move it
9483 to ARG1 to reduce the number of tests below. */
9484 if (commutative_tree_code (code)
9485 && tree_swap_operands_p (arg0, arg1, true))
9486 return fold_build2_loc (loc, code, type, op1, op0);
9488 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9490 First check for cases where an arithmetic operation is applied to a
9491 compound, conditional, or comparison operation. Push the arithmetic
9492 operation inside the compound or conditional to see if any folding
9493 can then be done. Convert comparison to conditional for this purpose.
9494 This also optimizes non-constant cases that used to be done in
9497 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9498 one of the operands is a comparison and the other is a comparison, a
9499 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9500 code below would make the expression more complex. Change it to a
9501 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9502 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9504 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9505 || code == EQ_EXPR || code == NE_EXPR)
9506 && ((truth_value_p (TREE_CODE (arg0))
9507 && (truth_value_p (TREE_CODE (arg1))
9508 || (TREE_CODE (arg1) == BIT_AND_EXPR
9509 && integer_onep (TREE_OPERAND (arg1, 1)))))
9510 || (truth_value_p (TREE_CODE (arg1))
9511 && (truth_value_p (TREE_CODE (arg0))
9512 || (TREE_CODE (arg0) == BIT_AND_EXPR
9513 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9515 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9516 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9519 fold_convert_loc (loc, boolean_type_node, arg0),
9520 fold_convert_loc (loc, boolean_type_node, arg1));
9522 if (code == EQ_EXPR)
9523 tem = invert_truthvalue_loc (loc, tem);
9525 return fold_convert_loc (loc, type, tem);
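/* For instance "(a < b) & (c == d)", where both operands are
   comparisons, is rewritten as a TRUTH_AND_EXPR of the two
   boolean-converted operands so the logical simplifiers can see it;
   an EQ_EXPR of two truth values becomes the inverted TRUTH_XOR_EXPR.  */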
9528 if (TREE_CODE_CLASS (code) == tcc_binary
9529 || TREE_CODE_CLASS (code) == tcc_comparison)
9531 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9533 tem = fold_build2_loc (loc, code, type,
9534 fold_convert_loc (loc, TREE_TYPE (op0),
9535 TREE_OPERAND (arg0, 1)), op1);
9536 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9537 goto fold_binary_exit;
9539 if (TREE_CODE (arg1) == COMPOUND_EXPR
9540 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9542 tem = fold_build2_loc (loc, code, type, op0,
9543 fold_convert_loc (loc, TREE_TYPE (op1),
9544 TREE_OPERAND (arg1, 1)));
9545 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9546 goto fold_binary_exit;
9549 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9551 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9553 /*cond_first_p=*/1);
9554 if (tem != NULL_TREE)
9558 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9560 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9562 /*cond_first_p=*/0);
9563 if (tem != NULL_TREE)
9571 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9572 if (TREE_CODE (arg0) == ADDR_EXPR
9573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9575 tree iref = TREE_OPERAND (arg0, 0);
9576 return fold_build2 (MEM_REF, type,
9577 TREE_OPERAND (iref, 0),
9578 int_const_binop (PLUS_EXPR, arg1,
9579 TREE_OPERAND (iref, 1), 0));
9582 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9583 if (TREE_CODE (arg0) == ADDR_EXPR
9584 && handled_component_p (TREE_OPERAND (arg0, 0)))
9587 HOST_WIDE_INT coffset;
9588 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9592 return fold_build2 (MEM_REF, type,
9593 build_fold_addr_expr (base),
9594 int_const_binop (PLUS_EXPR, arg1,
9595 size_int (coffset), 0));
9600 case POINTER_PLUS_EXPR:
9601 /* 0 +p index -> (type)index */
9602 if (integer_zerop (arg0))
9603 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9605 /* PTR +p 0 -> PTR */
9606 if (integer_zerop (arg1))
9607 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9609 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9610 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9611 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9612 return fold_convert_loc (loc, type,
9613 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9614 fold_convert_loc (loc, sizetype,
9616 fold_convert_loc (loc, sizetype,
9619 /* index +p PTR -> PTR +p index */
9620 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9621 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9622 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9623 fold_convert_loc (loc, type, arg1),
9624 fold_convert_loc (loc, sizetype, arg0));
9626 /* (PTR +p B) +p A -> PTR +p (B + A) */
9627 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9630 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9631 tree arg00 = TREE_OPERAND (arg0, 0);
9632 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9633 arg01, fold_convert_loc (loc, sizetype, arg1));
9634 return fold_convert_loc (loc, type,
9635 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9640 /* PTR_CST +p CST -> CST1 */
9641 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9642 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9643 fold_convert_loc (loc, type, arg1));
9645 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9646 of the array. Loop optimizer sometimes produce this type of
9648 if (TREE_CODE (arg0) == ADDR_EXPR)
9650 tem = try_move_mult_to_index (loc, arg0,
9651 fold_convert_loc (loc, sizetype, arg1));
9653 return fold_convert_loc (loc, type, tem);
9659 /* A + (-B) -> A - B */
9660 if (TREE_CODE (arg1) == NEGATE_EXPR)
9661 return fold_build2_loc (loc, MINUS_EXPR, type,
9662 fold_convert_loc (loc, type, arg0),
9663 fold_convert_loc (loc, type,
9664 TREE_OPERAND (arg1, 0)));
9665 /* (-A) + B -> B - A */
9666 if (TREE_CODE (arg0) == NEGATE_EXPR
9667 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9668 return fold_build2_loc (loc, MINUS_EXPR, type,
9669 fold_convert_loc (loc, type, arg1),
9670 fold_convert_loc (loc, type,
9671 TREE_OPERAND (arg0, 0)));
9673 if (INTEGRAL_TYPE_P (type))
9675 /* Convert ~A + 1 to -A. */
9676 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9677 && integer_onep (arg1))
9678 return fold_build1_loc (loc, NEGATE_EXPR, type,
9679 fold_convert_loc (loc, type,
9680 TREE_OPERAND (arg0, 0)));
9683 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9684 && !TYPE_OVERFLOW_TRAPS (type))
9686 tree tem = TREE_OPERAND (arg0, 0);
9689 if (operand_equal_p (tem, arg1, 0))
9691 t1 = build_int_cst_type (type, -1);
9692 return omit_one_operand_loc (loc, type, t1, arg1);
9697 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9698 && !TYPE_OVERFLOW_TRAPS (type))
9700 tree tem = TREE_OPERAND (arg1, 0);
9703 if (operand_equal_p (arg0, tem, 0))
9705 t1 = build_int_cst_type (type, -1);
9706 return omit_one_operand_loc (loc, type, t1, arg0);
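/* Both branches rely on the two's-complement identity X + ~X == -1
   (all bits set); e.g. 5 + ~5 == 5 + (-6) == -1.  */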
9710 /* X + (X / CST) * -CST is X % CST. */
9711 if (TREE_CODE (arg1) == MULT_EXPR
9712 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9713 && operand_equal_p (arg0,
9714 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9716 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9717 tree cst1 = TREE_OPERAND (arg1, 1);
9718 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9720 if (sum && integer_zerop (sum))
9721 return fold_convert_loc (loc, type,
9722 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9723 TREE_TYPE (arg0), arg0,
9728 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9729 same or one. Make sure type is not saturating.
9730 fold_plusminus_mult_expr will re-associate. */
9731 if ((TREE_CODE (arg0) == MULT_EXPR
9732 || TREE_CODE (arg1) == MULT_EXPR)
9733 && !TYPE_SATURATING (type)
9734 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9736 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9741 if (! FLOAT_TYPE_P (type))
9743 if (integer_zerop (arg1))
9744 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9746 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9747 with a constant, and the two constants have no bits in common,
9748 we should treat this as a BIT_IOR_EXPR since this may produce more
9750 if (TREE_CODE (arg0) == BIT_AND_EXPR
9751 && TREE_CODE (arg1) == BIT_AND_EXPR
9752 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9753 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9754 && integer_zerop (const_binop (BIT_AND_EXPR,
9755 TREE_OPERAND (arg0, 1),
9756 TREE_OPERAND (arg1, 1))))
9758 code = BIT_IOR_EXPR;
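/* E.g. (x & 0xF0) + (y & 0x0F): the masks share no bits, so no carries
   can propagate between the two terms and the addition is equivalent to
   a bitwise OR, which tends to fold further.  */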
9762 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9763 (plus (plus (mult) (mult)) (foo)) so that we can
9764 take advantage of the factoring cases below. */
9765 if (((TREE_CODE (arg0) == PLUS_EXPR
9766 || TREE_CODE (arg0) == MINUS_EXPR)
9767 && TREE_CODE (arg1) == MULT_EXPR)
9768 || ((TREE_CODE (arg1) == PLUS_EXPR
9769 || TREE_CODE (arg1) == MINUS_EXPR)
9770 && TREE_CODE (arg0) == MULT_EXPR))
9772 tree parg0, parg1, parg, marg;
9773 enum tree_code pcode;
9775 if (TREE_CODE (arg1) == MULT_EXPR)
9776 parg = arg0, marg = arg1;
9778 parg = arg1, marg = arg0;
9779 pcode = TREE_CODE (parg);
9780 parg0 = TREE_OPERAND (parg, 0);
9781 parg1 = TREE_OPERAND (parg, 1);
9785 if (TREE_CODE (parg0) == MULT_EXPR
9786 && TREE_CODE (parg1) != MULT_EXPR)
9787 return fold_build2_loc (loc, pcode, type,
9788 fold_build2_loc (loc, PLUS_EXPR, type,
9789 fold_convert_loc (loc, type,
9791 fold_convert_loc (loc, type,
9793 fold_convert_loc (loc, type, parg1));
9794 if (TREE_CODE (parg0) != MULT_EXPR
9795 && TREE_CODE (parg1) == MULT_EXPR)
9797 fold_build2_loc (loc, PLUS_EXPR, type,
9798 fold_convert_loc (loc, type, parg0),
9799 fold_build2_loc (loc, pcode, type,
9800 fold_convert_loc (loc, type, marg),
9801 fold_convert_loc (loc, type,
9807 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9808 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9809 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9811 /* Likewise if the operands are reversed. */
9812 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9813 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9815 /* Convert X + -C into X - C. */
9816 if (TREE_CODE (arg1) == REAL_CST
9817 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9819 tem = fold_negate_const (arg1, type);
9820 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9821 return fold_build2_loc (loc, MINUS_EXPR, type,
9822 fold_convert_loc (loc, type, arg0),
9823 fold_convert_loc (loc, type, tem));
9826 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9827 to __complex__ ( x, y ). This is not the same for SNaNs or
9828 if signed zeros are involved. */
9829 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9830 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9831 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9833 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9834 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9835 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9836 bool arg0rz = false, arg0iz = false;
9837 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9838 || (arg0i && (arg0iz = real_zerop (arg0i))))
9840 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9841 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9842 if (arg0rz && arg1i && real_zerop (arg1i))
9844 tree rp = arg1r ? arg1r
9845 : build1 (REALPART_EXPR, rtype, arg1);
9846 tree ip = arg0i ? arg0i
9847 : build1 (IMAGPART_EXPR, rtype, arg0);
9848 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9850 else if (arg0iz && arg1r && real_zerop (arg1r))
9852 tree rp = arg0r ? arg0r
9853 : build1 (REALPART_EXPR, rtype, arg0);
9854 tree ip = arg1i ? arg1i
9855 : build1 (IMAGPART_EXPR, rtype, arg1);
9856 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9861 if (flag_unsafe_math_optimizations
9862 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9863 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9864 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9867 /* Convert x+x into x*2.0. */
9868 if (operand_equal_p (arg0, arg1, 0)
9869 && SCALAR_FLOAT_TYPE_P (type))
9870 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9871 build_real (type, dconst2));
9873 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9874 We associate floats only if the user has specified
9875 -fassociative-math. */
9876 if (flag_associative_math
9877 && TREE_CODE (arg1) == PLUS_EXPR
9878 && TREE_CODE (arg0) != MULT_EXPR)
9880 tree tree10 = TREE_OPERAND (arg1, 0);
9881 tree tree11 = TREE_OPERAND (arg1, 1);
9882 if (TREE_CODE (tree11) == MULT_EXPR
9883 && TREE_CODE (tree10) == MULT_EXPR)
9886 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9887 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9890 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9891 We associate floats only if the user has specified
9892 -fassociative-math. */
9893 if (flag_associative_math
9894 && TREE_CODE (arg0) == PLUS_EXPR
9895 && TREE_CODE (arg1) != MULT_EXPR)
9897 tree tree00 = TREE_OPERAND (arg0, 0);
9898 tree tree01 = TREE_OPERAND (arg0, 1);
9899 if (TREE_CODE (tree01) == MULT_EXPR
9900 && TREE_CODE (tree00) == MULT_EXPR)
9903 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9904 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9910 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9911 is a rotate of A by C1 bits. */
9912 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9913 is a rotate of A by B bits. */
9915 enum tree_code code0, code1;
9917 code0 = TREE_CODE (arg0);
9918 code1 = TREE_CODE (arg1);
9919 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9920 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9921 && operand_equal_p (TREE_OPERAND (arg0, 0),
9922 TREE_OPERAND (arg1, 0), 0)
9923 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9924 TYPE_UNSIGNED (rtype))
9925 /* Only create rotates in complete modes. Other cases are not
9926 expanded properly. */
9927 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9929 tree tree01, tree11;
9930 enum tree_code code01, code11;
9932 tree01 = TREE_OPERAND (arg0, 1);
9933 tree11 = TREE_OPERAND (arg1, 1);
9934 STRIP_NOPS (tree01);
9935 STRIP_NOPS (tree11);
9936 code01 = TREE_CODE (tree01);
9937 code11 = TREE_CODE (tree11);
9938 if (code01 == INTEGER_CST
9939 && code11 == INTEGER_CST
9940 && TREE_INT_CST_HIGH (tree01) == 0
9941 && TREE_INT_CST_HIGH (tree11) == 0
9942 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9943 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9945 tem = build2 (LROTATE_EXPR,
9946 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9947 TREE_OPERAND (arg0, 0),
9948 code0 == LSHIFT_EXPR
9950 SET_EXPR_LOCATION (tem, loc);
9951 return fold_convert_loc (loc, type, tem);
9953 else if (code11 == MINUS_EXPR)
9955 tree tree110, tree111;
9956 tree110 = TREE_OPERAND (tree11, 0);
9957 tree111 = TREE_OPERAND (tree11, 1);
9958 STRIP_NOPS (tree110);
9959 STRIP_NOPS (tree111);
9960 if (TREE_CODE (tree110) == INTEGER_CST
9961 && 0 == compare_tree_int (tree110,
9963 (TREE_TYPE (TREE_OPERAND
9965 && operand_equal_p (tree01, tree111, 0))
9967 fold_convert_loc (loc, type,
9968 build2 ((code0 == LSHIFT_EXPR
9971 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9972 TREE_OPERAND (arg0, 0), tree01));
9974 else if (code01 == MINUS_EXPR)
9976 tree tree010, tree011;
9977 tree010 = TREE_OPERAND (tree01, 0);
9978 tree011 = TREE_OPERAND (tree01, 1);
9979 STRIP_NOPS (tree010);
9980 STRIP_NOPS (tree011);
9981 if (TREE_CODE (tree010) == INTEGER_CST
9982 && 0 == compare_tree_int (tree010,
9984 (TREE_TYPE (TREE_OPERAND
9986 && operand_equal_p (tree11, tree011, 0))
9987 return fold_convert_loc
9989 build2 ((code0 != LSHIFT_EXPR
9992 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9993 TREE_OPERAND (arg0, 0), tree11));
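/* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) is recognized here
   as a left rotate of A by 3, and likewise (A << B) + (A >> (32 - B)) as
   a rotate by B; treating the addition as a rotate is safe because the
   two shifted halves cannot overlap.  */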
9999 /* In most languages, we can't associate operations on floats through
10000 parentheses. Rather than remember where the parentheses were, we
10001 don't associate floats at all, unless the user has specified
10002 -fassociative-math.
10003 And, we need to make sure type is not saturating. */
10005 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10006 && !TYPE_SATURATING (type))
10008 tree var0, con0, lit0, minus_lit0;
10009 tree var1, con1, lit1, minus_lit1;
10012 /* Split both trees into variables, constants, and literals. Then
10013 associate each group together, the constants with literals,
10014 then the result with variables. This increases the chances of
10015 literals being recombined later and of generating relocatable
10016 expressions for the sum of a constant and literal. */
10017 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10018 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10019 code == MINUS_EXPR);
10021 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10022 if (code == MINUS_EXPR)
10025 /* With undefined overflow we can only associate constants with one
10026 variable, and constants whose association doesn't overflow. */
10027 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10028 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10035 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10036 tmp0 = TREE_OPERAND (tmp0, 0);
10037 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10038 tmp1 = TREE_OPERAND (tmp1, 0);
10039 /* The only case we can still associate with two variables
10040 is if they are the same, modulo negation. */
10041 if (!operand_equal_p (tmp0, tmp1, 0))
10045 if (ok && lit0 && lit1)
10047 tree tmp0 = fold_convert (type, lit0);
10048 tree tmp1 = fold_convert (type, lit1);
10050 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10051 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10056 /* Only do something if we found more than two objects. Otherwise,
10057 nothing has changed and we risk infinite recursion. */
10059 && (2 < ((var0 != 0) + (var1 != 0)
10060 + (con0 != 0) + (con1 != 0)
10061 + (lit0 != 0) + (lit1 != 0)
10062 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10064 var0 = associate_trees (loc, var0, var1, code, type);
10065 con0 = associate_trees (loc, con0, con1, code, type);
10066 lit0 = associate_trees (loc, lit0, lit1, code, type);
10067 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10069 /* Preserve the MINUS_EXPR if the negative part of the literal is
10070 greater than the positive part. Otherwise, the multiplicative
10071 folding code (i.e. extract_muldiv) may be fooled in case
10072 unsigned constants are subtracted, like in the following
10073 example: ((X*2 + 4) - 8U)/2. */
10074 if (minus_lit0 && lit0)
10076 if (TREE_CODE (lit0) == INTEGER_CST
10077 && TREE_CODE (minus_lit0) == INTEGER_CST
10078 && tree_int_cst_lt (lit0, minus_lit0))
10080 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10086 lit0 = associate_trees (loc, lit0, minus_lit0,
10095 fold_convert_loc (loc, type,
10096 associate_trees (loc, var0, minus_lit0,
10097 MINUS_EXPR, type));
10100 con0 = associate_trees (loc, con0, minus_lit0,
10103 fold_convert_loc (loc, type,
10104 associate_trees (loc, var0, con0,
10109 con0 = associate_trees (loc, con0, lit0, code, type);
10111 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10119 /* Pointer simplifications for subtraction, simple reassociations. */
10120 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10122 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10123 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10124 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10126 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10127 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10128 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10129 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10130 return fold_build2_loc (loc, PLUS_EXPR, type,
10131 fold_build2_loc (loc, MINUS_EXPR, type,
10133 fold_build2_loc (loc, MINUS_EXPR, type,
10136 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10137 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10139 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10140 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10141 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10142 fold_convert_loc (loc, type, arg1));
10144 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10147 /* A - (-B) -> A + B */
10148 if (TREE_CODE (arg1) == NEGATE_EXPR)
10149 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10150 fold_convert_loc (loc, type,
10151 TREE_OPERAND (arg1, 0)));
10152 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10153 if (TREE_CODE (arg0) == NEGATE_EXPR
10154 && (FLOAT_TYPE_P (type)
10155 || INTEGRAL_TYPE_P (type))
10156 && negate_expr_p (arg1)
10157 && reorder_operands_p (arg0, arg1))
10158 return fold_build2_loc (loc, MINUS_EXPR, type,
10159 fold_convert_loc (loc, type,
10160 negate_expr (arg1)),
10161 fold_convert_loc (loc, type,
10162 TREE_OPERAND (arg0, 0)));
10163 /* Convert -A - 1 to ~A. */
10164 if (INTEGRAL_TYPE_P (type)
10165 && TREE_CODE (arg0) == NEGATE_EXPR
10166 && integer_onep (arg1)
10167 && !TYPE_OVERFLOW_TRAPS (type))
10168 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10169 fold_convert_loc (loc, type,
10170 TREE_OPERAND (arg0, 0)));
10172 /* Convert -1 - A to ~A. */
10173 if (INTEGRAL_TYPE_P (type)
10174 && integer_all_onesp (arg0))
10175 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10178 /* X - (X / CST) * CST is X % CST. */
10179 if (INTEGRAL_TYPE_P (type)
10180 && TREE_CODE (arg1) == MULT_EXPR
10181 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10182 && operand_equal_p (arg0,
10183 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10184 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10185 TREE_OPERAND (arg1, 1), 0))
10187 fold_convert_loc (loc, type,
10188 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10189 arg0, TREE_OPERAND (arg1, 1)));
10191 if (! FLOAT_TYPE_P (type))
10193 if (integer_zerop (arg0))
10194 return negate_expr (fold_convert_loc (loc, type, arg1));
10195 if (integer_zerop (arg1))
10196 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10198 /* Fold A - (A & B) into ~B & A. */
10199 if (!TREE_SIDE_EFFECTS (arg0)
10200 && TREE_CODE (arg1) == BIT_AND_EXPR)
10202 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10204 tree arg10 = fold_convert_loc (loc, type,
10205 TREE_OPERAND (arg1, 0));
10206 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10207 fold_build1_loc (loc, BIT_NOT_EXPR,
10209 fold_convert_loc (loc, type, arg0));
10211 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10213 tree arg11 = fold_convert_loc (loc,
10214 type, TREE_OPERAND (arg1, 1));
10215 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10216 fold_build1_loc (loc, BIT_NOT_EXPR,
10218 fold_convert_loc (loc, type, arg0));
10222 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10223 any power of 2 minus 1. */
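/* Worked example with B == 7 and A == 13: (13 & ~7) - (13 & 7) == 8 - 5 == 3,
   and (13 ^ 7) - 7 == 10 - 7 == 3.  */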
10224 if (TREE_CODE (arg0) == BIT_AND_EXPR
10225 && TREE_CODE (arg1) == BIT_AND_EXPR
10226 && operand_equal_p (TREE_OPERAND (arg0, 0),
10227 TREE_OPERAND (arg1, 0), 0))
10229 tree mask0 = TREE_OPERAND (arg0, 1);
10230 tree mask1 = TREE_OPERAND (arg1, 1);
10231 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10233 if (operand_equal_p (tem, mask1, 0))
10235 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10236 TREE_OPERAND (arg0, 0), mask1);
10237 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10242 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10243 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10244 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10246 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10247 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10248 (-ARG1 + ARG0) reduces to -ARG1. */
10249 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10250 return negate_expr (fold_convert_loc (loc, type, arg1));
10252 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10253 __complex__ ( x, -y ). This is not the same for SNaNs or if
10254 signed zeros are involved. */
10255 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10256 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10257 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10259 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10260 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10261 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10262 bool arg0rz = false, arg0iz = false;
10263 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10264 || (arg0i && (arg0iz = real_zerop (arg0i))))
10266 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10267 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10268 if (arg0rz && arg1i && real_zerop (arg1i))
10270 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10272 : build1 (REALPART_EXPR, rtype, arg1));
10273 tree ip = arg0i ? arg0i
10274 : build1 (IMAGPART_EXPR, rtype, arg0);
10275 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10277 else if (arg0iz && arg1r && real_zerop (arg1r))
10279 tree rp = arg0r ? arg0r
10280 : build1 (REALPART_EXPR, rtype, arg0);
10281 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10283 : build1 (IMAGPART_EXPR, rtype, arg1));
10284 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10289 /* Fold &x - &x. This can happen from &x.foo - &x.
10290 This is unsafe for certain floats even in non-IEEE formats.
10291 In IEEE, it is unsafe because it does wrong for NaNs.
10292 Also note that operand_equal_p is always false if an operand is volatile. */
10295 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10296 && operand_equal_p (arg0, arg1, 0))
10297 return build_zero_cst (type);
10299 /* A - B -> A + (-B) if B is easily negatable. */
10300 if (negate_expr_p (arg1)
10301 && ((FLOAT_TYPE_P (type)
10302 /* Avoid this transformation if B is a positive REAL_CST. */
10303 && (TREE_CODE (arg1) != REAL_CST
10304 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10305 || INTEGRAL_TYPE_P (type)))
10306 return fold_build2_loc (loc, PLUS_EXPR, type,
10307 fold_convert_loc (loc, type, arg0),
10308 fold_convert_loc (loc, type,
10309 negate_expr (arg1)));
10311 /* Try folding difference of addresses. */
10313 HOST_WIDE_INT diff;
10315 if ((TREE_CODE (arg0) == ADDR_EXPR
10316 || TREE_CODE (arg1) == ADDR_EXPR)
10317 && ptr_difference_const (arg0, arg1, &diff))
10318 return build_int_cst_type (type, diff);
10321 /* Fold &a[i] - &a[j] to i-j. */
10322 if (TREE_CODE (arg0) == ADDR_EXPR
10323 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10324 && TREE_CODE (arg1) == ADDR_EXPR
10325 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10327 tree aref0 = TREE_OPERAND (arg0, 0);
10328 tree aref1 = TREE_OPERAND (arg1, 0);
10329 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10330 TREE_OPERAND (aref1, 0), 0))
10332 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10333 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10334 tree esz = array_ref_element_size (aref0);
10335 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10336 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10337 fold_convert_loc (loc, type, esz));
10342 if (FLOAT_TYPE_P (type)
10343 && flag_unsafe_math_optimizations
10344 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10345 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10346 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10349 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10350 same or one. Make sure type is not saturating.
10351 fold_plusminus_mult_expr will re-associate. */
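/* E.g. A*C1 - A*C2 can become A*(C1 - C2), and A1*C - A2*C can become
   (A1 - A2)*C.  */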
10352 if ((TREE_CODE (arg0) == MULT_EXPR
10353 || TREE_CODE (arg1) == MULT_EXPR)
10354 && !TYPE_SATURATING (type)
10355 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10357 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10365 /* (-A) * (-B) -> A * B */
10366 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10367 return fold_build2_loc (loc, MULT_EXPR, type,
10368 fold_convert_loc (loc, type,
10369 TREE_OPERAND (arg0, 0)),
10370 fold_convert_loc (loc, type,
10371 negate_expr (arg1)));
10372 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10373 return fold_build2_loc (loc, MULT_EXPR, type,
10374 fold_convert_loc (loc, type,
10375 negate_expr (arg0)),
10376 fold_convert_loc (loc, type,
10377 TREE_OPERAND (arg1, 0)));
10379 if (! FLOAT_TYPE_P (type))
10381 if (integer_zerop (arg1))
10382 return omit_one_operand_loc (loc, type, arg1, arg0);
10383 if (integer_onep (arg1))
10384 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10385 /* Transform x * -1 into -x. Make sure to do the negation
10386 on the original operand with conversions not stripped
10387 because we can only strip non-sign-changing conversions. */
10388 if (integer_all_onesp (arg1))
10389 return fold_convert_loc (loc, type, negate_expr (op0));
10390 /* Transform x * -C into -x * C if x is easily negatable. */
10391 if (TREE_CODE (arg1) == INTEGER_CST
10392 && tree_int_cst_sgn (arg1) == -1
10393 && negate_expr_p (arg0)
10394 && (tem = negate_expr (arg1)) != arg1
10395 && !TREE_OVERFLOW (tem))
10396 return fold_build2_loc (loc, MULT_EXPR, type,
10397 fold_convert_loc (loc, type,
10398 negate_expr (arg0)),
10401 /* (a * (1 << b)) is (a << b) */
10402 if (TREE_CODE (arg1) == LSHIFT_EXPR
10403 && integer_onep (TREE_OPERAND (arg1, 0)))
10404 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10405 TREE_OPERAND (arg1, 1));
10406 if (TREE_CODE (arg0) == LSHIFT_EXPR
10407 && integer_onep (TREE_OPERAND (arg0, 0)))
10408 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10409 TREE_OPERAND (arg0, 1));
10411 /* (A + A) * C -> A * 2 * C */
10412 if (TREE_CODE (arg0) == PLUS_EXPR
10413 && TREE_CODE (arg1) == INTEGER_CST
10414 && operand_equal_p (TREE_OPERAND (arg0, 0),
10415 TREE_OPERAND (arg0, 1), 0))
10416 return fold_build2_loc (loc, MULT_EXPR, type,
10417 omit_one_operand_loc (loc, type,
10418 TREE_OPERAND (arg0, 0),
10419 TREE_OPERAND (arg0, 1)),
10420 fold_build2_loc (loc, MULT_EXPR, type,
10421 build_int_cst (type, 2) , arg1));
10423 strict_overflow_p = false;
10424 if (TREE_CODE (arg1) == INTEGER_CST
10425 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10426 &strict_overflow_p)))
10428 if (strict_overflow_p)
10429 fold_overflow_warning (("assuming signed overflow does not "
10430 "occur when simplifying "
10432 WARN_STRICT_OVERFLOW_MISC);
10433 return fold_convert_loc (loc, type, tem);
10436 /* Optimize z * conj(z) for integer complex numbers. */
10437 if (TREE_CODE (arg0) == CONJ_EXPR
10438 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10439 return fold_mult_zconjz (loc, type, arg1);
10440 if (TREE_CODE (arg1) == CONJ_EXPR
10441 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10442 return fold_mult_zconjz (loc, type, arg0);
10446 /* Maybe fold x * 0 to 0. The expressions aren't the same
10447 when x is NaN, since x * 0 is also NaN. Nor are they the
10448 same in modes with signed zeros, since multiplying a
10449 negative value by 0 gives -0, not +0. */
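/* E.g. (-3.0) * 0.0 yields -0.0 under IEEE rules, so replacing the product
   with the literal +0.0 would change the sign bit.  */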
10450 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10451 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10452 && real_zerop (arg1))
10453 return omit_one_operand_loc (loc, type, arg1, arg0);
10454 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10455 Likewise for complex arithmetic with signed zeros. */
10456 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10457 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10458 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10459 && real_onep (arg1))
10460 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10462 /* Transform x * -1.0 into -x. */
10463 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10464 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10465 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10466 && real_minus_onep (arg1))
10467 return fold_convert_loc (loc, type, negate_expr (arg0));
10469 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10470 the result for floating point types due to rounding, so it is applied
10471 only if -fassociative-math was specified. */
10472 if (flag_associative_math
10473 && TREE_CODE (arg0) == RDIV_EXPR
10474 && TREE_CODE (arg1) == REAL_CST
10475 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10477 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10480 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10481 TREE_OPERAND (arg0, 1));
10484 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10485 if (operand_equal_p (arg0, arg1, 0))
10487 tree tem = fold_strip_sign_ops (arg0);
10488 if (tem != NULL_TREE)
10490 tem = fold_convert_loc (loc, type, tem);
10491 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10495 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10496 This is not the same for NaNs or if signed zeros are involved. */
10498 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10499 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10500 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10501 && TREE_CODE (arg1) == COMPLEX_CST
10502 && real_zerop (TREE_REALPART (arg1)))
10504 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10505 if (real_onep (TREE_IMAGPART (arg1)))
10507 fold_build2_loc (loc, COMPLEX_EXPR, type,
10508 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10510 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10511 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10513 fold_build2_loc (loc, COMPLEX_EXPR, type,
10514 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10515 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10519 /* Optimize z * conj(z) for floating point complex numbers.
10520 Guarded by flag_unsafe_math_optimizations as non-finite
10521 imaginary components don't produce scalar results. */
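/* For z = a + b*i, z * conj(z) is a*a + b*b with a zero imaginary part;
   but if a or b is infinite or NaN, the imaginary part of the exact complex
   product (a*(-b) + b*a) is NaN rather than zero, hence the guard.  */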
10522 if (flag_unsafe_math_optimizations
10523 && TREE_CODE (arg0) == CONJ_EXPR
10524 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10525 return fold_mult_zconjz (loc, type, arg1);
10526 if (flag_unsafe_math_optimizations
10527 && TREE_CODE (arg1) == CONJ_EXPR
10528 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10529 return fold_mult_zconjz (loc, type, arg0);
10531 if (flag_unsafe_math_optimizations)
10533 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10534 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10536 /* Optimizations of root(...)*root(...). */
10537 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10540 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10541 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10543 /* Optimize sqrt(x)*sqrt(x) as x. */
10544 if (BUILTIN_SQRT_P (fcode0)
10545 && operand_equal_p (arg00, arg10, 0)
10546 && ! HONOR_SNANS (TYPE_MODE (type)))
10549 /* Optimize root(x)*root(y) as root(x*y). */
10550 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10551 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10552 return build_call_expr_loc (loc, rootfn, 1, arg);
10555 /* Optimize expN(x)*expN(y) as expN(x+y). */
10556 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10558 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10559 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10560 CALL_EXPR_ARG (arg0, 0),
10561 CALL_EXPR_ARG (arg1, 0));
10562 return build_call_expr_loc (loc, expfn, 1, arg);
10565 /* Optimizations of pow(...)*pow(...). */
10566 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10567 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10568 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10570 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10571 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10572 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10573 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10575 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10576 if (operand_equal_p (arg01, arg11, 0))
10578 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10579 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10581 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10584 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10585 if (operand_equal_p (arg00, arg10, 0))
10587 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10588 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10590 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10594 /* Optimize tan(x)*cos(x) as sin(x). */
10595 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10596 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10597 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10598 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10599 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10600 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10601 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10602 CALL_EXPR_ARG (arg1, 0), 0))
10604 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10606 if (sinfn != NULL_TREE)
10607 return build_call_expr_loc (loc, sinfn, 1,
10608 CALL_EXPR_ARG (arg0, 0));
10611 /* Optimize x*pow(x,c) as pow(x,c+1). */
10612 if (fcode1 == BUILT_IN_POW
10613 || fcode1 == BUILT_IN_POWF
10614 || fcode1 == BUILT_IN_POWL)
10616 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10617 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10618 if (TREE_CODE (arg11) == REAL_CST
10619 && !TREE_OVERFLOW (arg11)
10620 && operand_equal_p (arg0, arg10, 0))
10622 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10626 c = TREE_REAL_CST (arg11);
10627 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10628 arg = build_real (type, c);
10629 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10633 /* Optimize pow(x,c)*x as pow(x,c+1). */
10634 if (fcode0 == BUILT_IN_POW
10635 || fcode0 == BUILT_IN_POWF
10636 || fcode0 == BUILT_IN_POWL)
10638 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10639 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10640 if (TREE_CODE (arg01) == REAL_CST
10641 && !TREE_OVERFLOW (arg01)
10642 && operand_equal_p (arg1, arg00, 0))
10644 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10648 c = TREE_REAL_CST (arg01);
10649 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10650 arg = build_real (type, c);
10651 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10655 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10656 if (optimize_function_for_speed_p (cfun)
10657 && operand_equal_p (arg0, arg1, 0))
10659 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10663 tree arg = build_real (type, dconst2);
10664 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10673 if (integer_all_onesp (arg1))
10674 return omit_one_operand_loc (loc, type, arg1, arg0);
10675 if (integer_zerop (arg1))
10676 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10677 if (operand_equal_p (arg0, arg1, 0))
10678 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10680 /* ~X | X is -1. */
10681 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10682 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10684 t1 = build_zero_cst (type);
10685 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10686 return omit_one_operand_loc (loc, type, t1, arg1);
10689 /* X | ~X is -1. */
10690 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10693 t1 = build_zero_cst (type);
10694 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10695 return omit_one_operand_loc (loc, type, t1, arg0);
10698 /* Canonicalize (X & C1) | C2. */
10699 if (TREE_CODE (arg0) == BIT_AND_EXPR
10700 && TREE_CODE (arg1) == INTEGER_CST
10701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10703 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10704 int width = TYPE_PRECISION (type), w;
10705 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10706 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10707 hi2 = TREE_INT_CST_HIGH (arg1);
10708 lo2 = TREE_INT_CST_LOW (arg1);
10710 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10711 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10712 return omit_one_operand_loc (loc, type, arg1,
10713 TREE_OPERAND (arg0, 0));
10715 if (width > HOST_BITS_PER_WIDE_INT)
10717 mhi = (unsigned HOST_WIDE_INT) -1
10718 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10724 mlo = (unsigned HOST_WIDE_INT) -1
10725 >> (HOST_BITS_PER_WIDE_INT - width);
10728 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10729 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10730 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10731 TREE_OPERAND (arg0, 0), arg1);
10733 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10734 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10735 mode which allows further optimizations. */
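/* E.g. (X & 3) | 2 becomes (X & 1) | 2, but (X & 0xff) | 0xf0 is left
   alone because 0xff is already a byte-wide mask.  */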
10742 for (w = BITS_PER_UNIT;
10743 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10746 unsigned HOST_WIDE_INT mask
10747 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10748 if (((lo1 | lo2) & mask) == mask
10749 && (lo1 & ~mask) == 0 && hi1 == 0)
10756 if (hi3 != hi1 || lo3 != lo1)
10757 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10758 fold_build2_loc (loc, BIT_AND_EXPR, type,
10759 TREE_OPERAND (arg0, 0),
10760 build_int_cst_wide (type,
10765 /* (X & Y) | Y is (X, Y). */
10766 if (TREE_CODE (arg0) == BIT_AND_EXPR
10767 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10768 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10769 /* (X & Y) | X is (Y, X). */
10770 if (TREE_CODE (arg0) == BIT_AND_EXPR
10771 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10772 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10773 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10774 /* X | (X & Y) is (Y, X). */
10775 if (TREE_CODE (arg1) == BIT_AND_EXPR
10776 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10777 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10778 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10779 /* X | (Y & X) is (Y, X). */
10780 if (TREE_CODE (arg1) == BIT_AND_EXPR
10781 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10782 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10783 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10785 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10786 if (t1 != NULL_TREE)
10789 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10791 This results in more efficient code for machines without a NAND
10792 instruction. Combine will canonicalize to the first form
10793 which will allow use of NAND instructions provided by the
10794 backend if they exist. */
10795 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10796 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10799 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10800 build2 (BIT_AND_EXPR, type,
10801 fold_convert_loc (loc, type,
10802 TREE_OPERAND (arg0, 0)),
10803 fold_convert_loc (loc, type,
10804 TREE_OPERAND (arg1, 0))));
10807 /* See if this can be simplified into a rotate first. If that
10808 is unsuccessful, continue in the association code. */
10812 if (integer_zerop (arg1))
10813 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10814 if (integer_all_onesp (arg1))
10815 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10816 if (operand_equal_p (arg0, arg1, 0))
10817 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10819 /* ~X ^ X is -1. */
10820 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10821 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10823 t1 = build_zero_cst (type);
10824 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10825 return omit_one_operand_loc (loc, type, t1, arg1);
10828 /* X ^ ~X is -1. */
10829 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10830 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10832 t1 = build_zero_cst (type);
10833 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10834 return omit_one_operand_loc (loc, type, t1, arg0);
10837 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10838 with a constant, and the two constants have no bits in common,
10839 we should treat this as a BIT_IOR_EXPR since this may produce more
10840 simplifications. */
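/* E.g. (X & 4) ^ (Y & 3): the two masked values can never have a set bit
   in common, so the XOR is equivalent to an inclusive OR.  */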
10841 if (TREE_CODE (arg0) == BIT_AND_EXPR
10842 && TREE_CODE (arg1) == BIT_AND_EXPR
10843 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10844 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10845 && integer_zerop (const_binop (BIT_AND_EXPR,
10846 TREE_OPERAND (arg0, 1),
10847 TREE_OPERAND (arg1, 1))))
10849 code = BIT_IOR_EXPR;
10853 /* (X | Y) ^ X -> Y & ~X */
10854 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10855 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10857 tree t2 = TREE_OPERAND (arg0, 1);
10858 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10860 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10861 fold_convert_loc (loc, type, t2),
10862 fold_convert_loc (loc, type, t1));
10866 /* (Y | X) ^ X -> Y & ~X */
10867 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10868 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10870 tree t2 = TREE_OPERAND (arg0, 0);
10871 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10873 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10874 fold_convert_loc (loc, type, t2),
10875 fold_convert_loc (loc, type, t1));
10879 /* X ^ (X | Y) -> Y & ~X */
10880 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10881 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10883 tree t2 = TREE_OPERAND (arg1, 1);
10884 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10886 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10887 fold_convert_loc (loc, type, t2),
10888 fold_convert_loc (loc, type, t1));
10892 /* X ^ (Y | X) -> Y & ~X */
10893 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10894 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10896 tree t2 = TREE_OPERAND (arg1, 0);
10897 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10899 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10900 fold_convert_loc (loc, type, t2),
10901 fold_convert_loc (loc, type, t1));
10905 /* Convert ~X ^ ~Y to X ^ Y. */
10906 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10907 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10908 return fold_build2_loc (loc, code, type,
10909 fold_convert_loc (loc, type,
10910 TREE_OPERAND (arg0, 0)),
10911 fold_convert_loc (loc, type,
10912 TREE_OPERAND (arg1, 0)));
10914 /* Convert ~X ^ C to X ^ ~C. */
10915 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10916 && TREE_CODE (arg1) == INTEGER_CST)
10917 return fold_build2_loc (loc, code, type,
10918 fold_convert_loc (loc, type,
10919 TREE_OPERAND (arg0, 0)),
10920 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10922 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10923 if (TREE_CODE (arg0) == BIT_AND_EXPR
10924 && integer_onep (TREE_OPERAND (arg0, 1))
10925 && integer_onep (arg1))
10926 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10927 build_int_cst (TREE_TYPE (arg0), 0));
10929 /* Fold (X & Y) ^ Y as ~X & Y. */
10930 if (TREE_CODE (arg0) == BIT_AND_EXPR
10931 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10933 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10934 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10935 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10936 fold_convert_loc (loc, type, arg1));
10938 /* Fold (X & Y) ^ X as ~Y & X. */
10939 if (TREE_CODE (arg0) == BIT_AND_EXPR
10940 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10941 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10943 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10944 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10945 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10946 fold_convert_loc (loc, type, arg1));
10948 /* Fold X ^ (X & Y) as X & ~Y. */
10949 if (TREE_CODE (arg1) == BIT_AND_EXPR
10950 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10952 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10953 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10954 fold_convert_loc (loc, type, arg0),
10955 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10957 /* Fold X ^ (Y & X) as ~Y & X. */
10958 if (TREE_CODE (arg1) == BIT_AND_EXPR
10959 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10960 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10962 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10963 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10964 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10965 fold_convert_loc (loc, type, arg0));
10968 /* See if this can be simplified into a rotate first. If that
10969 is unsuccessful, continue in the association code. */
10973 if (integer_all_onesp (arg1))
10974 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10975 if (integer_zerop (arg1))
10976 return omit_one_operand_loc (loc, type, arg1, arg0);
10977 if (operand_equal_p (arg0, arg1, 0))
10978 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10980 /* ~X & X is always zero. */
10981 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10982 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10983 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10985 /* X & ~X is always zero. */
10986 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10988 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10990 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10991 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10992 && TREE_CODE (arg1) == INTEGER_CST
10993 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10995 tree tmp1 = fold_convert_loc (loc, type, arg1);
10996 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10997 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10998 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10999 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11001 fold_convert_loc (loc, type,
11002 fold_build2_loc (loc, BIT_IOR_EXPR,
11003 type, tmp2, tmp3));
11006 /* (X | Y) & Y is (X, Y). */
11007 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11009 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11010 /* (X | Y) & X is (Y, X). */
11011 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11012 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11013 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11014 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11015 /* X & (X | Y) is (Y, X). */
11016 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11018 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11019 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11020 /* X & (Y | X) is (Y, X). */
11021 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11023 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11024 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11026 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11027 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11028 && integer_onep (TREE_OPERAND (arg0, 1))
11029 && integer_onep (arg1))
11031 tem = TREE_OPERAND (arg0, 0);
11032 return fold_build2_loc (loc, EQ_EXPR, type,
11033 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11034 build_int_cst (TREE_TYPE (tem), 1)),
11035 build_int_cst (TREE_TYPE (tem), 0));
11037 /* Fold ~X & 1 as (X & 1) == 0. */
11038 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11039 && integer_onep (arg1))
11041 tem = TREE_OPERAND (arg0, 0);
11042 return fold_build2_loc (loc, EQ_EXPR, type,
11043 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11044 build_int_cst (TREE_TYPE (tem), 1)),
11045 build_int_cst (TREE_TYPE (tem), 0));
11048 /* Fold (X ^ Y) & Y as ~X & Y. */
11049 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11050 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11052 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11053 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11054 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11055 fold_convert_loc (loc, type, arg1));
11057 /* Fold (X ^ Y) & X as ~Y & X. */
11058 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11059 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11060 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11062 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11063 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11064 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11065 fold_convert_loc (loc, type, arg1));
11067 /* Fold X & (X ^ Y) as X & ~Y. */
11068 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11069 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11071 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11072 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11073 fold_convert_loc (loc, type, arg0),
11074 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11076 /* Fold X & (Y ^ X) as ~Y & X. */
11077 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11078 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11079 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11081 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11082 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11083 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11084 fold_convert_loc (loc, type, arg0));
11087 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11088 ((A & N) + B) & M -> (A + B) & M
11089 Similarly if (N & M) == 0,
11090 ((A | N) + B) & M -> (A + B) & M
11091 and for - instead of + (or unary - instead of +)
11092 and/or ^ instead of |.
11093 If B is constant and (B & M) == 0, fold into A & M. */
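/* E.g. with M == 0xff: ((A & 0x1ff) + B) & 0xff folds to (A + B) & 0xff,
   and ((A | 0x100) + B) & 0xff likewise, because bits above the mask cannot
   influence the low eight bits of the sum.  */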
11094 if (host_integerp (arg1, 1))
11096 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11097 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11098 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11099 && (TREE_CODE (arg0) == PLUS_EXPR
11100 || TREE_CODE (arg0) == MINUS_EXPR
11101 || TREE_CODE (arg0) == NEGATE_EXPR)
11102 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11103 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11107 unsigned HOST_WIDE_INT cst0;
11109 /* Now we know that arg0 is (C + D) or (C - D) or
11110 -C and arg1 (M) is == (1LL << cst) - 1.
11111 Store C into PMOP[0] and D into PMOP[1]. */
11112 pmop[0] = TREE_OPERAND (arg0, 0);
11114 if (TREE_CODE (arg0) != NEGATE_EXPR)
11116 pmop[1] = TREE_OPERAND (arg0, 1);
11120 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11121 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11125 for (; which >= 0; which--)
11126 switch (TREE_CODE (pmop[which]))
11131 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11134 /* tree_low_cst not used, because we don't care about the upper bits. */
11136 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11138 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11143 else if (cst0 != 0)
11145 /* If C or D is of the form (A & N) where
11146 (N & M) == M, or of the form (A | N) or
11147 (A ^ N) where (N & M) == 0, replace it with A. */
11148 pmop[which] = TREE_OPERAND (pmop[which], 0);
11151 /* If C or D is an N where (N & M) == 0, it can be
11152 omitted (assumed 0). */
11153 if ((TREE_CODE (arg0) == PLUS_EXPR
11154 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11155 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11156 pmop[which] = NULL;
11162 /* Only build anything new if we optimized one or both arguments above. */
11164 if (pmop[0] != TREE_OPERAND (arg0, 0)
11165 || (TREE_CODE (arg0) != NEGATE_EXPR
11166 && pmop[1] != TREE_OPERAND (arg0, 1)))
11168 tree utype = TREE_TYPE (arg0);
11169 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11171 /* Perform the operations in a type that has defined
11172 overflow behavior. */
11173 utype = unsigned_type_for (TREE_TYPE (arg0));
11174 if (pmop[0] != NULL)
11175 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11176 if (pmop[1] != NULL)
11177 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11180 if (TREE_CODE (arg0) == NEGATE_EXPR)
11181 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11182 else if (TREE_CODE (arg0) == PLUS_EXPR)
11184 if (pmop[0] != NULL && pmop[1] != NULL)
11185 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11187 else if (pmop[0] != NULL)
11189 else if (pmop[1] != NULL)
11192 return build_int_cst (type, 0);
11194 else if (pmop[0] == NULL)
11195 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11197 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11199 /* TEM is now the new binary +, - or unary - replacement. */
11200 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11201 fold_convert_loc (loc, utype, arg1));
11202 return fold_convert_loc (loc, type, tem);
11207 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11208 if (t1 != NULL_TREE)
11210 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11211 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11212 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11215 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11217 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11218 && (~TREE_INT_CST_LOW (arg1)
11219 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11221 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11224 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11226 This results in more efficient code for machines without a NOR
11227 instruction. Combine will canonicalize to the first form
11228 which will allow use of NOR instructions provided by the
11229 backend if they exist. */
11230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11231 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11233 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11234 build2 (BIT_IOR_EXPR, type,
11235 fold_convert_loc (loc, type,
11236 TREE_OPERAND (arg0, 0)),
11237 fold_convert_loc (loc, type,
11238 TREE_OPERAND (arg1, 0))));
11241 /* If arg0 is derived from the address of an object or function, we may
11242 be able to fold this expression using the object or function's alignment. */
11244 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11246 unsigned HOST_WIDE_INT modulus, residue;
11247 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11249 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11250 integer_onep (arg1));
11252 /* This works because modulus is a power of 2. If this weren't the
11253 case, we'd have to replace it by its greatest power-of-2
11254 divisor: modulus & -modulus. */
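/* E.g. if arg0 is the address of a variable known to be 16-byte aligned,
   the residue is 0 and arg0 & 7 folds to the constant 0.  */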
11256 return build_int_cst (type, residue & low);
11259 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11260 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11261 if the new mask might be further optimized. */
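/* E.g. (X << 3) & 0xf8: the low three bits of the shift result are always
   zero, so the mask can be widened to 0xff; a mask that covers a whole
   integer mode is easier for later passes to simplify further.  */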
11262 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11263 || TREE_CODE (arg0) == RSHIFT_EXPR)
11264 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11265 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11266 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11267 < TYPE_PRECISION (TREE_TYPE (arg0))
11268 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11269 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11271 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11272 unsigned HOST_WIDE_INT mask
11273 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11274 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11275 tree shift_type = TREE_TYPE (arg0);
11277 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11278 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11279 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11280 && TYPE_PRECISION (TREE_TYPE (arg0))
11281 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11283 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11284 tree arg00 = TREE_OPERAND (arg0, 0);
11285 /* See if more bits can be proven as zero because of zero extension. */
11287 if (TREE_CODE (arg00) == NOP_EXPR
11288 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11290 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11291 if (TYPE_PRECISION (inner_type)
11292 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11293 && TYPE_PRECISION (inner_type) < prec)
11295 prec = TYPE_PRECISION (inner_type);
11296 /* See if we can shorten the right shift. */
11298 shift_type = inner_type;
11301 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11302 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11303 zerobits <<= prec - shiftc;
11304 /* For an arithmetic shift, if the sign bit could be set, zerobits
11305 may actually contain sign bits, so no transformation is
11306 possible unless MASK masks them all away. In that
11307 case the shift needs to be converted into a logical shift. */
11308 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11309 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11311 if ((mask & zerobits) == 0)
11312 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11318 /* ((X << 16) & 0xff00) is (X, 0). */
11319 if ((mask & zerobits) == mask)
11320 return omit_one_operand_loc (loc, type,
11321 build_int_cst (type, 0), arg0);
11323 newmask = mask | zerobits;
11324 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11328 /* Only do the transformation if NEWMASK is some integer mode's mask. */
11330 for (prec = BITS_PER_UNIT;
11331 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11332 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11334 if (prec < HOST_BITS_PER_WIDE_INT
11335 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11339 if (shift_type != TREE_TYPE (arg0))
11341 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11342 fold_convert_loc (loc, shift_type,
11343 TREE_OPERAND (arg0, 0)),
11344 TREE_OPERAND (arg0, 1));
11345 tem = fold_convert_loc (loc, type, tem);
11349 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11350 if (!tree_int_cst_equal (newmaskt, arg1))
11351 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11359 /* Don't touch a floating-point divide by zero unless the mode
11360 of the constant can represent infinity. */
11361 if (TREE_CODE (arg1) == REAL_CST
11362 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11363 && real_zerop (arg1))
11366 /* Optimize A / A to 1.0 if we don't care about
11367 NaNs or Infinities. Skip the transformation
11368 for non-real operands. */
11369 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11370 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11371 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11372 && operand_equal_p (arg0, arg1, 0))
11374 tree r = build_real (TREE_TYPE (arg0), dconst1);
11376 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11379 /* The complex version of the above A / A optimization. */
11380 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11381 && operand_equal_p (arg0, arg1, 0))
11383 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11384 if (! HONOR_NANS (TYPE_MODE (elem_type))
11385 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11387 tree r = build_real (elem_type, dconst1);
11388 /* omit_two_operands will call fold_convert for us. */
11389 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11393 /* (-A) / (-B) -> A / B */
11394 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11395 return fold_build2_loc (loc, RDIV_EXPR, type,
11396 TREE_OPERAND (arg0, 0),
11397 negate_expr (arg1));
11398 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11399 return fold_build2_loc (loc, RDIV_EXPR, type,
11400 negate_expr (arg0),
11401 TREE_OPERAND (arg1, 0));
11403 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11404 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11405 && real_onep (arg1))
11406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11408 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11409 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11410 && real_minus_onep (arg1))
11411 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11412 negate_expr (arg0)));
11414 /* If ARG1 is a constant, we can convert this to a multiply by the
11415 reciprocal. This does not have the same rounding properties,
11416 so only do this if -freciprocal-math. We can actually
11417 always safely do it if ARG1 is a power of two, but it's hard to
11418 tell if it is or not in a portable manner. */
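/* E.g. X / 10.0 becomes X * 0.1, which may round differently because 0.1
   is not exactly representable in binary floating point; X / 4.0 and
   X * 0.25 always agree because 4.0 is a power of two.  */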
11419 if (TREE_CODE (arg1) == REAL_CST)
11421 if (flag_reciprocal_math
11422 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11424 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11425 /* Find the reciprocal if optimizing and the result is exact. */
11429 r = TREE_REAL_CST (arg1);
11430 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11432 tem = build_real (type, r);
11433 return fold_build2_loc (loc, MULT_EXPR, type,
11434 fold_convert_loc (loc, type, arg0), tem);
11438 /* Convert A/B/C to A/(B*C). */
11439 if (flag_reciprocal_math
11440 && TREE_CODE (arg0) == RDIV_EXPR)
11441 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11442 fold_build2_loc (loc, MULT_EXPR, type,
11443 TREE_OPERAND (arg0, 1), arg1));
11445 /* Convert A/(B/C) to (A/B)*C. */
11446 if (flag_reciprocal_math
11447 && TREE_CODE (arg1) == RDIV_EXPR)
11448 return fold_build2_loc (loc, MULT_EXPR, type,
11449 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11450 TREE_OPERAND (arg1, 0)),
11451 TREE_OPERAND (arg1, 1));
11453 /* Convert C1/(X*C2) into (C1/C2)/X. */
11454 if (flag_reciprocal_math
11455 && TREE_CODE (arg1) == MULT_EXPR
11456 && TREE_CODE (arg0) == REAL_CST
11457 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11459 tree tem = const_binop (RDIV_EXPR, arg0,
11460 TREE_OPERAND (arg1, 1));
11462 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11463 TREE_OPERAND (arg1, 0));
11466 if (flag_unsafe_math_optimizations)
11468 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11469 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11471 /* Optimize sin(x)/cos(x) as tan(x). */
11472 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11473 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11474 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11475 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11476 CALL_EXPR_ARG (arg1, 0), 0))
11478 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11480 if (tanfn != NULL_TREE)
11481 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11484 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11485 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11486 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11487 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11488 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11489 CALL_EXPR_ARG (arg1, 0), 0))
11491 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11493 if (tanfn != NULL_TREE)
11495 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11496 CALL_EXPR_ARG (arg0, 0));
11497 return fold_build2_loc (loc, RDIV_EXPR, type,
11498 build_real (type, dconst1), tmp);
11502 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11503 NaNs or Infinities. */
11504 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11505 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11506 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11508 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11509 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11511 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11512 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11513 && operand_equal_p (arg00, arg01, 0))
11515 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11517 if (cosfn != NULL_TREE)
11518 return build_call_expr_loc (loc, cosfn, 1, arg00);
11522 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11523 NaNs or Infinities. */
11524 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11525 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11526 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11528 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11529 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11531 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11532 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11533 && operand_equal_p (arg00, arg01, 0))
11535 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11537 if (cosfn != NULL_TREE)
11539 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11540 return fold_build2_loc (loc, RDIV_EXPR, type,
11541 build_real (type, dconst1),
11547 /* Optimize pow(x,c)/x as pow(x,c-1). */
11548 if (fcode0 == BUILT_IN_POW
11549 || fcode0 == BUILT_IN_POWF
11550 || fcode0 == BUILT_IN_POWL)
11552 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11553 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11554 if (TREE_CODE (arg01) == REAL_CST
11555 && !TREE_OVERFLOW (arg01)
11556 && operand_equal_p (arg1, arg00, 0))
11558 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11562 c = TREE_REAL_CST (arg01);
11563 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11564 arg = build_real (type, c);
11565 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11569 /* Optimize a/root(b/c) into a*root(c/b). */
11570 if (BUILTIN_ROOT_P (fcode1))
11572 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11574 if (TREE_CODE (rootarg) == RDIV_EXPR)
11576 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11577 tree b = TREE_OPERAND (rootarg, 0);
11578 tree c = TREE_OPERAND (rootarg, 1);
11580 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11582 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11583 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11587 /* Optimize x/expN(y) into x*expN(-y). */
11588 if (BUILTIN_EXPONENT_P (fcode1))
11590 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11591 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11592 arg1 = build_call_expr_loc (loc,
11594 fold_convert_loc (loc, type, arg));
11595 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11598 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11599 if (fcode1 == BUILT_IN_POW
11600 || fcode1 == BUILT_IN_POWF
11601 || fcode1 == BUILT_IN_POWL)
11603 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11604 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11605 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11606 tree neg11 = fold_convert_loc (loc, type,
11607 negate_expr (arg11));
11608 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11609 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11614 case TRUNC_DIV_EXPR:
11615 /* Optimize (X & (-A)) / A where A is a power of 2, to X >> log2(A). */
11617 if (TREE_CODE (arg0) == BIT_AND_EXPR
11618 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11619 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11621 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11622 arg1, TREE_OPERAND (arg0, 1));
11623 if (sum && integer_zerop (sum)) {
11624 unsigned long pow2;
11626 if (TREE_INT_CST_LOW (arg1))
11627 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11629 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11630 + HOST_BITS_PER_WIDE_INT;
11632 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11633 TREE_OPERAND (arg0, 0),
11634 build_int_cst (NULL_TREE, pow2));
11640 case FLOOR_DIV_EXPR:
11641 /* Simplify A / (B << N) where A and B are positive and B is
11642 a power of 2, to A >> (N + log2(B)). */
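/* E.g. for nonnegative A, A / (4 << N) is the same as A >> (N + 2).  */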
11643 strict_overflow_p = false;
11644 if (TREE_CODE (arg1) == LSHIFT_EXPR
11645 && (TYPE_UNSIGNED (type)
11646 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11648 tree sval = TREE_OPERAND (arg1, 0);
11649 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11651 tree sh_cnt = TREE_OPERAND (arg1, 1);
11652 unsigned long pow2;
11654 if (TREE_INT_CST_LOW (sval))
11655 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11657 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11658 + HOST_BITS_PER_WIDE_INT;
11660 if (strict_overflow_p)
11661 fold_overflow_warning (("assuming signed overflow does not "
11662 "occur when simplifying A / (B << N)"),
11663 WARN_STRICT_OVERFLOW_MISC);
11665 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11666 sh_cnt, build_int_cst (NULL_TREE, pow2));
11667 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11668 fold_convert_loc (loc, type, arg0), sh_cnt);
11672 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11673 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11674 if (INTEGRAL_TYPE_P (type)
11675 && TYPE_UNSIGNED (type)
11676 && code == FLOOR_DIV_EXPR)
11677 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11681 case ROUND_DIV_EXPR:
11682 case CEIL_DIV_EXPR:
11683 case EXACT_DIV_EXPR:
11684 if (integer_onep (arg1))
11685 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11686 if (integer_zerop (arg1))
11688 /* X / -1 is -X. */
11689 if (!TYPE_UNSIGNED (type)
11690 && TREE_CODE (arg1) == INTEGER_CST
11691 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11692 && TREE_INT_CST_HIGH (arg1) == -1)
11693 return fold_convert_loc (loc, type, negate_expr (arg0));
11695 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
11697 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11698 && TREE_CODE (arg0) == NEGATE_EXPR
11699 && negate_expr_p (arg1))
11701 if (INTEGRAL_TYPE_P (type))
11702 fold_overflow_warning (("assuming signed overflow does not occur "
11703 "when distributing negation across "
11705 WARN_STRICT_OVERFLOW_MISC);
11706 return fold_build2_loc (loc, code, type,
11707 fold_convert_loc (loc, type,
11708 TREE_OPERAND (arg0, 0)),
11709 fold_convert_loc (loc, type,
11710 negate_expr (arg1)));
11712 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11713 && TREE_CODE (arg1) == NEGATE_EXPR
11714 && negate_expr_p (arg0))
11716 if (INTEGRAL_TYPE_P (type))
11717 fold_overflow_warning (("assuming signed overflow does not occur "
11718 "when distributing negation across "
11720 WARN_STRICT_OVERFLOW_MISC);
11721 return fold_build2_loc (loc, code, type,
11722 fold_convert_loc (loc, type,
11723 negate_expr (arg0)),
11724 fold_convert_loc (loc, type,
11725 TREE_OPERAND (arg1, 0)));
11728 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11729 operation, EXACT_DIV_EXPR.
11731 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11732 At one time others generated faster code, but it's not clear whether
11733 they still do after the last round of changes to the DIV code in expmed.c. */
11734 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11735 && multiple_of_p (type, arg0, arg1))
11736 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11738 strict_overflow_p = false;
11739 if (TREE_CODE (arg1) == INTEGER_CST
11740 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11741 &strict_overflow_p)))
11743 if (strict_overflow_p)
11744 fold_overflow_warning (("assuming signed overflow does not occur "
11745 "when simplifying division"),
11746 WARN_STRICT_OVERFLOW_MISC);
11747 return fold_convert_loc (loc, type, tem);
11752 case CEIL_MOD_EXPR:
11753 case FLOOR_MOD_EXPR:
11754 case ROUND_MOD_EXPR:
11755 case TRUNC_MOD_EXPR:
11756 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
11758 if (integer_onep (arg1))
11759 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11761 /* X % 0, return X % 0 unchanged so that we can get the
11762 proper warnings and errors. */
11763 if (integer_zerop (arg1))
11766 /* 0 % X is always zero, but be sure to preserve any side
11767 effects in X. Place this after checking for X == 0. */
11768 if (integer_zerop (arg0))
11769 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11771 /* X % -1 is zero. */
11772 if (!TYPE_UNSIGNED (type)
11773 && TREE_CODE (arg1) == INTEGER_CST
11774 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11775 && TREE_INT_CST_HIGH (arg1) == -1)
11776 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11778 /* X % -C is the same as X % C. */
11779 if (code == TRUNC_MOD_EXPR
11780 && !TYPE_UNSIGNED (type)
11781 && TREE_CODE (arg1) == INTEGER_CST
11782 && !TREE_OVERFLOW (arg1)
11783 && TREE_INT_CST_HIGH (arg1) < 0
11784 && !TYPE_OVERFLOW_TRAPS (type)
11785 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11786 && !sign_bit_p (arg1, arg1))
11787 return fold_build2_loc (loc, code, type,
11788 fold_convert_loc (loc, type, arg0),
11789 fold_convert_loc (loc, type,
11790 negate_expr (arg1)));
11792 /* X % -Y is the same as X % Y. */
11793 if (code == TRUNC_MOD_EXPR
11794 && !TYPE_UNSIGNED (type)
11795 && TREE_CODE (arg1) == NEGATE_EXPR
11796 && !TYPE_OVERFLOW_TRAPS (type))
11797 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11798 fold_convert_loc (loc, type,
11799 TREE_OPERAND (arg1, 0)));
11801 strict_overflow_p = false;
11802 if (TREE_CODE (arg1) == INTEGER_CST
11803 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11804 &strict_overflow_p)))
11806 if (strict_overflow_p)
11807 fold_overflow_warning (("assuming signed overflow does not occur "
11808 "when simplifying modulus"),
11809 WARN_STRICT_OVERFLOW_MISC);
11810 return fold_convert_loc (loc, type, tem);
11813 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11814 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
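/* For example, X % 16 becomes X & 15 when X is unsigned or known to be
   nonnegative.  */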
11815 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11816 && (TYPE_UNSIGNED (type)
11817 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11820 /* Also optimize A % (C << N) where C is a power of 2,
11821 to A & ((C << N) - 1). */
11822 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11823 c = TREE_OPERAND (arg1, 0);
11825 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11828 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11829 build_int_cst (TREE_TYPE (arg1), 1));
11830 if (strict_overflow_p)
11831 fold_overflow_warning (("assuming signed overflow does not "
11832 "occur when simplifying "
11833 "X % (power of two)"),
11834 WARN_STRICT_OVERFLOW_MISC);
11835 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11836 fold_convert_loc (loc, type, arg0),
11837 fold_convert_loc (loc, type, mask));
11845 if (integer_all_onesp (arg0))
11846 return omit_one_operand_loc (loc, type, arg0, arg1);
11850 /* Optimize -1 >> x for arithmetic right shifts. */
11851 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11852 && tree_expr_nonnegative_p (arg1))
11853 return omit_one_operand_loc (loc, type, arg0, arg1);
11854 /* ... fall through ... */
11858 if (integer_zerop (arg1))
11859 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11860 if (integer_zerop (arg0))
11861 return omit_one_operand_loc (loc, type, arg0, arg1);
11863 /* Since a negative shift count is not well-defined,
11864 don't try to compute it in the compiler. */
11865 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11868 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
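/* For example, (X << 3) << 5 becomes X << 8 and (X >> 2) >> 4 becomes
   X >> 6; if the combined count reaches the type's precision, an unsigned
   or left shift folds to zero instead (see below).  */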
11869 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11870 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11871 && host_integerp (TREE_OPERAND (arg0, 1), false)
11872 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11874 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11875 + TREE_INT_CST_LOW (arg1));
11877 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11878 being well defined. */
11879 if (low >= TYPE_PRECISION (type))
11881 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11882 low = low % TYPE_PRECISION (type);
11883 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11884 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11885 TREE_OPERAND (arg0, 0));
11887 low = TYPE_PRECISION (type) - 1;
11890 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11891 build_int_cst (type, low));
11894 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11895 into x & ((unsigned)-1 >> c) for unsigned types. */
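/* For example, for a 32-bit unsigned X, (X >> 4) << 4 becomes
   X & 0xfffffff0 and (X << 4) >> 4 becomes X & 0x0fffffff.  */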
11896 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11897 || (TYPE_UNSIGNED (type)
11898 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11899 && host_integerp (arg1, false)
11900 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11901 && host_integerp (TREE_OPERAND (arg0, 1), false)
11902 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11904 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11905 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11911 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11913 lshift = build_int_cst (type, -1);
11914 lshift = int_const_binop (code, lshift, arg1, 0);
11916 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11920 /* Rewrite an LROTATE_EXPR by a constant into an
11921 RROTATE_EXPR by a new constant. */
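/* For example, on a 32-bit type, X lrotate 5 becomes X rrotate 27.  */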
11922 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11924 tree tem = build_int_cst (TREE_TYPE (arg1),
11925 TYPE_PRECISION (type));
11926 tem = const_binop (MINUS_EXPR, tem, arg1);
11927 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11930 /* If we have a rotate of a bit operation with the rotate count and
11931 the second operand of the bit operation both constant,
11932 permute the two operations. */
11933 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11934 && (TREE_CODE (arg0) == BIT_AND_EXPR
11935 || TREE_CODE (arg0) == BIT_IOR_EXPR
11936 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11937 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11938 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11939 fold_build2_loc (loc, code, type,
11940 TREE_OPERAND (arg0, 0), arg1),
11941 fold_build2_loc (loc, code, type,
11942 TREE_OPERAND (arg0, 1), arg1));
11944 /* Two consecutive rotates adding up to the precision of the
11945 type can be ignored. */
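/* For example, on a 32-bit type, (X rrotate 10) rrotate 22 is just X.  */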
11946 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11947 && TREE_CODE (arg0) == RROTATE_EXPR
11948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11949 && TREE_INT_CST_HIGH (arg1) == 0
11950 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11951 && ((TREE_INT_CST_LOW (arg1)
11952 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11953 == (unsigned int) TYPE_PRECISION (type)))
11954 return TREE_OPERAND (arg0, 0);
11956 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11957 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11958 if the latter can be further optimized. */
11959 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11960 && TREE_CODE (arg0) == BIT_AND_EXPR
11961 && TREE_CODE (arg1) == INTEGER_CST
11962 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11964 tree mask = fold_build2_loc (loc, code, type,
11965 fold_convert_loc (loc, type,
11966 TREE_OPERAND (arg0, 1)),
11968 tree shift = fold_build2_loc (loc, code, type,
11969 fold_convert_loc (loc, type,
11970 TREE_OPERAND (arg0, 0)),
11972 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11980 if (operand_equal_p (arg0, arg1, 0))
11981 return omit_one_operand_loc (loc, type, arg0, arg1);
11982 if (INTEGRAL_TYPE_P (type)
11983 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11984 return omit_one_operand_loc (loc, type, arg1, arg0);
11985 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11991 if (operand_equal_p (arg0, arg1, 0))
11992 return omit_one_operand_loc (loc, type, arg0, arg1);
11993 if (INTEGRAL_TYPE_P (type)
11994 && TYPE_MAX_VALUE (type)
11995 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11996 return omit_one_operand_loc (loc, type, arg1, arg0);
11997 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12002 case TRUTH_ANDIF_EXPR:
12003 /* Note that the operands of this must be ints
12004 and their values must be 0 or 1.
12005 ("true" is a fixed value perhaps depending on the language.) */
12006 /* If first arg is constant zero, return it. */
12007 if (integer_zerop (arg0))
12008 return fold_convert_loc (loc, type, arg0);
12009 case TRUTH_AND_EXPR:
12010 /* If either arg is constant true, drop it. */
12011 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12013 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12014 /* Preserve sequence points. */
12015 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12016 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12017 /* If second arg is constant zero, result is zero, but first arg
12018 must be evaluated. */
12019 if (integer_zerop (arg1))
12020 return omit_one_operand_loc (loc, type, arg1, arg0);
12021 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12022 case will be handled here. */
12023 if (integer_zerop (arg0))
12024 return omit_one_operand_loc (loc, type, arg0, arg1);
12026 /* !X && X is always false. */
12027 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12028 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12029 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12030 /* X && !X is always false. */
12031 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12032 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12033 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12035 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12036 means A >= Y && A != MAX, but in this case we know that A < X <= MAX. */
12039 if (!TREE_SIDE_EFFECTS (arg0)
12040 && !TREE_SIDE_EFFECTS (arg1))
12042 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12043 if (tem && !operand_equal_p (tem, arg0, 0))
12044 return fold_build2_loc (loc, code, type, tem, arg1);
12046 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12047 if (tem && !operand_equal_p (tem, arg1, 0))
12048 return fold_build2_loc (loc, code, type, arg0, tem);
12052 /* We only do these simplifications if we are optimizing. */
12056 /* Check for things like (A || B) && (A || C). We can convert this
12057 to A || (B && C). Note that either operator can be any of the four
12058 truth and/or operations and the transformation will still be
12059 valid. Also note that we only care about order for the
12060 ANDIF and ORIF operators. If B contains side effects, this
12061 might change the truth-value of A. */
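/* For example, (A || B) && (A || C) becomes A || (B && C) when B has no
   side effects, so A is evaluated only once.  */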
12062 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12063 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12064 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12065 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12066 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12067 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12069 tree a00 = TREE_OPERAND (arg0, 0);
12070 tree a01 = TREE_OPERAND (arg0, 1);
12071 tree a10 = TREE_OPERAND (arg1, 0);
12072 tree a11 = TREE_OPERAND (arg1, 1);
12073 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12074 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12075 && (code == TRUTH_AND_EXPR
12076 || code == TRUTH_OR_EXPR));
12078 if (operand_equal_p (a00, a10, 0))
12079 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12080 fold_build2_loc (loc, code, type, a01, a11));
12081 else if (commutative && operand_equal_p (a00, a11, 0))
12082 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12083 fold_build2_loc (loc, code, type, a01, a10));
12084 else if (commutative && operand_equal_p (a01, a10, 0))
12085 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12086 fold_build2_loc (loc, code, type, a00, a11));
12088 /* This case is tricky because we must either have commutative
12089 operators or else A10 must not have side-effects. */
12091 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12092 && operand_equal_p (a01, a11, 0))
12093 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12094 fold_build2_loc (loc, code, type, a00, a10),
12098 /* See if we can build a range comparison. */
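/* For example, A >= 2 && A <= 9 can often be collapsed into a single
   unsigned range check of the form (unsigned) (A - 2) <= 7.  */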
12099 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12102 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12103 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12105 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12107 return fold_build2_loc (loc, code, type, tem, arg1);
12110 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12111 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12113 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12115 return fold_build2_loc (loc, code, type, arg0, tem);
12118 /* Check for the possibility of merging component references. If our
12119 lhs is another similar operation, try to merge its rhs with our
12120 rhs. Then try to merge our lhs and rhs. */
12121 if (TREE_CODE (arg0) == code
12122 && 0 != (tem = fold_truthop (loc, code, type,
12123 TREE_OPERAND (arg0, 1), arg1)))
12124 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12126 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12131 case TRUTH_ORIF_EXPR:
12132 /* Note that the operands of this must be ints
12133 and their values must be 0 or true.
12134 ("true" is a fixed value perhaps depending on the language.) */
12135 /* If first arg is constant true, return it. */
12136 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12137 return fold_convert_loc (loc, type, arg0);
12138 case TRUTH_OR_EXPR:
12139 /* If either arg is constant zero, drop it. */
12140 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12141 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12142 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12143 /* Preserve sequence points. */
12144 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12146 /* If second arg is constant true, result is true, but we must
12147 evaluate first arg. */
12148 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12149 return omit_one_operand_loc (loc, type, arg1, arg0);
12150 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR. */
12152 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12153 return omit_one_operand_loc (loc, type, arg0, arg1);
12155 /* !X || X is always true. */
12156 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12157 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12158 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12159 /* X || !X is always true. */
12160 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12161 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12162 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12166 case TRUTH_XOR_EXPR:
12167 /* If the second arg is constant zero, drop it. */
12168 if (integer_zerop (arg1))
12169 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12170 /* If the second arg is constant true, this is a logical inversion. */
12171 if (integer_onep (arg1))
12173 /* Only call invert_truthvalue if operand is a truth value. */
12174 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12175 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12177 tem = invert_truthvalue_loc (loc, arg0);
12178 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12180 /* Identical arguments cancel to zero. */
12181 if (operand_equal_p (arg0, arg1, 0))
12182 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12184 /* !X ^ X is always true. */
12185 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12186 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12187 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12189 /* X ^ !X is always true. */
12190 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12191 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12192 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12198 tem = fold_comparison (loc, code, type, op0, op1);
12199 if (tem != NULL_TREE)
12202 /* bool_var != 0 becomes bool_var. */
12203 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12204 && code == NE_EXPR)
12205 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12207 /* bool_var == 1 becomes bool_var. */
12208 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12209 && code == EQ_EXPR)
12210 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12212 /* bool_var != 1 becomes !bool_var. */
12213 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12214 && code == NE_EXPR)
12215 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12216 fold_convert_loc (loc, type, arg0));
12218 /* bool_var == 0 becomes !bool_var. */
12219 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12220 && code == EQ_EXPR)
12221 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12222 fold_convert_loc (loc, type, arg0));
12224 /* !exp != 0 becomes !exp */
12225 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12226 && code == NE_EXPR)
12227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12229 /* If this is an equality comparison of the address of two non-weak,
12230 unaliased symbols neither of which are extern (since we do not
12231 have access to attributes for externs), then we know the result. */
12232 if (TREE_CODE (arg0) == ADDR_EXPR
12233 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12234 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12235 && ! lookup_attribute ("alias",
12236 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12237 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12238 && TREE_CODE (arg1) == ADDR_EXPR
12239 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12240 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12241 && ! lookup_attribute ("alias",
12242 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12243 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12245 /* We know that we're looking at the address of two
12246 non-weak, unaliased, static _DECL nodes.
12248 It is both wasteful and incorrect to call operand_equal_p
12249 to compare the two ADDR_EXPR nodes. It is wasteful in that
12250 all we need to do is test pointer equality for the arguments
12251 to the two ADDR_EXPR nodes. It is incorrect to use
12252 operand_equal_p as that function is NOT equivalent to a
12253 C equality test. It can in fact return false for two
12254 objects which would test as equal using the C equality operator. */
12256 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12257 return constant_boolean_node (equal
12258 ? code == EQ_EXPR : code != EQ_EXPR,
12262 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12263 a MINUS_EXPR of a constant, we can convert it into a comparison with
12264 a revised constant as long as no overflow occurs. */
12265 if (TREE_CODE (arg1) == INTEGER_CST
12266 && (TREE_CODE (arg0) == PLUS_EXPR
12267 || TREE_CODE (arg0) == MINUS_EXPR)
12268 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12269 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12270 ? MINUS_EXPR : PLUS_EXPR,
12271 fold_convert_loc (loc, TREE_TYPE (arg0),
12273 TREE_OPERAND (arg0, 1)))
12274 && !TREE_OVERFLOW (tem))
12275 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12277 /* Similarly for a NEGATE_EXPR. */
12278 if (TREE_CODE (arg0) == NEGATE_EXPR
12279 && TREE_CODE (arg1) == INTEGER_CST
12280 && 0 != (tem = negate_expr (arg1))
12281 && TREE_CODE (tem) == INTEGER_CST
12282 && !TREE_OVERFLOW (tem))
12283 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12285 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12286 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12287 && TREE_CODE (arg1) == INTEGER_CST
12288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12289 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12290 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12291 fold_convert_loc (loc,
12294 TREE_OPERAND (arg0, 1)));
12296 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12297 if ((TREE_CODE (arg0) == PLUS_EXPR
12298 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12299 || TREE_CODE (arg0) == MINUS_EXPR)
12300 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12301 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12302 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12304 tree val = TREE_OPERAND (arg0, 1);
12305 return omit_two_operands_loc (loc, type,
12306 fold_build2_loc (loc, code, type,
12308 build_int_cst (TREE_TYPE (val),
12310 TREE_OPERAND (arg0, 0), arg1);
12313 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
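/* C - X == X would require C == 2*X, which is impossible for odd C, so
   the EQ_EXPR form folds to false and the NE_EXPR form to true.  */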
12314 if (TREE_CODE (arg0) == MINUS_EXPR
12315 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12316 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12317 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12319 return omit_two_operands_loc (loc, type,
12321 ? boolean_true_node : boolean_false_node,
12322 TREE_OPERAND (arg0, 1), arg1);
12325 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12326 for !=. Don't do this for ordered comparisons due to overflow. */
12327 if (TREE_CODE (arg0) == MINUS_EXPR
12328 && integer_zerop (arg1))
12329 return fold_build2_loc (loc, code, type,
12330 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12332 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12333 if (TREE_CODE (arg0) == ABS_EXPR
12334 && (integer_zerop (arg1) || real_zerop (arg1)))
12335 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12337 /* If this is an EQ or NE comparison with zero and ARG0 is
12338 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12339 two operations, but the latter can be done in one less insn
12340 on machines that have only two-operand insns or on which a
12341 constant cannot be the first operand. */
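/* For example, ((1 << N) & BAR) != 0 becomes ((BAR >> N) & 1) != 0.  */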
12342 if (TREE_CODE (arg0) == BIT_AND_EXPR
12343 && integer_zerop (arg1))
12345 tree arg00 = TREE_OPERAND (arg0, 0);
12346 tree arg01 = TREE_OPERAND (arg0, 1);
12347 if (TREE_CODE (arg00) == LSHIFT_EXPR
12348 && integer_onep (TREE_OPERAND (arg00, 0)))
12350 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12351 arg01, TREE_OPERAND (arg00, 1));
12352 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12353 build_int_cst (TREE_TYPE (arg0), 1));
12354 return fold_build2_loc (loc, code, type,
12355 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12358 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12359 && integer_onep (TREE_OPERAND (arg01, 0)))
12361 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12362 arg00, TREE_OPERAND (arg01, 1));
12363 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12364 build_int_cst (TREE_TYPE (arg0), 1));
12365 return fold_build2_loc (loc, code, type,
12366 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12371 /* If this is an NE or EQ comparison of zero against the result of a
12372 signed MOD operation whose second operand is a power of 2, make
12373 the MOD operation unsigned since it is simpler and equivalent. */
12374 if (integer_zerop (arg1)
12375 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12376 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12377 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12378 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12379 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12380 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12382 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12383 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12384 fold_convert_loc (loc, newtype,
12385 TREE_OPERAND (arg0, 0)),
12386 fold_convert_loc (loc, newtype,
12387 TREE_OPERAND (arg0, 1)));
12389 return fold_build2_loc (loc, code, type, newmod,
12390 fold_convert_loc (loc, newtype, arg1));
12393 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12394 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit. */
12396 if (TREE_CODE (arg0) == BIT_AND_EXPR
12397 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12398 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12400 && integer_pow2p (TREE_OPERAND (arg0, 1))
12401 && integer_zerop (arg1))
12403 tree itype = TREE_TYPE (arg0);
12404 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12405 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12407 /* Check for a valid shift count. */
12408 if (TREE_INT_CST_HIGH (arg001) == 0
12409 && TREE_INT_CST_LOW (arg001) < prec)
12411 tree arg01 = TREE_OPERAND (arg0, 1);
12412 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12413 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12414 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12415 can be rewritten as (X & (C2 << C1)) != 0. */
12416 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12418 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12419 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12420 return fold_build2_loc (loc, code, type, tem, arg1);
12422 /* Otherwise, for signed (arithmetic) shifts,
12423 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12424 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12425 else if (!TYPE_UNSIGNED (itype))
12426 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12427 arg000, build_int_cst (itype, 0));
12428 /* Otherwise, for unsigned (logical) shifts,
12429 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12430 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12432 return omit_one_operand_loc (loc, type,
12433 code == EQ_EXPR ? integer_one_node
12434 : integer_zero_node,
12439 /* If this is an NE comparison of zero with an AND of one, remove the
12440 comparison since the AND will give the correct value. */
12441 if (code == NE_EXPR
12442 && integer_zerop (arg1)
12443 && TREE_CODE (arg0) == BIT_AND_EXPR
12444 && integer_onep (TREE_OPERAND (arg0, 1)))
12445 return fold_convert_loc (loc, type, arg0);
12447 /* If we have (A & C) == C where C is a power of 2, convert this into
12448 (A & C) != 0. Similarly for NE_EXPR. */
12449 if (TREE_CODE (arg0) == BIT_AND_EXPR
12450 && integer_pow2p (TREE_OPERAND (arg0, 1))
12451 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12452 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12453 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12454 integer_zero_node));
12456 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12457 bit, then fold the expression into A < 0 or A >= 0. */
12458 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12462 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12463 Similarly for NE_EXPR. */
12464 if (TREE_CODE (arg0) == BIT_AND_EXPR
12465 && TREE_CODE (arg1) == INTEGER_CST
12466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12468 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12469 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12470 TREE_OPERAND (arg0, 1));
12471 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12473 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12474 if (integer_nonzerop (dandnotc))
12475 return omit_one_operand_loc (loc, type, rslt, arg0);
12478 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12479 Similarly for NE_EXPR. */
12480 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12481 && TREE_CODE (arg1) == INTEGER_CST
12482 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12484 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12485 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12486 TREE_OPERAND (arg0, 1), notd);
12487 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12488 if (integer_nonzerop (candnotd))
12489 return omit_one_operand_loc (loc, type, rslt, arg0);
12492 /* If this is a comparison of a field, we may be able to simplify it. */
12493 if ((TREE_CODE (arg0) == COMPONENT_REF
12494 || TREE_CODE (arg0) == BIT_FIELD_REF)
12495 /* Handle the constant case even without -O
12496 to make sure the warnings are given. */
12497 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12499 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12504 /* Optimize comparisons of strlen vs zero to a compare of the
12505 first character of the string vs zero. To wit,
12506 strlen(ptr) == 0 => *ptr == 0
12507 strlen(ptr) != 0 => *ptr != 0
12508 Other cases should reduce to one of these two (or a constant)
12509 due to the return value of strlen being unsigned. */
12510 if (TREE_CODE (arg0) == CALL_EXPR
12511 && integer_zerop (arg1))
12513 tree fndecl = get_callee_fndecl (arg0);
12516 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12517 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12518 && call_expr_nargs (arg0) == 1
12519 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12521 tree iref = build_fold_indirect_ref_loc (loc,
12522 CALL_EXPR_ARG (arg0, 0));
12523 return fold_build2_loc (loc, code, type, iref,
12524 build_int_cst (TREE_TYPE (iref), 0));
12528 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12529 of X. Similarly fold (X >> C) == 0 into X >= 0. */
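/* For example, for a 32-bit X, (X >> 31) != 0 becomes X < 0 (after
   converting X to a signed type if necessary).  */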
12530 if (TREE_CODE (arg0) == RSHIFT_EXPR
12531 && integer_zerop (arg1)
12532 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12534 tree arg00 = TREE_OPERAND (arg0, 0);
12535 tree arg01 = TREE_OPERAND (arg0, 1);
12536 tree itype = TREE_TYPE (arg00);
12537 if (TREE_INT_CST_HIGH (arg01) == 0
12538 && TREE_INT_CST_LOW (arg01)
12539 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12541 if (TYPE_UNSIGNED (itype))
12543 itype = signed_type_for (itype);
12544 arg00 = fold_convert_loc (loc, itype, arg00);
12546 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12547 type, arg00, build_int_cst (itype, 0));
12551 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12552 if (integer_zerop (arg1)
12553 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12554 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12555 TREE_OPERAND (arg0, 1));
12557 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12558 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12559 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12560 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12561 build_int_cst (TREE_TYPE (arg1), 0));
12562 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12563 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12565 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12566 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12567 build_int_cst (TREE_TYPE (arg1), 0));
12569 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
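/* For example, (X ^ 5) == 12 becomes X == 9.  */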
12570 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12571 && TREE_CODE (arg1) == INTEGER_CST
12572 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12573 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12574 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12575 TREE_OPERAND (arg0, 1), arg1));
12577 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12578 (X & C) == 0 when C is a single bit. */
12579 if (TREE_CODE (arg0) == BIT_AND_EXPR
12580 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12581 && integer_zerop (arg1)
12582 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12584 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12585 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12586 TREE_OPERAND (arg0, 1));
12587 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12591 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12592 constant C is a power of two, i.e. a single bit. */
12593 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12594 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12595 && integer_zerop (arg1)
12596 && integer_pow2p (TREE_OPERAND (arg0, 1))
12597 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12598 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12600 tree arg00 = TREE_OPERAND (arg0, 0);
12601 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12602 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12605 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12606 when C is a power of two, i.e. a single bit. */
12607 if (TREE_CODE (arg0) == BIT_AND_EXPR
12608 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12609 && integer_zerop (arg1)
12610 && integer_pow2p (TREE_OPERAND (arg0, 1))
12611 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12612 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12614 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12615 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12616 arg000, TREE_OPERAND (arg0, 1));
12617 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12618 tem, build_int_cst (TREE_TYPE (tem), 0));
12621 if (integer_zerop (arg1)
12622 && tree_expr_nonzero_p (arg0))
12624 tree res = constant_boolean_node (code==NE_EXPR, type);
12625 return omit_one_operand_loc (loc, type, res, arg0);
12628 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12629 if (TREE_CODE (arg0) == NEGATE_EXPR
12630 && TREE_CODE (arg1) == NEGATE_EXPR)
12631 return fold_build2_loc (loc, code, type,
12632 TREE_OPERAND (arg0, 0),
12633 TREE_OPERAND (arg1, 0));
12635 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12636 if (TREE_CODE (arg0) == BIT_AND_EXPR
12637 && TREE_CODE (arg1) == BIT_AND_EXPR)
12639 tree arg00 = TREE_OPERAND (arg0, 0);
12640 tree arg01 = TREE_OPERAND (arg0, 1);
12641 tree arg10 = TREE_OPERAND (arg1, 0);
12642 tree arg11 = TREE_OPERAND (arg1, 1);
12643 tree itype = TREE_TYPE (arg0);
12645 if (operand_equal_p (arg01, arg11, 0))
12646 return fold_build2_loc (loc, code, type,
12647 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12648 fold_build2_loc (loc,
12649 BIT_XOR_EXPR, itype,
12652 build_int_cst (itype, 0));
12654 if (operand_equal_p (arg01, arg10, 0))
12655 return fold_build2_loc (loc, code, type,
12656 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12657 fold_build2_loc (loc,
12658 BIT_XOR_EXPR, itype,
12661 build_int_cst (itype, 0));
12663 if (operand_equal_p (arg00, arg11, 0))
12664 return fold_build2_loc (loc, code, type,
12665 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12666 fold_build2_loc (loc,
12667 BIT_XOR_EXPR, itype,
12670 build_int_cst (itype, 0));
12672 if (operand_equal_p (arg00, arg10, 0))
12673 return fold_build2_loc (loc, code, type,
12674 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12675 fold_build2_loc (loc,
12676 BIT_XOR_EXPR, itype,
12679 build_int_cst (itype, 0));
12682 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12683 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12685 tree arg00 = TREE_OPERAND (arg0, 0);
12686 tree arg01 = TREE_OPERAND (arg0, 1);
12687 tree arg10 = TREE_OPERAND (arg1, 0);
12688 tree arg11 = TREE_OPERAND (arg1, 1);
12689 tree itype = TREE_TYPE (arg0);
12691 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12692 operand_equal_p guarantees no side-effects so we don't need
12693 to use omit_one_operand on Z. */
12694 if (operand_equal_p (arg01, arg11, 0))
12695 return fold_build2_loc (loc, code, type, arg00, arg10);
12696 if (operand_equal_p (arg01, arg10, 0))
12697 return fold_build2_loc (loc, code, type, arg00, arg11);
12698 if (operand_equal_p (arg00, arg11, 0))
12699 return fold_build2_loc (loc, code, type, arg01, arg10);
12700 if (operand_equal_p (arg00, arg10, 0))
12701 return fold_build2_loc (loc, code, type, arg01, arg11);
12703 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
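/* For example, (X ^ 3) == (Y ^ 5) becomes (X ^ 6) == Y.  */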
12704 if (TREE_CODE (arg01) == INTEGER_CST
12705 && TREE_CODE (arg11) == INTEGER_CST)
12706 return fold_build2_loc (loc, code, type,
12707 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12708 fold_build2_loc (loc,
12709 BIT_XOR_EXPR, itype,
12714 /* Attempt to simplify equality/inequality comparisons of complex
12715 values. Only lower the comparison if the result is known or
12716 can be simplified to a single scalar comparison. */
12717 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12718 || TREE_CODE (arg0) == COMPLEX_CST)
12719 && (TREE_CODE (arg1) == COMPLEX_EXPR
12720 || TREE_CODE (arg1) == COMPLEX_CST))
12722 tree real0, imag0, real1, imag1;
12725 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12727 real0 = TREE_OPERAND (arg0, 0);
12728 imag0 = TREE_OPERAND (arg0, 1);
12732 real0 = TREE_REALPART (arg0);
12733 imag0 = TREE_IMAGPART (arg0);
12736 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12738 real1 = TREE_OPERAND (arg1, 0);
12739 imag1 = TREE_OPERAND (arg1, 1);
12743 real1 = TREE_REALPART (arg1);
12744 imag1 = TREE_IMAGPART (arg1);
12747 rcond = fold_binary_loc (loc, code, type, real0, real1);
12748 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12750 if (integer_zerop (rcond))
12752 if (code == EQ_EXPR)
12753 return omit_two_operands_loc (loc, type, boolean_false_node,
12755 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12759 if (code == NE_EXPR)
12760 return omit_two_operands_loc (loc, type, boolean_true_node,
12762 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12766 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12767 if (icond && TREE_CODE (icond) == INTEGER_CST)
12769 if (integer_zerop (icond))
12771 if (code == EQ_EXPR)
12772 return omit_two_operands_loc (loc, type, boolean_false_node,
12774 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12778 if (code == NE_EXPR)
12779 return omit_two_operands_loc (loc, type, boolean_true_node,
12781 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12792 tem = fold_comparison (loc, code, type, op0, op1);
12793 if (tem != NULL_TREE)
12796 /* Transform comparisons of the form X +- C CMP X. */
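/* For example, when signed overflow is undefined, X + 1 > X folds to
   true and X - 1 >= X folds to false.  */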
12797 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12798 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12799 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12800 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12801 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12802 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12804 tree arg01 = TREE_OPERAND (arg0, 1);
12805 enum tree_code code0 = TREE_CODE (arg0);
12808 if (TREE_CODE (arg01) == REAL_CST)
12809 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12811 is_positive = tree_int_cst_sgn (arg01);
12813 /* (X - c) > X becomes false. */
12814 if (code == GT_EXPR
12815 && ((code0 == MINUS_EXPR && is_positive >= 0)
12816 || (code0 == PLUS_EXPR && is_positive <= 0)))
12818 if (TREE_CODE (arg01) == INTEGER_CST
12819 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12820 fold_overflow_warning (("assuming signed overflow does not "
12821 "occur when assuming that (X - c) > X "
12822 "is always false"),
12823 WARN_STRICT_OVERFLOW_ALL);
12824 return constant_boolean_node (0, type);
12827 /* Likewise (X + c) < X becomes false. */
12828 if (code == LT_EXPR
12829 && ((code0 == PLUS_EXPR && is_positive >= 0)
12830 || (code0 == MINUS_EXPR && is_positive <= 0)))
12832 if (TREE_CODE (arg01) == INTEGER_CST
12833 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12834 fold_overflow_warning (("assuming signed overflow does not "
12835 "occur when assuming that "
12836 "(X + c) < X is always false"),
12837 WARN_STRICT_OVERFLOW_ALL);
12838 return constant_boolean_node (0, type);
12841 /* Convert (X - c) <= X to true. */
12842 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12844 && ((code0 == MINUS_EXPR && is_positive >= 0)
12845 || (code0 == PLUS_EXPR && is_positive <= 0)))
12847 if (TREE_CODE (arg01) == INTEGER_CST
12848 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12849 fold_overflow_warning (("assuming signed overflow does not "
12850 "occur when assuming that "
12851 "(X - c) <= X is always true"),
12852 WARN_STRICT_OVERFLOW_ALL);
12853 return constant_boolean_node (1, type);
12856 /* Convert (X + c) >= X to true. */
12857 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12859 && ((code0 == PLUS_EXPR && is_positive >= 0)
12860 || (code0 == MINUS_EXPR && is_positive <= 0)))
12862 if (TREE_CODE (arg01) == INTEGER_CST
12863 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12864 fold_overflow_warning (("assuming signed overflow does not "
12865 "occur when assuming that "
12866 "(X + c) >= X is always true"),
12867 WARN_STRICT_OVERFLOW_ALL);
12868 return constant_boolean_node (1, type);
12871 if (TREE_CODE (arg01) == INTEGER_CST)
12873 /* Convert X + c > X and X - c < X to true for integers. */
12874 if (code == GT_EXPR
12875 && ((code0 == PLUS_EXPR && is_positive > 0)
12876 || (code0 == MINUS_EXPR && is_positive < 0)))
12878 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12879 fold_overflow_warning (("assuming signed overflow does "
12880 "not occur when assuming that "
12881 "(X + c) > X is always true"),
12882 WARN_STRICT_OVERFLOW_ALL);
12883 return constant_boolean_node (1, type);
12886 if (code == LT_EXPR
12887 && ((code0 == MINUS_EXPR && is_positive > 0)
12888 || (code0 == PLUS_EXPR && is_positive < 0)))
12890 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12891 fold_overflow_warning (("assuming signed overflow does "
12892 "not occur when assuming that "
12893 "(X - c) < X is always true"),
12894 WARN_STRICT_OVERFLOW_ALL);
12895 return constant_boolean_node (1, type);
12898 /* Convert X + c <= X and X - c >= X to false for integers. */
12899 if (code == LE_EXPR
12900 && ((code0 == PLUS_EXPR && is_positive > 0)
12901 || (code0 == MINUS_EXPR && is_positive < 0)))
12903 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12904 fold_overflow_warning (("assuming signed overflow does "
12905 "not occur when assuming that "
12906 "(X + c) <= X is always false"),
12907 WARN_STRICT_OVERFLOW_ALL);
12908 return constant_boolean_node (0, type);
12911 if (code == GE_EXPR
12912 && ((code0 == MINUS_EXPR && is_positive > 0)
12913 || (code0 == PLUS_EXPR && is_positive < 0)))
12915 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12916 fold_overflow_warning (("assuming signed overflow does "
12917 "not occur when assuming that "
12918 "(X - c) >= X is always false"),
12919 WARN_STRICT_OVERFLOW_ALL);
12920 return constant_boolean_node (0, type);
12925 /* Comparisons with the highest or lowest possible integer of
12926 the specified precision will have known values. */
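/* For example, for unsigned char X, X > 255 folds to false, X <= 255
   folds to true, and X >= 255 becomes X == 255.  */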
12928 tree arg1_type = TREE_TYPE (arg1);
12929 unsigned int width = TYPE_PRECISION (arg1_type);
12931 if (TREE_CODE (arg1) == INTEGER_CST
12932 && width <= 2 * HOST_BITS_PER_WIDE_INT
12933 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12935 HOST_WIDE_INT signed_max_hi;
12936 unsigned HOST_WIDE_INT signed_max_lo;
12937 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12939 if (width <= HOST_BITS_PER_WIDE_INT)
12941 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12946 if (TYPE_UNSIGNED (arg1_type))
12948 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12954 max_lo = signed_max_lo;
12955 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12961 width -= HOST_BITS_PER_WIDE_INT;
12962 signed_max_lo = -1;
12963 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12968 if (TYPE_UNSIGNED (arg1_type))
12970 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12975 max_hi = signed_max_hi;
12976 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12980 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12981 && TREE_INT_CST_LOW (arg1) == max_lo)
12985 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12988 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12991 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12994 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12996 /* The GE_EXPR and LT_EXPR cases above are not normally
12997 reached because of previous transformations. */
13002 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13004 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13008 arg1 = const_binop (PLUS_EXPR, arg1,
13009 build_int_cst (TREE_TYPE (arg1), 1));
13010 return fold_build2_loc (loc, EQ_EXPR, type,
13011 fold_convert_loc (loc,
13012 TREE_TYPE (arg1), arg0),
13015 arg1 = const_binop (PLUS_EXPR, arg1,
13016 build_int_cst (TREE_TYPE (arg1), 1));
13017 return fold_build2_loc (loc, NE_EXPR, type,
13018 fold_convert_loc (loc, TREE_TYPE (arg1),
13024 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13026 && TREE_INT_CST_LOW (arg1) == min_lo)
13030 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13033 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13036 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13039 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13044 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13046 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13050 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13051 return fold_build2_loc (loc, NE_EXPR, type,
13052 fold_convert_loc (loc,
13053 TREE_TYPE (arg1), arg0),
13056 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13057 return fold_build2_loc (loc, EQ_EXPR, type,
13058 fold_convert_loc (loc, TREE_TYPE (arg1),
13065 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13066 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13067 && TYPE_UNSIGNED (arg1_type)
13068 /* We will flip the signedness of the comparison operator
13069 associated with the mode of arg1, so the sign bit is
13070 specified by this mode. Check that arg1 is the signed
13071 max associated with this sign bit. */
13072 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13073 /* signed_type does not work on pointer types. */
13074 && INTEGRAL_TYPE_P (arg1_type))
13076 /* The following case also applies to X < signed_max+1
13077 and X >= signed_max+1 because of previous transformations. */
13078 if (code == LE_EXPR || code == GT_EXPR)
13081 st = signed_type_for (TREE_TYPE (arg1));
13082 return fold_build2_loc (loc,
13083 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13084 type, fold_convert_loc (loc, st, arg0),
13085 build_int_cst (st, 0));
13091 /* If we are comparing an ABS_EXPR with a constant, we can
13092 convert all the cases into explicit comparisons, but they may
13093 well not be faster than doing the ABS and one comparison.
13094 But ABS (X) <= C is a range comparison, which becomes a subtraction
13095 and a comparison, and is probably faster. */
13096 if (code == LE_EXPR
13097 && TREE_CODE (arg1) == INTEGER_CST
13098 && TREE_CODE (arg0) == ABS_EXPR
13099 && ! TREE_SIDE_EFFECTS (arg0)
13100 && (0 != (tem = negate_expr (arg1)))
13101 && TREE_CODE (tem) == INTEGER_CST
13102 && !TREE_OVERFLOW (tem))
13103 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13104 build2 (GE_EXPR, type,
13105 TREE_OPERAND (arg0, 0), tem),
13106 build2 (LE_EXPR, type,
13107 TREE_OPERAND (arg0, 0), arg1));
13109 /* Convert ABS_EXPR<x> >= 0 to true. */
13110 strict_overflow_p = false;
13111 if (code == GE_EXPR
13112 && (integer_zerop (arg1)
13113 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13114 && real_zerop (arg1)))
13115 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13117 if (strict_overflow_p)
13118 fold_overflow_warning (("assuming signed overflow does not occur "
13119 "when simplifying comparison of "
13120 "absolute value and zero"),
13121 WARN_STRICT_OVERFLOW_CONDITIONAL);
13122 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13125 /* Convert ABS_EXPR<x> < 0 to false. */
13126 strict_overflow_p = false;
13127 if (code == LT_EXPR
13128 && (integer_zerop (arg1) || real_zerop (arg1))
13129 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13131 if (strict_overflow_p)
13132 fold_overflow_warning (("assuming signed overflow does not occur "
13133 "when simplifying comparison of "
13134 "absolute value and zero"),
13135 WARN_STRICT_OVERFLOW_CONDITIONAL);
13136 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13139 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13140 and similarly for >= into !=. */
13141 if ((code == LT_EXPR || code == GE_EXPR)
13142 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13143 && TREE_CODE (arg1) == LSHIFT_EXPR
13144 && integer_onep (TREE_OPERAND (arg1, 0)))
13146 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13147 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13148 TREE_OPERAND (arg1, 1)),
13149 build_int_cst (TREE_TYPE (arg0), 0));
13150 goto fold_binary_exit;
13153 if ((code == LT_EXPR || code == GE_EXPR)
13154 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13155 && CONVERT_EXPR_P (arg1)
13156 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13157 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13159 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13160 fold_convert_loc (loc, TREE_TYPE (arg0),
13161 build2 (RSHIFT_EXPR,
13162 TREE_TYPE (arg0), arg0,
13163 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13165 build_int_cst (TREE_TYPE (arg0), 0));
13166 goto fold_binary_exit;
13171 case UNORDERED_EXPR:
13179 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13181 t1 = fold_relational_const (code, type, arg0, arg1);
13182 if (t1 != NULL_TREE)
13186 /* If the first operand is NaN, the result is constant. */
13187 if (TREE_CODE (arg0) == REAL_CST
13188 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13189 && (code != LTGT_EXPR || ! flag_trapping_math))
13191 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13192 ? integer_zero_node
13193 : integer_one_node;
13194 return omit_one_operand_loc (loc, type, t1, arg1);
13197 /* If the second operand is NaN, the result is constant. */
13198 if (TREE_CODE (arg1) == REAL_CST
13199 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13200 && (code != LTGT_EXPR || ! flag_trapping_math))
13202 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13203 ? integer_zero_node
13204 : integer_one_node;
13205 return omit_one_operand_loc (loc, type, t1, arg0);
13208 /* Simplify unordered comparison of something with itself. */
13209 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13210 && operand_equal_p (arg0, arg1, 0))
13211 return constant_boolean_node (1, type);
13213 if (code == LTGT_EXPR
13214 && !flag_trapping_math
13215 && operand_equal_p (arg0, arg1, 0))
13216 return constant_boolean_node (0, type);
13218 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13220 tree targ0 = strip_float_extensions (arg0);
13221 tree targ1 = strip_float_extensions (arg1);
13222 tree newtype = TREE_TYPE (targ0);
13224 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13225 newtype = TREE_TYPE (targ1);
13227 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13228 return fold_build2_loc (loc, code, type,
13229 fold_convert_loc (loc, newtype, targ0),
13230 fold_convert_loc (loc, newtype, targ1));
13235 case COMPOUND_EXPR:
13236 /* When pedantic, a compound expression can be neither an lvalue
13237 nor an integer constant expression. */
13238 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13240 /* Don't let (0, 0) be a null pointer constant. */
13241 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13242 : fold_convert_loc (loc, type, arg1);
13243 return pedantic_non_lvalue_loc (loc, tem);
13246 if ((TREE_CODE (arg0) == REAL_CST
13247 && TREE_CODE (arg1) == REAL_CST)
13248 || (TREE_CODE (arg0) == INTEGER_CST
13249 && TREE_CODE (arg1) == INTEGER_CST))
13250 return build_complex (type, arg0, arg1);
13254 /* An ASSERT_EXPR should never be passed to fold_binary. */
13255 gcc_unreachable ();
13259 } /* switch (code) */
13261 protected_set_expr_location (tem, loc);
13265 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13266 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13270 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13272 switch (TREE_CODE (*tp))
13278 *walk_subtrees = 0;
13280 /* ... fall through ... */
13287 /* Return whether the sub-tree ST contains a label which is accessible from
13288 outside the sub-tree. */
13291 contains_label_p (tree st)
13294 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13297 /* Fold a ternary expression of code CODE and type TYPE with operands
13298 OP0, OP1, and OP2. Return the folded expression if folding is
13299 successful. Otherwise, return NULL_TREE. */
13302 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13303 tree op0, tree op1, tree op2)
13306 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13307 enum tree_code_class kind = TREE_CODE_CLASS (code);
13309 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13310 && TREE_CODE_LENGTH (code) == 3);
13312 /* Strip any conversions that don't change the mode. This is safe
13313 for every expression, except for a comparison expression because
13314 its signedness is derived from its operands. So, in the latter
13315 case, only strip conversions that don't change the signedness.
13317 Note that this is done as an internal manipulation within the
13318 constant folder, in order to find the simplest representation of
13319 the arguments so that their form can be studied. In any case,
13320 the appropriate type conversions should be put back in the tree
13321 that will get out of the constant folder. */
13342 case COMPONENT_REF:
13343 if (TREE_CODE (arg0) == CONSTRUCTOR
13344 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13346 unsigned HOST_WIDE_INT idx;
13348 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13355 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13356 so all simple results must be passed through pedantic_non_lvalue. */
13357 if (TREE_CODE (arg0) == INTEGER_CST)
13359 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13360 tem = integer_zerop (arg0) ? op2 : op1;
13361 /* Only optimize constant conditions when the selected branch
13362 has the same type as the COND_EXPR. This avoids optimizing
13363 away "c ? x : throw", where the throw has a void type.
13364 Avoid throwing away that operand which contains a label. */
13365 if ((!TREE_SIDE_EFFECTS (unused_op)
13366 || !contains_label_p (unused_op))
13367 && (! VOID_TYPE_P (TREE_TYPE (tem))
13368 || VOID_TYPE_P (type)))
13369 return pedantic_non_lvalue_loc (loc, tem);
13372 if (operand_equal_p (arg1, op2, 0))
13373 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13375 /* If we have A op B ? A : C, we may be able to convert this to a
13376 simpler expression, depending on the operation and the values
13377 of B and C. Signed zeros prevent all of these transformations,
13378 for reasons given above each one.
13380 Also try swapping the arguments and inverting the conditional. */
13381 if (COMPARISON_CLASS_P (arg0)
13382 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13383 arg1, TREE_OPERAND (arg0, 1))
13384 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13386 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13391 if (COMPARISON_CLASS_P (arg0)
13392 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13394 TREE_OPERAND (arg0, 1))
13395 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13397 tem = fold_truth_not_expr (loc, arg0);
13398 if (tem && COMPARISON_CLASS_P (tem))
13400 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13406 /* If the second operand is simpler than the third, swap them
13407 since that produces better jump optimization results. */
13408 if (truth_value_p (TREE_CODE (arg0))
13409 && tree_swap_operands_p (op1, op2, false))
13411 /* See if this can be inverted. If it can't, possibly because
13412 it was a floating-point inequality comparison, don't do anything. */
13414 tem = fold_truth_not_expr (loc, arg0);
13416 return fold_build3_loc (loc, code, type, tem, op2, op1);
13419 /* Convert A ? 1 : 0 to simply A. */
13420 if (integer_onep (op1)
13421 && integer_zerop (op2)
13422 /* If we try to convert OP0 to our type, the
13423 call to fold will try to move the conversion inside
13424 a COND, which will recurse. In that case, the COND_EXPR
13425 is probably the best choice, so leave it alone. */
13426 && type == TREE_TYPE (arg0))
13427 return pedantic_non_lvalue_loc (loc, arg0);
13429 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13430 over COND_EXPR in cases such as floating point comparisons. */
13431 if (integer_zerop (op1)
13432 && integer_onep (op2)
13433 && truth_value_p (TREE_CODE (arg0)))
13434 return pedantic_non_lvalue_loc (loc,
13435 fold_convert_loc (loc, type,
13436 invert_truthvalue_loc (loc,
13439 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
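/* For example, for a 32-bit int A, A < 0 ? INT_MIN : 0 folds to
   A & INT_MIN.  */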
13440 if (TREE_CODE (arg0) == LT_EXPR
13441 && integer_zerop (TREE_OPERAND (arg0, 1))
13442 && integer_zerop (op2)
13443 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13445 /* sign_bit_p only checks ARG1 bits within A's precision.
13446 If <sign bit of A> has wider type than A, bits outside
13447 of A's precision in <sign bit of A> need to be checked.
13448 If they are all 0, this optimization needs to be done
13449 in unsigned A's type, if they are all 1 in signed A's type,
13450 otherwise this can't be done. */
13451 if (TYPE_PRECISION (TREE_TYPE (tem))
13452 < TYPE_PRECISION (TREE_TYPE (arg1))
13453 && TYPE_PRECISION (TREE_TYPE (tem))
13454 < TYPE_PRECISION (type))
13456 unsigned HOST_WIDE_INT mask_lo;
13457 HOST_WIDE_INT mask_hi;
13458 int inner_width, outer_width;
13461 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13462 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13463 if (outer_width > TYPE_PRECISION (type))
13464 outer_width = TYPE_PRECISION (type);
13466 if (outer_width > HOST_BITS_PER_WIDE_INT)
13468 mask_hi = ((unsigned HOST_WIDE_INT) -1
13469 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13475 mask_lo = ((unsigned HOST_WIDE_INT) -1
13476 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13478 if (inner_width > HOST_BITS_PER_WIDE_INT)
13480 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13481 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13485 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13486 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13488 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13489 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13491 tem_type = signed_type_for (TREE_TYPE (tem));
13492 tem = fold_convert_loc (loc, tem_type, tem);
13494 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13495 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13497 tem_type = unsigned_type_for (TREE_TYPE (tem));
13498 tem = fold_convert_loc (loc, tem_type, tem);
13506 fold_convert_loc (loc, type,
13507 fold_build2_loc (loc, BIT_AND_EXPR,
13508 TREE_TYPE (tem), tem,
13509 fold_convert_loc (loc,
13514 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13515 already handled above. */
13516 if (TREE_CODE (arg0) == BIT_AND_EXPR
13517 && integer_onep (TREE_OPERAND (arg0, 1))
13518 && integer_zerop (op2)
13519 && integer_pow2p (arg1))
13521 tree tem = TREE_OPERAND (arg0, 0);
13523 if (TREE_CODE (tem) == RSHIFT_EXPR
13524 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13525 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13526 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13527 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13528 TREE_OPERAND (tem, 0), arg1);
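/* Illustrative case: ((a >> 3) & 1) ? 8 : 0 is rewritten here as a & 8,
   removing the conditional entirely.  */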
13531 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13532 is probably obsolete because the first operand should be a
13533 truth value (that's why we have the two cases above), but let's
13534 leave it in until we can confirm this for all front-ends. */
13535 if (integer_zerop (op2)
13536 && TREE_CODE (arg0) == NE_EXPR
13537 && integer_zerop (TREE_OPERAND (arg0, 1))
13538 && integer_pow2p (arg1)
13539 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13540 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13541 arg1, OEP_ONLY_CONST))
13542 return pedantic_non_lvalue_loc (loc,
13543 fold_convert_loc (loc, type,
13544 TREE_OPERAND (arg0, 0)));
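/* Illustrative case: ((a & 4) != 0) ? 4 : 0 folds to a & 4, since 4 is a
   power of two and matches the mask tested by the comparison.  */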
13546 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13547 if (integer_zerop (op2)
13548 && truth_value_p (TREE_CODE (arg0))
13549 && truth_value_p (TREE_CODE (arg1)))
13550 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13551 fold_convert_loc (loc, type, arg0),
13554 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13555 if (integer_onep (op2)
13556 && truth_value_p (TREE_CODE (arg0))
13557 && truth_value_p (TREE_CODE (arg1)))
13559 /* Only perform transformation if ARG0 is easily inverted. */
13560 tem = fold_truth_not_expr (loc, arg0);
13562 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13563 fold_convert_loc (loc, type, tem),
13567 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13568 if (integer_zerop (arg1)
13569 && truth_value_p (TREE_CODE (arg0))
13570 && truth_value_p (TREE_CODE (op2)))
13572 /* Only perform transformation if ARG0 is easily inverted. */
13573 tem = fold_truth_not_expr (loc, arg0);
13575 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13576 fold_convert_loc (loc, type, tem),
13580 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13581 if (integer_onep (arg1)
13582 && truth_value_p (TREE_CODE (arg0))
13583 && truth_value_p (TREE_CODE (op2)))
13584 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13585 fold_convert_loc (loc, type, arg0),
13591 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13592 of fold_ternary on them. */
13593 gcc_unreachable ();
13595 case BIT_FIELD_REF:
13596 if ((TREE_CODE (arg0) == VECTOR_CST
13597 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13598 && type == TREE_TYPE (TREE_TYPE (arg0)))
13600 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13601 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13604 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13605 && (idx % width) == 0
13606 && (idx = idx / width)
13607 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13609 tree elements = NULL_TREE;
13611 if (TREE_CODE (arg0) == VECTOR_CST)
13612 elements = TREE_VECTOR_CST_ELTS (arg0);
13615 unsigned HOST_WIDE_INT idx;
13618 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13619 elements = tree_cons (NULL_TREE, value, elements);
13621 while (idx-- > 0 && elements)
13622 elements = TREE_CHAIN (elements);
13624 return TREE_VALUE (elements);
13626 return build_zero_cst (type);
13630 /* A bit-field-ref that referenced the full argument can be stripped. */
13631 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13632 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13633 && integer_zerop (op2))
13634 return fold_convert_loc (loc, type, arg0);
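/* Illustrative case: BIT_FIELD_REF <a, 32, 0> where A is a 32-bit integer
   reduces to a plain conversion of A to TYPE.  */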
13639 /* For integers we can decompose the FMA if possible. */
13640 if (TREE_CODE (arg0) == INTEGER_CST
13641 && TREE_CODE (arg1) == INTEGER_CST)
13642 return fold_build2_loc (loc, PLUS_EXPR, type,
13643 const_binop (MULT_EXPR, arg0, arg1), arg2);
13644 if (integer_zerop (arg2))
13645 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13647 return fold_fma (loc, type, arg0, arg1, arg2);
13651 } /* switch (code) */
13654 /* Perform constant folding and related simplification of EXPR.
13655 The related simplifications include x*1 => x, x*0 => 0, etc.,
13656 and application of the associative law.
13657 NOP_EXPR conversions may be removed freely (as long as we
13658 are careful not to change the type of the overall expression).
13659 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13660 but we can constant-fold them if they have constant operands. */
13662 #ifdef ENABLE_FOLD_CHECKING
13663 # define fold(x) fold_1 (x)
13664 static tree fold_1 (tree);
13670 const tree t = expr;
13671 enum tree_code code = TREE_CODE (t);
13672 enum tree_code_class kind = TREE_CODE_CLASS (code);
13674 location_t loc = EXPR_LOCATION (expr);
13676 /* Return right away if a constant. */
13677 if (kind == tcc_constant)
13680 /* CALL_EXPR-like objects with variable numbers of operands are
13681 treated specially. */
13682 if (kind == tcc_vl_exp)
13684 if (code == CALL_EXPR)
13686 tem = fold_call_expr (loc, expr, false);
13687 return tem ? tem : expr;
13692 if (IS_EXPR_CODE_CLASS (kind))
13694 tree type = TREE_TYPE (t);
13695 tree op0, op1, op2;
13697 switch (TREE_CODE_LENGTH (code))
13700 op0 = TREE_OPERAND (t, 0);
13701 tem = fold_unary_loc (loc, code, type, op0);
13702 return tem ? tem : expr;
13704 op0 = TREE_OPERAND (t, 0);
13705 op1 = TREE_OPERAND (t, 1);
13706 tem = fold_binary_loc (loc, code, type, op0, op1);
13707 return tem ? tem : expr;
13709 op0 = TREE_OPERAND (t, 0);
13710 op1 = TREE_OPERAND (t, 1);
13711 op2 = TREE_OPERAND (t, 2);
13712 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13713 return tem ? tem : expr;
13723 tree op0 = TREE_OPERAND (t, 0);
13724 tree op1 = TREE_OPERAND (t, 1);
13726 if (TREE_CODE (op1) == INTEGER_CST
13727 && TREE_CODE (op0) == CONSTRUCTOR
13728 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13730 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13731 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13732 unsigned HOST_WIDE_INT begin = 0;
13734 /* Find a matching index by means of a binary search. */
13735 while (begin != end)
13737 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13738 tree index = VEC_index (constructor_elt, elts, middle)->index;
13740 if (TREE_CODE (index) == INTEGER_CST
13741 && tree_int_cst_lt (index, op1))
13742 begin = middle + 1;
13743 else if (TREE_CODE (index) == INTEGER_CST
13744 && tree_int_cst_lt (op1, index))
13746 else if (TREE_CODE (index) == RANGE_EXPR
13747 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13748 begin = middle + 1;
13749 else if (TREE_CODE (index) == RANGE_EXPR
13750 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13753 return VEC_index (constructor_elt, elts, middle)->value;
13761 return fold (DECL_INITIAL (t));
13765 } /* switch (code) */
13768 #ifdef ENABLE_FOLD_CHECKING
13771 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13772 static void fold_check_failed (const_tree, const_tree);
13773 void print_fold_checksum (const_tree);
13775 /* When --enable-checking=fold is in effect, compute a digest of EXPR
13776 before and after the actual fold call to verify that fold did not
13777 accidentally change the original expr. */
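/* The digest is an MD5 checksum over every node reachable from EXPR,
   computed by fold_checksum_tree below; if the before/after checksums
   differ, fold_check_failed reports an internal error.  */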
13783 struct md5_ctx ctx;
13784 unsigned char checksum_before[16], checksum_after[16];
13787 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13788 md5_init_ctx (&ctx);
13789 fold_checksum_tree (expr, &ctx, ht);
13790 md5_finish_ctx (&ctx, checksum_before);
13793 ret = fold_1 (expr);
13795 md5_init_ctx (&ctx);
13796 fold_checksum_tree (expr, &ctx, ht);
13797 md5_finish_ctx (&ctx, checksum_after);
13800 if (memcmp (checksum_before, checksum_after, 16))
13801 fold_check_failed (expr, ret);
13807 print_fold_checksum (const_tree expr)
13809 struct md5_ctx ctx;
13810 unsigned char checksum[16], cnt;
13813 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13814 md5_init_ctx (&ctx);
13815 fold_checksum_tree (expr, &ctx, ht);
13816 md5_finish_ctx (&ctx, checksum);
13818 for (cnt = 0; cnt < 16; ++cnt)
13819 fprintf (stderr, "%02x", checksum[cnt]);
13820 putc ('\n', stderr);
13824 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13826 internal_error ("fold check: original tree changed by fold");
13830 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13833 enum tree_code code;
13834 union tree_node buf;
13839 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13840 <= sizeof (struct tree_function_decl))
13841 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
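/* The local BUF below is a union tree_node used as scratch space for
   memcpy'd copies of EXPR; the assertion above apparently guarantees that
   the node variants we may copy into it fit within that union.  */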
13844 slot = (void **) htab_find_slot (ht, expr, INSERT);
13847 *slot = CONST_CAST_TREE (expr);
13848 code = TREE_CODE (expr);
13849 if (TREE_CODE_CLASS (code) == tcc_declaration
13850 && DECL_ASSEMBLER_NAME_SET_P (expr))
13852 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13853 memcpy ((char *) &buf, expr, tree_size (expr));
13854 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13855 expr = (tree) &buf;
13857 else if (TREE_CODE_CLASS (code) == tcc_type
13858 && (TYPE_POINTER_TO (expr)
13859 || TYPE_REFERENCE_TO (expr)
13860 || TYPE_CACHED_VALUES_P (expr)
13861 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13862 || TYPE_NEXT_VARIANT (expr)))
13864 /* Allow these fields to be modified. */
13866 memcpy ((char *) &buf, expr, tree_size (expr));
13867 expr = tmp = (tree) &buf;
13868 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13869 TYPE_POINTER_TO (tmp) = NULL;
13870 TYPE_REFERENCE_TO (tmp) = NULL;
13871 TYPE_NEXT_VARIANT (tmp) = NULL;
13872 if (TYPE_CACHED_VALUES_P (tmp))
13874 TYPE_CACHED_VALUES_P (tmp) = 0;
13875 TYPE_CACHED_VALUES (tmp) = NULL;
13878 md5_process_bytes (expr, tree_size (expr), ctx);
13879 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13880 if (TREE_CODE_CLASS (code) != tcc_type
13881 && TREE_CODE_CLASS (code) != tcc_declaration
13882 && code != TREE_LIST
13883 && code != SSA_NAME)
13884 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13885 switch (TREE_CODE_CLASS (code))
13891 md5_process_bytes (TREE_STRING_POINTER (expr),
13892 TREE_STRING_LENGTH (expr), ctx);
13895 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13896 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13899 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13905 case tcc_exceptional:
13909 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13910 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13911 expr = TREE_CHAIN (expr);
13912 goto recursive_label;
13915 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13916 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13922 case tcc_expression:
13923 case tcc_reference:
13924 case tcc_comparison:
13927 case tcc_statement:
13929 len = TREE_OPERAND_LENGTH (expr);
13930 for (i = 0; i < len; ++i)
13931 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13933 case tcc_declaration:
13934 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13935 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13936 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13938 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13939 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13940 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13941 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13942 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13944 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13945 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13947 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13949 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13950 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13951 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13955 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13956 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13957 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13958 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13959 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13960 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13961 if (INTEGRAL_TYPE_P (expr)
13962 || SCALAR_FLOAT_TYPE_P (expr))
13964 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13965 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13967 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13968 if (TREE_CODE (expr) == RECORD_TYPE
13969 || TREE_CODE (expr) == UNION_TYPE
13970 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13971 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13972 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13979 /* Helper function for outputting the checksum of a tree T. When
13980 debugging with gdb, you can "define mynext" to be "next" followed
13981 by "call debug_fold_checksum (op0)", then just trace down till the
13984 DEBUG_FUNCTION void
13985 debug_fold_checksum (const_tree t)
13988 unsigned char checksum[16];
13989 struct md5_ctx ctx;
13990 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13992 md5_init_ctx (&ctx);
13993 fold_checksum_tree (t, &ctx, ht);
13994 md5_finish_ctx (&ctx, checksum);
13997 for (i = 0; i < 16; i++)
13998 fprintf (stderr, "%d ", checksum[i]);
14000 fprintf (stderr, "\n");
14005 /* Fold a unary tree expression with code CODE of type TYPE with an
14006 operand OP0. LOC is the location of the resulting expression.
14007 Return a folded expression if successful. Otherwise, return a tree
14008 expression with code CODE of type TYPE with an operand OP0. */
14011 fold_build1_stat_loc (location_t loc,
14012 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14015 #ifdef ENABLE_FOLD_CHECKING
14016 unsigned char checksum_before[16], checksum_after[16];
14017 struct md5_ctx ctx;
14020 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14021 md5_init_ctx (&ctx);
14022 fold_checksum_tree (op0, &ctx, ht);
14023 md5_finish_ctx (&ctx, checksum_before);
14027 tem = fold_unary_loc (loc, code, type, op0);
14030 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14031 SET_EXPR_LOCATION (tem, loc);
14034 #ifdef ENABLE_FOLD_CHECKING
14035 md5_init_ctx (&ctx);
14036 fold_checksum_tree (op0, &ctx, ht);
14037 md5_finish_ctx (&ctx, checksum_after);
14040 if (memcmp (checksum_before, checksum_after, 16))
14041 fold_check_failed (op0, tem);
14046 /* Fold a binary tree expression with code CODE of type TYPE with
14047 operands OP0 and OP1. LOC is the location of the resulting
14048 expression. Return a folded expression if successful. Otherwise,
14049 return a tree expression with code CODE of type TYPE with operands
14053 fold_build2_stat_loc (location_t loc,
14054 enum tree_code code, tree type, tree op0, tree op1
14058 #ifdef ENABLE_FOLD_CHECKING
14059 unsigned char checksum_before_op0[16],
14060 checksum_before_op1[16],
14061 checksum_after_op0[16],
14062 checksum_after_op1[16];
14063 struct md5_ctx ctx;
14066 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14067 md5_init_ctx (&ctx);
14068 fold_checksum_tree (op0, &ctx, ht);
14069 md5_finish_ctx (&ctx, checksum_before_op0);
14072 md5_init_ctx (&ctx);
14073 fold_checksum_tree (op1, &ctx, ht);
14074 md5_finish_ctx (&ctx, checksum_before_op1);
14078 tem = fold_binary_loc (loc, code, type, op0, op1);
14081 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14082 SET_EXPR_LOCATION (tem, loc);
14085 #ifdef ENABLE_FOLD_CHECKING
14086 md5_init_ctx (&ctx);
14087 fold_checksum_tree (op0, &ctx, ht);
14088 md5_finish_ctx (&ctx, checksum_after_op0);
14091 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14092 fold_check_failed (op0, tem);
14094 md5_init_ctx (&ctx);
14095 fold_checksum_tree (op1, &ctx, ht);
14096 md5_finish_ctx (&ctx, checksum_after_op1);
14099 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14100 fold_check_failed (op1, tem);
14105 /* Fold a ternary tree expression with code CODE of type TYPE with
14106 operands OP0, OP1, and OP2. Return a folded expression if
14107 successful. Otherwise, return a tree expression with code CODE of
14108 type TYPE with operands OP0, OP1, and OP2. */
14111 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14112 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14115 #ifdef ENABLE_FOLD_CHECKING
14116 unsigned char checksum_before_op0[16],
14117 checksum_before_op1[16],
14118 checksum_before_op2[16],
14119 checksum_after_op0[16],
14120 checksum_after_op1[16],
14121 checksum_after_op2[16];
14122 struct md5_ctx ctx;
14125 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14126 md5_init_ctx (&ctx);
14127 fold_checksum_tree (op0, &ctx, ht);
14128 md5_finish_ctx (&ctx, checksum_before_op0);
14131 md5_init_ctx (&ctx);
14132 fold_checksum_tree (op1, &ctx, ht);
14133 md5_finish_ctx (&ctx, checksum_before_op1);
14136 md5_init_ctx (&ctx);
14137 fold_checksum_tree (op2, &ctx, ht);
14138 md5_finish_ctx (&ctx, checksum_before_op2);
14142 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14143 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14146 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14147 SET_EXPR_LOCATION (tem, loc);
14150 #ifdef ENABLE_FOLD_CHECKING
14151 md5_init_ctx (&ctx);
14152 fold_checksum_tree (op0, &ctx, ht);
14153 md5_finish_ctx (&ctx, checksum_after_op0);
14156 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14157 fold_check_failed (op0, tem);
14159 md5_init_ctx (&ctx);
14160 fold_checksum_tree (op1, &ctx, ht);
14161 md5_finish_ctx (&ctx, checksum_after_op1);
14164 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14165 fold_check_failed (op1, tem);
14167 md5_init_ctx (&ctx);
14168 fold_checksum_tree (op2, &ctx, ht);
14169 md5_finish_ctx (&ctx, checksum_after_op2);
14172 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14173 fold_check_failed (op2, tem);
14178 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14179 arguments in ARGARRAY, and a null static chain.
14180 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14181 of type TYPE from the given operands as constructed by build_call_array. */
14184 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14185 int nargs, tree *argarray)
14188 #ifdef ENABLE_FOLD_CHECKING
14189 unsigned char checksum_before_fn[16],
14190 checksum_before_arglist[16],
14191 checksum_after_fn[16],
14192 checksum_after_arglist[16];
14193 struct md5_ctx ctx;
14197 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14198 md5_init_ctx (&ctx);
14199 fold_checksum_tree (fn, &ctx, ht);
14200 md5_finish_ctx (&ctx, checksum_before_fn);
14203 md5_init_ctx (&ctx);
14204 for (i = 0; i < nargs; i++)
14205 fold_checksum_tree (argarray[i], &ctx, ht);
14206 md5_finish_ctx (&ctx, checksum_before_arglist);
14210 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14212 #ifdef ENABLE_FOLD_CHECKING
14213 md5_init_ctx (&ctx);
14214 fold_checksum_tree (fn, &ctx, ht);
14215 md5_finish_ctx (&ctx, checksum_after_fn);
14218 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14219 fold_check_failed (fn, tem);
14221 md5_init_ctx (&ctx);
14222 for (i = 0; i < nargs; i++)
14223 fold_checksum_tree (argarray[i], &ctx, ht);
14224 md5_finish_ctx (&ctx, checksum_after_arglist);
14227 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14228 fold_check_failed (NULL_TREE, tem);
14233 /* Perform constant folding and related simplification of initializer
14234 expression EXPR. These behave identically to "fold_buildN" but ignore
14235 potential run-time traps and exceptions that fold must preserve. */
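/* Hypothetical usage sketch: a caller folding a static initializer would
   invoke fold_build2_initializer_loc (loc, PLUS_EXPR, type, a, b) exactly
   as it would invoke fold_build2_loc.  */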
14237 #define START_FOLD_INIT \
14238 int saved_signaling_nans = flag_signaling_nans;\
14239 int saved_trapping_math = flag_trapping_math;\
14240 int saved_rounding_math = flag_rounding_math;\
14241 int saved_trapv = flag_trapv;\
14242 int saved_folding_initializer = folding_initializer;\
14243 flag_signaling_nans = 0;\
14244 flag_trapping_math = 0;\
14245 flag_rounding_math = 0;\
14247 folding_initializer = 1;
14249 #define END_FOLD_INIT \
14250 flag_signaling_nans = saved_signaling_nans;\
14251 flag_trapping_math = saved_trapping_math;\
14252 flag_rounding_math = saved_rounding_math;\
14253 flag_trapv = saved_trapv;\
14254 folding_initializer = saved_folding_initializer;
14257 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14258 tree type, tree op)
14263 result = fold_build1_loc (loc, code, type, op);
14270 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14271 tree type, tree op0, tree op1)
14276 result = fold_build2_loc (loc, code, type, op0, op1);
14283 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14284 tree type, tree op0, tree op1, tree op2)
14289 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14296 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14297 int nargs, tree *argarray)
14302 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14308 #undef START_FOLD_INIT
14309 #undef END_FOLD_INIT
14311 /* Determine whether the first argument is a multiple of the second argument.
14312 Return 0 if it is not, or if we cannot easily determine it to be.
14314 An example of the sort of thing we care about (at this point; this routine
14315 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14316 fold cases do now) is discovering that
14318 SAVE_EXPR (I) * SAVE_EXPR (J * 8)   is a multiple of   SAVE_EXPR (J * 8)
14324 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14326 This code also handles discovering that
14328 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14330 is a multiple of 8 so we don't have to worry about dealing with a
14331 possible remainder.
14333 Note that we *look* inside a SAVE_EXPR only to determine how it was
14334 calculated; it is not safe for fold to do much of anything else with the
14335 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14336 at run time. For example, the latter example above *cannot* be implemented
14337 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14338 evaluation time of the original SAVE_EXPR is not necessarily the same at
14339 the time the new expression is evaluated. The only optimization of this
14340 sort that would be valid is changing
14342 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)   divided by 8 to
14346 SAVE_EXPR (I) * SAVE_EXPR (J)
14348 (where the same SAVE_EXPR (J) is used in the original and the
14349 transformed version). */
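/* Illustrative calls (operands assumed for the example):
   multiple_of_p (sizetype, J * 8, 8) is 1, because the MULT_EXPR case
   matches the constant factor, whereas multiple_of_p (sizetype, I + 4, 8)
   is 0, because only one addend is known to be a multiple of 8.  */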
14352 multiple_of_p (tree type, const_tree top, const_tree bottom)
14354 if (operand_equal_p (top, bottom, 0))
14357 if (TREE_CODE (type) != INTEGER_TYPE)
14360 switch (TREE_CODE (top))
14363 /* Bitwise and provides a power of two multiple. If the mask is
14364 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14365 if (!integer_pow2p (bottom))
14370 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14371 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14375 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14376 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14379 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14383 op1 = TREE_OPERAND (top, 1);
14384 /* const_binop may not detect overflow correctly,
14385 so check for it explicitly here. */
14386 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14387 > TREE_INT_CST_LOW (op1)
14388 && TREE_INT_CST_HIGH (op1) == 0
14389 && 0 != (t1 = fold_convert (type,
14390 const_binop (LSHIFT_EXPR,
14393 && !TREE_OVERFLOW (t1))
14394 return multiple_of_p (type, t1, bottom);
14399 /* Can't handle conversions from non-integral or wider integral type. */
14400 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14401 || (TYPE_PRECISION (type)
14402 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14405 /* .. fall through ... */
14408 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14411 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14412 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14415 if (TREE_CODE (bottom) != INTEGER_CST
14416 || integer_zerop (bottom)
14417 || (TYPE_UNSIGNED (type)
14418 && (tree_int_cst_sgn (top) < 0
14419 || tree_int_cst_sgn (bottom) < 0)))
14421 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14429 /* Return true if CODE or TYPE is known to be non-negative. */
14432 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14434 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14435 && truth_value_p (code))
14436 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14437 have a signed:1 type (where the values are -1 and 0). */
14442 /* Return true if (CODE OP0) is known to be non-negative. If the return
14443 value is based on the assumption that signed overflow is undefined,
14444 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14445 *STRICT_OVERFLOW_P. */
14448 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14449 bool *strict_overflow_p)
14451 if (TYPE_UNSIGNED (type))
14457 /* We can't return 1 if flag_wrapv is set because
14458 ABS_EXPR<INT_MIN> = INT_MIN. */
14459 if (!INTEGRAL_TYPE_P (type))
14461 if (TYPE_OVERFLOW_UNDEFINED (type))
14463 *strict_overflow_p = true;
14468 case NON_LVALUE_EXPR:
14470 case FIX_TRUNC_EXPR:
14471 return tree_expr_nonnegative_warnv_p (op0,
14472 strict_overflow_p);
14476 tree inner_type = TREE_TYPE (op0);
14477 tree outer_type = type;
14479 if (TREE_CODE (outer_type) == REAL_TYPE)
14481 if (TREE_CODE (inner_type) == REAL_TYPE)
14482 return tree_expr_nonnegative_warnv_p (op0,
14483 strict_overflow_p);
14484 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14486 if (TYPE_UNSIGNED (inner_type))
14488 return tree_expr_nonnegative_warnv_p (op0,
14489 strict_overflow_p);
14492 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14494 if (TREE_CODE (inner_type) == REAL_TYPE)
14495 return tree_expr_nonnegative_warnv_p (op0,
14496 strict_overflow_p);
14497 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14498 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14499 && TYPE_UNSIGNED (inner_type);
14505 return tree_simple_nonnegative_warnv_p (code, type);
14508 /* We don't know the sign of `t', so be conservative and return false. */
14512 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14513 value is based on the assumption that signed overflow is undefined,
14514 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14515 *STRICT_OVERFLOW_P. */
14518 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14519 tree op1, bool *strict_overflow_p)
14521 if (TYPE_UNSIGNED (type))
14526 case POINTER_PLUS_EXPR:
14528 if (FLOAT_TYPE_P (type))
14529 return (tree_expr_nonnegative_warnv_p (op0,
14531 && tree_expr_nonnegative_warnv_p (op1,
14532 strict_overflow_p));
14534 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14535 both unsigned and at least 2 bits shorter than the result. */
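/* Illustration: (int) (unsigned char) a + (int) (unsigned char) b is at
   most 255 + 255 = 510, which is representable and non-negative in a
   32-bit int.  */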
14536 if (TREE_CODE (type) == INTEGER_TYPE
14537 && TREE_CODE (op0) == NOP_EXPR
14538 && TREE_CODE (op1) == NOP_EXPR)
14540 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14541 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14542 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14543 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14545 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14546 TYPE_PRECISION (inner2)) + 1;
14547 return prec < TYPE_PRECISION (type);
14553 if (FLOAT_TYPE_P (type))
14555 /* x * x for floating point x is always non-negative. */
14556 if (operand_equal_p (op0, op1, 0))
14558 return (tree_expr_nonnegative_warnv_p (op0,
14560 && tree_expr_nonnegative_warnv_p (op1,
14561 strict_overflow_p));
14564 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14565 both unsigned and the sum of their precisions is less than that of the result. */
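/* Illustration: (int) (unsigned char) a * (int) (unsigned char) b needs at
   most 8 + 8 = 16 bits, so in a 32-bit int the product is at most
   255 * 255 = 65025 and therefore non-negative.  */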
14566 if (TREE_CODE (type) == INTEGER_TYPE
14567 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14568 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14570 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14571 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14573 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14574 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14577 bool unsigned0 = TYPE_UNSIGNED (inner0);
14578 bool unsigned1 = TYPE_UNSIGNED (inner1);
14580 if (TREE_CODE (op0) == INTEGER_CST)
14581 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14583 if (TREE_CODE (op1) == INTEGER_CST)
14584 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14586 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14587 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14589 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14590 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14591 : TYPE_PRECISION (inner0);
14593 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14594 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14595 : TYPE_PRECISION (inner1);
14597 return precision0 + precision1 < TYPE_PRECISION (type);
14604 return (tree_expr_nonnegative_warnv_p (op0,
14606 || tree_expr_nonnegative_warnv_p (op1,
14607 strict_overflow_p));
14613 case TRUNC_DIV_EXPR:
14614 case CEIL_DIV_EXPR:
14615 case FLOOR_DIV_EXPR:
14616 case ROUND_DIV_EXPR:
14617 return (tree_expr_nonnegative_warnv_p (op0,
14619 && tree_expr_nonnegative_warnv_p (op1,
14620 strict_overflow_p));
14622 case TRUNC_MOD_EXPR:
14623 case CEIL_MOD_EXPR:
14624 case FLOOR_MOD_EXPR:
14625 case ROUND_MOD_EXPR:
14626 return tree_expr_nonnegative_warnv_p (op0,
14627 strict_overflow_p);
14629 return tree_simple_nonnegative_warnv_p (code, type);
14632 /* We don't know the sign of `t', so be conservative and return false. */
14636 /* Return true if T is known to be non-negative. If the return
14637 value is based on the assumption that signed overflow is undefined,
14638 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14639 *STRICT_OVERFLOW_P. */
14642 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14644 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14647 switch (TREE_CODE (t))
14650 return tree_int_cst_sgn (t) >= 0;
14653 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14656 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14659 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14661 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14662 strict_overflow_p));
14664 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14667 /* We don't know the sign of `t', so be conservative and return false. */
14671 /* Return true if T is known to be non-negative. If the return
14672 value is based on the assumption that signed overflow is undefined,
14673 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14674 *STRICT_OVERFLOW_P. */
14677 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14678 tree arg0, tree arg1, bool *strict_overflow_p)
14680 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14681 switch (DECL_FUNCTION_CODE (fndecl))
14683 CASE_FLT_FN (BUILT_IN_ACOS):
14684 CASE_FLT_FN (BUILT_IN_ACOSH):
14685 CASE_FLT_FN (BUILT_IN_CABS):
14686 CASE_FLT_FN (BUILT_IN_COSH):
14687 CASE_FLT_FN (BUILT_IN_ERFC):
14688 CASE_FLT_FN (BUILT_IN_EXP):
14689 CASE_FLT_FN (BUILT_IN_EXP10):
14690 CASE_FLT_FN (BUILT_IN_EXP2):
14691 CASE_FLT_FN (BUILT_IN_FABS):
14692 CASE_FLT_FN (BUILT_IN_FDIM):
14693 CASE_FLT_FN (BUILT_IN_HYPOT):
14694 CASE_FLT_FN (BUILT_IN_POW10):
14695 CASE_INT_FN (BUILT_IN_FFS):
14696 CASE_INT_FN (BUILT_IN_PARITY):
14697 CASE_INT_FN (BUILT_IN_POPCOUNT):
14698 case BUILT_IN_BSWAP32:
14699 case BUILT_IN_BSWAP64:
14703 CASE_FLT_FN (BUILT_IN_SQRT):
14704 /* sqrt(-0.0) is -0.0. */
14705 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14707 return tree_expr_nonnegative_warnv_p (arg0,
14708 strict_overflow_p);
14710 CASE_FLT_FN (BUILT_IN_ASINH):
14711 CASE_FLT_FN (BUILT_IN_ATAN):
14712 CASE_FLT_FN (BUILT_IN_ATANH):
14713 CASE_FLT_FN (BUILT_IN_CBRT):
14714 CASE_FLT_FN (BUILT_IN_CEIL):
14715 CASE_FLT_FN (BUILT_IN_ERF):
14716 CASE_FLT_FN (BUILT_IN_EXPM1):
14717 CASE_FLT_FN (BUILT_IN_FLOOR):
14718 CASE_FLT_FN (BUILT_IN_FMOD):
14719 CASE_FLT_FN (BUILT_IN_FREXP):
14720 CASE_FLT_FN (BUILT_IN_LCEIL):
14721 CASE_FLT_FN (BUILT_IN_LDEXP):
14722 CASE_FLT_FN (BUILT_IN_LFLOOR):
14723 CASE_FLT_FN (BUILT_IN_LLCEIL):
14724 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14725 CASE_FLT_FN (BUILT_IN_LLRINT):
14726 CASE_FLT_FN (BUILT_IN_LLROUND):
14727 CASE_FLT_FN (BUILT_IN_LRINT):
14728 CASE_FLT_FN (BUILT_IN_LROUND):
14729 CASE_FLT_FN (BUILT_IN_MODF):
14730 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14731 CASE_FLT_FN (BUILT_IN_RINT):
14732 CASE_FLT_FN (BUILT_IN_ROUND):
14733 CASE_FLT_FN (BUILT_IN_SCALB):
14734 CASE_FLT_FN (BUILT_IN_SCALBLN):
14735 CASE_FLT_FN (BUILT_IN_SCALBN):
14736 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14737 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14738 CASE_FLT_FN (BUILT_IN_SINH):
14739 CASE_FLT_FN (BUILT_IN_TANH):
14740 CASE_FLT_FN (BUILT_IN_TRUNC):
14741 /* True if the 1st argument is nonnegative. */
14742 return tree_expr_nonnegative_warnv_p (arg0,
14743 strict_overflow_p);
14745 CASE_FLT_FN (BUILT_IN_FMAX):
14746 /* True if the 1st OR 2nd arguments are nonnegative. */
14747 return (tree_expr_nonnegative_warnv_p (arg0,
14749 || (tree_expr_nonnegative_warnv_p (arg1,
14750 strict_overflow_p)));
14752 CASE_FLT_FN (BUILT_IN_FMIN):
14753 /* True if the 1st AND 2nd arguments are nonnegative. */
14754 return (tree_expr_nonnegative_warnv_p (arg0,
14756 && (tree_expr_nonnegative_warnv_p (arg1,
14757 strict_overflow_p)));
14759 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14760 /* True if the 2nd argument is nonnegative. */
14761 return tree_expr_nonnegative_warnv_p (arg1,
14762 strict_overflow_p);
14764 CASE_FLT_FN (BUILT_IN_POWI):
14765 /* True if the 1st argument is nonnegative or the second
14766 argument is an even integer. */
14767 if (TREE_CODE (arg1) == INTEGER_CST
14768 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14770 return tree_expr_nonnegative_warnv_p (arg0,
14771 strict_overflow_p);
14773 CASE_FLT_FN (BUILT_IN_POW):
14774 /* True if the 1st argument is nonnegative or the second
14775 argument is an even integer valued real. */
14776 if (TREE_CODE (arg1) == REAL_CST)
14781 c = TREE_REAL_CST (arg1);
14782 n = real_to_integer (&c);
14785 REAL_VALUE_TYPE cint;
14786 real_from_integer (&cint, VOIDmode, n,
14787 n < 0 ? -1 : 0, 0);
14788 if (real_identical (&c, &cint))
14792 return tree_expr_nonnegative_warnv_p (arg0,
14793 strict_overflow_p);
14798 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14802 /* Return true if T is known to be non-negative. If the return
14803 value is based on the assumption that signed overflow is undefined,
14804 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14805 *STRICT_OVERFLOW_P. */
14808 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14810 enum tree_code code = TREE_CODE (t);
14811 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14818 tree temp = TARGET_EXPR_SLOT (t);
14819 t = TARGET_EXPR_INITIAL (t);
14821 /* If the initializer is non-void, then it's a normal expression
14822 that will be assigned to the slot. */
14823 if (!VOID_TYPE_P (t))
14824 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14826 /* Otherwise, the initializer sets the slot in some way. One common
14827 way is an assignment statement at the end of the initializer. */
14830 if (TREE_CODE (t) == BIND_EXPR)
14831 t = expr_last (BIND_EXPR_BODY (t));
14832 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14833 || TREE_CODE (t) == TRY_CATCH_EXPR)
14834 t = expr_last (TREE_OPERAND (t, 0));
14835 else if (TREE_CODE (t) == STATEMENT_LIST)
14840 if (TREE_CODE (t) == MODIFY_EXPR
14841 && TREE_OPERAND (t, 0) == temp)
14842 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14843 strict_overflow_p);
14850 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14851 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14853 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14854 get_callee_fndecl (t),
14857 strict_overflow_p);
14859 case COMPOUND_EXPR:
14861 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14862 strict_overflow_p);
14864 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14865 strict_overflow_p);
14867 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14868 strict_overflow_p);
14871 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14875 /* We don't know the sign of `t', so be conservative and return false. */
14879 /* Return true if T is known to be non-negative. If the return
14880 value is based on the assumption that signed overflow is undefined,
14881 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14882 *STRICT_OVERFLOW_P. */
14885 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14887 enum tree_code code;
14888 if (t == error_mark_node)
14891 code = TREE_CODE (t);
14892 switch (TREE_CODE_CLASS (code))
14895 case tcc_comparison:
14896 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14898 TREE_OPERAND (t, 0),
14899 TREE_OPERAND (t, 1),
14900 strict_overflow_p);
14903 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14905 TREE_OPERAND (t, 0),
14906 strict_overflow_p);
14909 case tcc_declaration:
14910 case tcc_reference:
14911 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14919 case TRUTH_AND_EXPR:
14920 case TRUTH_OR_EXPR:
14921 case TRUTH_XOR_EXPR:
14922 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14924 TREE_OPERAND (t, 0),
14925 TREE_OPERAND (t, 1),
14926 strict_overflow_p);
14927 case TRUTH_NOT_EXPR:
14928 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14930 TREE_OPERAND (t, 0),
14931 strict_overflow_p);
14938 case WITH_SIZE_EXPR:
14940 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14943 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14947 /* Return true if `t' is known to be non-negative. Handle warnings
14948 about undefined signed overflow. */
14951 tree_expr_nonnegative_p (tree t)
14953 bool ret, strict_overflow_p;
14955 strict_overflow_p = false;
14956 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14957 if (strict_overflow_p)
14958 fold_overflow_warning (("assuming signed overflow does not occur when "
14959 "determining that expression is always "
14961 WARN_STRICT_OVERFLOW_MISC);
14966 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14967 For floating point we further ensure that T is not denormal.
14968 Similar logic is present in nonzero_address in rtlanal.h.
14970 If the return value is based on the assumption that signed overflow
14971 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14972 change *STRICT_OVERFLOW_P. */
14975 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14976 bool *strict_overflow_p)
14981 return tree_expr_nonzero_warnv_p (op0,
14982 strict_overflow_p);
14986 tree inner_type = TREE_TYPE (op0);
14987 tree outer_type = type;
14989 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14990 && tree_expr_nonzero_warnv_p (op0,
14991 strict_overflow_p));
14995 case NON_LVALUE_EXPR:
14996 return tree_expr_nonzero_warnv_p (op0,
14997 strict_overflow_p);
15006 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15007 For floating point we further ensure that T is not denormal.
15008 Similar logic is present in nonzero_address in rtlanal.h.
15010 If the return value is based on the assumption that signed overflow
15011 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15012 change *STRICT_OVERFLOW_P. */
15015 tree_binary_nonzero_warnv_p (enum tree_code code,
15018 tree op1, bool *strict_overflow_p)
15020 bool sub_strict_overflow_p;
15023 case POINTER_PLUS_EXPR:
15025 if (TYPE_OVERFLOW_UNDEFINED (type))
15027 /* In the presence of negative values it is hard
15028 to say anything definite. */
15029 sub_strict_overflow_p = false;
15030 if (!tree_expr_nonnegative_warnv_p (op0,
15031 &sub_strict_overflow_p)
15032 || !tree_expr_nonnegative_warnv_p (op1,
15033 &sub_strict_overflow_p))
15035 /* One of the operands must be positive and the other non-negative. */
15036 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15037 overflows, on a twos-complement machine the sum of two
15038 nonnegative numbers can never be zero. */
15039 return (tree_expr_nonzero_warnv_p (op0,
15041 || tree_expr_nonzero_warnv_p (op1,
15042 strict_overflow_p));
15047 if (TYPE_OVERFLOW_UNDEFINED (type))
15049 if (tree_expr_nonzero_warnv_p (op0,
15051 && tree_expr_nonzero_warnv_p (op1,
15052 strict_overflow_p))
15054 *strict_overflow_p = true;
15061 sub_strict_overflow_p = false;
15062 if (tree_expr_nonzero_warnv_p (op0,
15063 &sub_strict_overflow_p)
15064 && tree_expr_nonzero_warnv_p (op1,
15065 &sub_strict_overflow_p))
15067 if (sub_strict_overflow_p)
15068 *strict_overflow_p = true;
15073 sub_strict_overflow_p = false;
15074 if (tree_expr_nonzero_warnv_p (op0,
15075 &sub_strict_overflow_p))
15077 if (sub_strict_overflow_p)
15078 *strict_overflow_p = true;
15080 /* When both operands are nonzero, then MAX must be too. */
15081 if (tree_expr_nonzero_warnv_p (op1,
15082 strict_overflow_p))
15085 /* MAX where operand 0 is positive is positive. */
15086 return tree_expr_nonnegative_warnv_p (op0,
15087 strict_overflow_p);
15089 /* MAX where operand 1 is positive is positive. */
15090 else if (tree_expr_nonzero_warnv_p (op1,
15091 &sub_strict_overflow_p)
15092 && tree_expr_nonnegative_warnv_p (op1,
15093 &sub_strict_overflow_p))
15095 if (sub_strict_overflow_p)
15096 *strict_overflow_p = true;
15102 return (tree_expr_nonzero_warnv_p (op1,
15104 || tree_expr_nonzero_warnv_p (op0,
15105 strict_overflow_p));
15114 /* Return true when T is an address and is known to be nonzero.
15115 For floating point we further ensure that T is not denormal.
15116 Similar logic is present in nonzero_address in rtlanal.h.
15118 If the return value is based on the assumption that signed overflow
15119 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15120 change *STRICT_OVERFLOW_P. */
15123 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15125 bool sub_strict_overflow_p;
15126 switch (TREE_CODE (t))
15129 return !integer_zerop (t);
15133 tree base = TREE_OPERAND (t, 0);
15134 if (!DECL_P (base))
15135 base = get_base_address (base);
15140 /* Weak declarations may link to NULL. Other things may also be NULL
15141 so protect with -fdelete-null-pointer-checks; but not variables
15142 allocated on the stack. */
15144 && (flag_delete_null_pointer_checks
15145 || (DECL_CONTEXT (base)
15146 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15147 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15148 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15150 /* Constants are never weak. */
15151 if (CONSTANT_CLASS_P (base))
15158 sub_strict_overflow_p = false;
15159 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15160 &sub_strict_overflow_p)
15161 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15162 &sub_strict_overflow_p))
15164 if (sub_strict_overflow_p)
15165 *strict_overflow_p = true;
15176 /* Return true when T is an address and is known to be nonzero.
15177 For floating point we further ensure that T is not denormal.
15178 Similar logic is present in nonzero_address in rtlanal.h.
15180 If the return value is based on the assumption that signed overflow
15181 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15182 change *STRICT_OVERFLOW_P. */
15185 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15187 tree type = TREE_TYPE (t);
15188 enum tree_code code;
15190 /* Doing something useful for floating point would need more work. */
15191 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15194 code = TREE_CODE (t);
15195 switch (TREE_CODE_CLASS (code))
15198 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15199 strict_overflow_p);
15201 case tcc_comparison:
15202 return tree_binary_nonzero_warnv_p (code, type,
15203 TREE_OPERAND (t, 0),
15204 TREE_OPERAND (t, 1),
15205 strict_overflow_p);
15207 case tcc_declaration:
15208 case tcc_reference:
15209 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15217 case TRUTH_NOT_EXPR:
15218 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15219 strict_overflow_p);
15221 case TRUTH_AND_EXPR:
15222 case TRUTH_OR_EXPR:
15223 case TRUTH_XOR_EXPR:
15224 return tree_binary_nonzero_warnv_p (code, type,
15225 TREE_OPERAND (t, 0),
15226 TREE_OPERAND (t, 1),
15227 strict_overflow_p);
15234 case WITH_SIZE_EXPR:
15236 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15238 case COMPOUND_EXPR:
15241 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15242 strict_overflow_p);
15245 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15246 strict_overflow_p);
15249 return alloca_call_p (t);
15257 /* Return true when T is an address and is known to be nonzero.
15258 Handle warnings about undefined signed overflow. */
15261 tree_expr_nonzero_p (tree t)
15263 bool ret, strict_overflow_p;
15265 strict_overflow_p = false;
15266 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15267 if (strict_overflow_p)
15268 fold_overflow_warning (("assuming signed overflow does not occur when "
15269 "determining that expression is always "
15271 WARN_STRICT_OVERFLOW_MISC);
15275 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15276 attempt to fold the expression to a constant without modifying TYPE,
15279 If the expression could be simplified to a constant, then return
15280 the constant. If the expression would not be simplified to a
15281 constant, then return NULL_TREE. */
15284 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15286 tree tem = fold_binary (code, type, op0, op1);
15287 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15290 /* Given the components of a unary expression CODE, TYPE and OP0,
15291 attempt to fold the expression to a constant without modifying
15294 If the expression could be simplified to a constant, then return
15295 the constant. If the expression would not be simplified to a
15296 constant, then return NULL_TREE. */
15299 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15301 tree tem = fold_unary (code, type, op0);
15302 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15305 /* If EXP represents referencing an element in a constant string
15306 (either via pointer arithmetic or array indexing), return the
15307 tree representing the value accessed, otherwise return NULL. */
15310 fold_read_from_constant_string (tree exp)
15312 if ((TREE_CODE (exp) == INDIRECT_REF
15313 || TREE_CODE (exp) == ARRAY_REF)
15314 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15316 tree exp1 = TREE_OPERAND (exp, 0);
15319 location_t loc = EXPR_LOCATION (exp);
15321 if (TREE_CODE (exp) == INDIRECT_REF)
15322 string = string_constant (exp1, &index);
15325 tree low_bound = array_ref_low_bound (exp);
15326 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15328 /* Optimize the special-case of a zero lower bound.
15330 We convert the low_bound to sizetype to avoid some problems
15331 with constant folding. (E.g. suppose the lower bound is 1,
15332 and its mode is QI. Without the conversion, (ARRAY
15333 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15334 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15335 if (! integer_zerop (low_bound))
15336 index = size_diffop_loc (loc, index,
15337 fold_convert_loc (loc, sizetype, low_bound));
15343 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15344 && TREE_CODE (string) == STRING_CST
15345 && TREE_CODE (index) == INTEGER_CST
15346 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15347 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15349 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15350 return build_int_cst_type (TREE_TYPE (exp),
15351 (TREE_STRING_POINTER (string)
15352 [TREE_INT_CST_LOW (index)]));
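/* Illustrative case: reading "abc"[1] through this path yields the
   integer constant 98, i.e. the character 'b' on an ASCII target.  */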
15357 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15358 an integer constant, real, or fixed-point constant.
15360 TYPE is the type of the result. */
15363 fold_negate_const (tree arg0, tree type)
15365 tree t = NULL_TREE;
15367 switch (TREE_CODE (arg0))
15371 double_int val = tree_to_double_int (arg0);
15372 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15374 t = force_fit_type_double (type, val, 1,
15375 (overflow | TREE_OVERFLOW (arg0))
15376 && !TYPE_UNSIGNED (type));
15381 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15386 FIXED_VALUE_TYPE f;
15387 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15388 &(TREE_FIXED_CST (arg0)), NULL,
15389 TYPE_SATURATING (type));
15390 t = build_fixed (type, f);
15391 /* Propagate overflow flags. */
15392 if (overflow_p | TREE_OVERFLOW (arg0))
15393 TREE_OVERFLOW (t) = 1;
15398 gcc_unreachable ();
15404 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15405 an integer constant or real constant.
15407 TYPE is the type of the result. */
15410 fold_abs_const (tree arg0, tree type)
15412 tree t = NULL_TREE;
15414 switch (TREE_CODE (arg0))
15418 double_int val = tree_to_double_int (arg0);
15420 /* If the value is unsigned or non-negative, then the absolute value
15421 is the same as the ordinary value. */
15422 if (TYPE_UNSIGNED (type)
15423 || !double_int_negative_p (val))
15426 /* If the value is negative, then the absolute value is its negation. */
15432 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15433 t = force_fit_type_double (type, val, -1,
15434 overflow | TREE_OVERFLOW (arg0));
15440 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15441 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15447 gcc_unreachable ();
15453 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15454 constant. TYPE is the type of the result. */
15457 fold_not_const (const_tree arg0, tree type)
15461 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15463 val = double_int_not (tree_to_double_int (arg0));
15464 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15467 /* Given CODE, a relational operator, the target type, TYPE and two
15468 constant operands OP0 and OP1, return the result of the
15469 relational operation. If the result is not a compile time
15470 constant, then return NULL_TREE. */
15473 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15475 int result, invert;
15477 /* From here on, the only cases we handle are when the result is
15478 known to be a constant. */
15480 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15482 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15483 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15485 /* Handle the cases where either operand is a NaN. */
15486 if (real_isnan (c0) || real_isnan (c1))
15496 case UNORDERED_EXPR:
15510 if (flag_trapping_math)
15516 gcc_unreachable ();
15519 return constant_boolean_node (result, type);
15522 return constant_boolean_node (real_compare (code, c0, c1), type);
15525 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15527 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15528 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15529 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15532 /* Handle equality/inequality of complex constants. */
15533 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15535 tree rcond = fold_relational_const (code, type,
15536 TREE_REALPART (op0),
15537 TREE_REALPART (op1));
15538 tree icond = fold_relational_const (code, type,
15539 TREE_IMAGPART (op0),
15540 TREE_IMAGPART (op1));
15541 if (code == EQ_EXPR)
15542 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15543 else if (code == NE_EXPR)
15544 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15549 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15551 To compute GT, swap the arguments and do LT.
15552 To compute GE, do LT and invert the result.
15553 To compute LE, swap the arguments, do LT and invert the result.
15554 To compute NE, do EQ and invert the result.
15556 Therefore, the code below must handle only EQ and LT. */
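/* Worked example (illustrative): folding 3 >= 5 rewrites GE_EXPR as the
   inverse of LT_EXPR, so we compute 3 < 5 (true) and then invert,
   yielding a false boolean node.  */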
15558 if (code == LE_EXPR || code == GT_EXPR)
15563 code = swap_tree_comparison (code);
15566 /* Note that it is safe to invert for real values here because we
15567 have already handled the one case where it matters. */
15570 if (code == NE_EXPR || code == GE_EXPR)
15573 code = invert_tree_comparison (code, false);
15576 /* Compute a result for LT or EQ if args permit;
15577 Otherwise return T. */
15578 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15580 if (code == EQ_EXPR)
15581 result = tree_int_cst_equal (op0, op1);
15582 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15583 result = INT_CST_LT_UNSIGNED (op0, op1);
15585 result = INT_CST_LT (op0, op1);
15592 return constant_boolean_node (result, type);
15595 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15596 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15600 fold_build_cleanup_point_expr (tree type, tree expr)
15602 /* If the expression does not have side effects then we don't have to wrap
15603 it with a cleanup point expression. */
15604 if (!TREE_SIDE_EFFECTS (expr))
15607 /* If the expression is a return, check whether the expression inside the
15608 return, or the right-hand side of the modify expression inside the
15609 return, has side effects. If either has none, we don't need to
15610 wrap the expression in a cleanup point expression. Note we don't check the
15611 left-hand side of the modify because it should always be a return decl. */
15612 if (TREE_CODE (expr) == RETURN_EXPR)
15614 tree op = TREE_OPERAND (expr, 0);
15615 if (!op || !TREE_SIDE_EFFECTS (op))
15617 op = TREE_OPERAND (op, 1);
15618 if (!TREE_SIDE_EFFECTS (op))
15622 return build1 (CLEANUP_POINT_EXPR, type, expr);
15625 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15626 of an indirection through OP0, or NULL_TREE if no simplification is
15630 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15636 subtype = TREE_TYPE (sub);
15637 if (!POINTER_TYPE_P (subtype))
15640 if (TREE_CODE (sub) == ADDR_EXPR)
15642 tree op = TREE_OPERAND (sub, 0);
15643 tree optype = TREE_TYPE (op);
15644 /* *&CONST_DECL -> to the value of the const decl. */
15645 if (TREE_CODE (op) == CONST_DECL)
15646 return DECL_INITIAL (op);
15647 /* *&p => p; make sure to handle *&"str"[cst] here. */
15648 if (type == optype)
15650 tree fop = fold_read_from_constant_string (op);
15656 /* *(foo *)&fooarray => fooarray[0] */
15657 else if (TREE_CODE (optype) == ARRAY_TYPE
15658 && type == TREE_TYPE (optype))
15660 tree type_domain = TYPE_DOMAIN (optype);
15661 tree min_val = size_zero_node;
15662 if (type_domain && TYPE_MIN_VALUE (type_domain))
15663 min_val = TYPE_MIN_VALUE (type_domain);
15664 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15665 SET_EXPR_LOCATION (op0, loc);
15668 /* *(foo *)&complexfoo => __real__ complexfoo */
15669 else if (TREE_CODE (optype) == COMPLEX_TYPE
15670 && type == TREE_TYPE (optype))
15671 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15672 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15673 else if (TREE_CODE (optype) == VECTOR_TYPE
15674 && type == TREE_TYPE (optype))
15676 tree part_width = TYPE_SIZE (type);
15677 tree index = bitsize_int (0);
15678 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              op0 = build4 (ARRAY_REF, type, op00, op01,
                            NULL_TREE, NULL_TREE);
              SET_EXPR_LOCATION (op0, loc);
              return op0;
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
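/* Source-level sketches of the simplifications above (the declarations
   are hypothetical):

     int a[4];              *(int *)&a         ==> a[0]
                            ((int *)&a)[1]     ==> a[1]
     _Complex double c;     *(double *)&c      ==> __real__ c
                            ((double *)&c)[1]  ==> __imag__ c  */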
/* Builds an expression for an indirection through T, simplifying some
   cases.  */
tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);
  if (sub)
    return sub;
  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */
tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
  return sub ? sub : t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */
tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
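/* For instance, when only part of an expression has side effects, the rest
   is stripped; sketches with hypothetical operands (f has side effects,
   x and y do not):

     fold_ignored_result (COMPOUND_EXPR <f (), x + y>)  ==> f ()
     fold_ignored_result (x + y)                        ==> integer_zero_node  */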
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;
          if ((val.low & (divisor - 1)) == 0)
            return value;
          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }
          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;
          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
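/* In the power-of-two case the folded tree computes the usual
   add-and-mask idiom; a host-level sketch of the same arithmetic
   (DIVISOR assumed to be a power of two):

     rounded = (value + divisor - 1) & -divisor;

   e.g. rounding 13 up to a multiple of 8 yields 16.  */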
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
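/* Likewise, rounding down with a power-of-two DIVISOR folds to a single
   mask; a host-level sketch of the emitted computation:

     rounded = value & -divisor;

   e.g. rounding 13 down to a multiple of 8 yields 8.  */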
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }
  return core;
}
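/* Purely illustrative: for EXP == &s.f, where field F lies at a constant
   byte offset within S, the returned core is &s, *PBITPOS holds F's bit
   offset and *POFFSET is NULL_TREE; for a variable index such as &a[i],
   the variable part of the offset comes back in *POFFSET instead.  */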
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);
      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;
      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
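/* A usage sketch with hypothetical trees: for E1 == &a[3] and E2 == &a[1],
   where A is an array of 4-byte elements, both cores are &a, so

     ptr_difference_const (e1, e2, &diff)

   returns true with DIFF == 8.  If the two addresses are based on
   different objects, it returns false and *DIFF is left untouched.  */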
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);