1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static tree combine_comparisons (enum tree_code, enum tree_code,
107 enum tree_code, tree, tree, tree);
108 static int truth_value_p (enum tree_code);
109 static int operand_equal_for_comparison_p (tree, tree, tree);
110 static int twoval_comparison_p (tree, tree *, tree *, int *);
111 static tree eval_subst (tree, tree, tree, tree, tree);
112 static tree pedantic_omit_one_operand (tree, tree, tree);
113 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
156 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
157 We do that by representing the two-word integer in 4 words, with only
158 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
159 number. The value of the word is LOWPART + HIGHPART * BASE. */
162 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
163 #define HIGHPART(x) \
164 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
165 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
167 /* Unpack a two-word integer into 4 words.
168 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
169 WORDS points to the array of HOST_WIDE_INTs. */
172 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174 words[0] = LOWPART (low);
175 words[1] = HIGHPART (low);
176 words[2] = LOWPART (hi);
177 words[3] = HIGHPART (hi);
180 /* Pack an array of 4 words into a two-word integer.
181 WORDS points to the array of words.
182 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
185 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
188 *low = words[0] + words[1] * BASE;
189 *hi = words[2] + words[3] * BASE;
192 /* Force the double-word integer L1, H1 to be within the range of the
193 integer type TYPE. Stores the properly truncated and sign-extended
194 double-word integer in *LV, *HV. Returns true if the operation
195 overflows, that is, argument and result are different. */
198 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
199 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
201 unsigned HOST_WIDE_INT low0 = l1;
202 HOST_WIDE_INT high0 = h1;
204 int sign_extended_type;
206 if (POINTER_TYPE_P (type)
207 || TREE_CODE (type) == OFFSET_TYPE)
210 prec = TYPE_PRECISION (type);
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
241 else if (prec == HOST_BITS_PER_WIDE_INT)
243 if ((HOST_WIDE_INT)l1 < 0)
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
263 /* We force the double-int HIGH:LOW to the range of the type TYPE by
264 sign or zero extending it.
265 OVERFLOWABLE indicates if we are interested
266 in overflow of the value, when >0 we are only interested in signed
267 overflow, for <0 we are interested in any overflow. OVERFLOWED
268 indicates whether overflow has already occurred. CONST_OVERFLOWED
269 indicates whether constant overflow has already occurred. We force
270 T's value to be within range of T's type (by setting to 0 or 1 all
271 the bits outside the type's range). We set TREE_OVERFLOWED if,
272 OVERFLOWED is nonzero,
273 or OVERFLOWABLE is >0 and signed overflow occurs
274 or OVERFLOWABLE is <0 and any overflow occurs
275 We return a new tree node for the extended double-int. The node
276 is shared if no overflow flags are set. */
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
283 int sign_extended_type;
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
291 overflow = fit_double_type (low, high, &low, &high, type);
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
298 || (overflowable > 0 && sign_extended_type))
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 unsigned HOST_WIDE_INT l;
329 h = h1 + h2 + (l < l1);
335 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
337 return OVERFLOW_SUM_SIGN (h1, h2, h);
340 /* Negate a doubleword integer with doubleword result.
341 Return nonzero if the operation overflows, assuming it's signed.
342 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
343 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
346 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
347 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
353 return (*hv & h1) < 0;
363 /* Multiply two doubleword integers with doubleword result.
364 Return nonzero if the operation overflows according to UNSIGNED_P.
365 Each argument is given as two `HOST_WIDE_INT' pieces.
366 One argument is L1 and H1; the other, L2 and H2.
367 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
370 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
371 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
372 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
375 HOST_WIDE_INT arg1[4];
376 HOST_WIDE_INT arg2[4];
377 HOST_WIDE_INT prod[4 * 2];
378 unsigned HOST_WIDE_INT carry;
380 unsigned HOST_WIDE_INT toplow, neglow;
381 HOST_WIDE_INT tophigh, neghigh;
383 encode (arg1, l1, h1);
384 encode (arg2, l2, h2);
386 memset (prod, 0, sizeof prod);
388 for (i = 0; i < 4; i++)
391 for (j = 0; j < 4; j++)
394 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
395 carry += arg1[i] * arg2[j];
396 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
398 prod[k] = LOWPART (carry);
399 carry = HIGHPART (carry);
404 decode (prod, lv, hv);
405 decode (prod + 4, &toplow, &tophigh);
407 /* Unsigned overflow is immediate. */
409 return (toplow | tophigh) != 0;
411 /* Check for signed overflow by calculating the signed representation of the
412 top half of the result; it should agree with the low half's sign bit. */
415 neg_double (l2, h2, &neglow, &neghigh);
416 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
420 neg_double (l1, h1, &neglow, &neghigh);
421 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
423 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
426 /* Shift the doubleword integer in L1, H1 left by COUNT places
427 keeping only PREC bits of result.
428 Shift right if COUNT is negative.
429 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
430 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
433 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
434 HOST_WIDE_INT count, unsigned int prec,
435 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
437 unsigned HOST_WIDE_INT signmask;
441 rshift_double (l1, h1, -count, prec, lv, hv, arith);
445 if (SHIFT_COUNT_TRUNCATED)
448 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
450 /* Shifting by the host word size is undefined according to the
451 ANSI standard, so we must handle this as a special case. */
455 else if (count >= HOST_BITS_PER_WIDE_INT)
457 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
462 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
463 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
467 /* Sign extend all bits that are beyond the precision. */
469 signmask = -((prec > HOST_BITS_PER_WIDE_INT
470 ? ((unsigned HOST_WIDE_INT) *hv
471 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
472 : (*lv >> (prec - 1))) & 1);
474 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
476 else if (prec >= HOST_BITS_PER_WIDE_INT)
478 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
479 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
484 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
485 *lv |= signmask << prec;
489 /* Shift the doubleword integer in L1, H1 right by COUNT places
490 keeping only PREC bits of result. COUNT must be positive.
491 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
492 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
495 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
496 HOST_WIDE_INT count, unsigned int prec,
497 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
500 unsigned HOST_WIDE_INT signmask;
503 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
506 if (SHIFT_COUNT_TRUNCATED)
509 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
511 /* Shifting by the host word size is undefined according to the
512 ANSI standard, so we must handle this as a special case. */
516 else if (count >= HOST_BITS_PER_WIDE_INT)
519 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
523 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
525 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
528 /* Zero / sign extend all bits that are beyond the precision. */
530 if (count >= (HOST_WIDE_INT)prec)
535 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
537 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
539 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
540 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
545 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
546 *lv |= signmask << (prec - count);
550 /* Rotate the doubleword integer in L1, H1 left by COUNT places
551 keeping only PREC bits of result.
552 Rotate right if COUNT is negative.
553 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
556 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
557 HOST_WIDE_INT count, unsigned int prec,
558 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
560 unsigned HOST_WIDE_INT s1l, s2l;
561 HOST_WIDE_INT s1h, s2h;
567 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
568 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 /* Rotate the doubleword integer in L1, H1 left by COUNT places
574 keeping only PREC bits of result. COUNT must be positive.
575 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
578 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
579 HOST_WIDE_INT count, unsigned int prec,
580 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
582 unsigned HOST_WIDE_INT s1l, s2l;
583 HOST_WIDE_INT s1h, s2h;
589 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
590 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
596 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
597 CODE is a tree code for a kind of division, one of
598 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
600 It controls how the quotient is rounded to an integer.
601 Return nonzero if the operation overflows.
602 UNS nonzero says do unsigned division. */
605 div_and_round_double (enum tree_code code, int uns,
606 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
607 HOST_WIDE_INT hnum_orig,
608 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
609 HOST_WIDE_INT hden_orig,
610 unsigned HOST_WIDE_INT *lquo,
611 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
615 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
616 HOST_WIDE_INT den[4], quo[4];
618 unsigned HOST_WIDE_INT work;
619 unsigned HOST_WIDE_INT carry = 0;
620 unsigned HOST_WIDE_INT lnum = lnum_orig;
621 HOST_WIDE_INT hnum = hnum_orig;
622 unsigned HOST_WIDE_INT lden = lden_orig;
623 HOST_WIDE_INT hden = hden_orig;
626 if (hden == 0 && lden == 0)
627 overflow = 1, lden = 1;
629 /* Calculate quotient sign and convert operands to unsigned. */
635 /* (minimum integer) / (-1) is the only overflow case. */
636 if (neg_double (lnum, hnum, &lnum, &hnum)
637 && ((HOST_WIDE_INT) lden & hden) == -1)
643 neg_double (lden, hden, &lden, &hden);
647 if (hnum == 0 && hden == 0)
648 { /* single precision */
650 /* This unsigned division rounds toward zero. */
656 { /* trivial case: dividend < divisor */
657 /* hden != 0 already checked. */
664 memset (quo, 0, sizeof quo);
666 memset (num, 0, sizeof num); /* to zero 9th element */
667 memset (den, 0, sizeof den);
669 encode (num, lnum, hnum);
670 encode (den, lden, hden);
672 /* Special code for when the divisor < BASE. */
673 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
675 /* hnum != 0 already checked. */
676 for (i = 4 - 1; i >= 0; i--)
678 work = num[i] + carry * BASE;
679 quo[i] = work / lden;
685 /* Full double precision division,
686 with thanks to Don Knuth's "Seminumerical Algorithms". */
687 int num_hi_sig, den_hi_sig;
688 unsigned HOST_WIDE_INT quo_est, scale;
690 /* Find the highest nonzero divisor digit. */
691 for (i = 4 - 1;; i--)
698 /* Insure that the first digit of the divisor is at least BASE/2.
699 This is required by the quotient digit estimation algorithm. */
701 scale = BASE / (den[den_hi_sig] + 1);
703 { /* scale divisor and dividend */
705 for (i = 0; i <= 4 - 1; i++)
707 work = (num[i] * scale) + carry;
708 num[i] = LOWPART (work);
709 carry = HIGHPART (work);
714 for (i = 0; i <= 4 - 1; i++)
716 work = (den[i] * scale) + carry;
717 den[i] = LOWPART (work);
718 carry = HIGHPART (work);
719 if (den[i] != 0) den_hi_sig = i;
726 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
728 /* Guess the next quotient digit, quo_est, by dividing the first
729 two remaining dividend digits by the high order quotient digit.
730 quo_est is never low and is at most 2 high. */
731 unsigned HOST_WIDE_INT tmp;
733 num_hi_sig = i + den_hi_sig + 1;
734 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
735 if (num[num_hi_sig] != den[den_hi_sig])
736 quo_est = work / den[den_hi_sig];
740 /* Refine quo_est so it's usually correct, and at most one high. */
741 tmp = work - quo_est * den[den_hi_sig];
743 && (den[den_hi_sig - 1] * quo_est
744 > (tmp * BASE + num[num_hi_sig - 2])))
747 /* Try QUO_EST as the quotient digit, by multiplying the
748 divisor by QUO_EST and subtracting from the remaining dividend.
749 Keep in mind that QUO_EST is the I - 1st digit. */
752 for (j = 0; j <= den_hi_sig; j++)
754 work = quo_est * den[j] + carry;
755 carry = HIGHPART (work);
756 work = num[i + j] - LOWPART (work);
757 num[i + j] = LOWPART (work);
758 carry += HIGHPART (work) != 0;
761 /* If quo_est was high by one, then num[i] went negative and
762 we need to correct things. */
763 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
766 carry = 0; /* add divisor back in */
767 for (j = 0; j <= den_hi_sig; j++)
769 work = num[i + j] + den[j] + carry;
770 carry = HIGHPART (work);
771 num[i + j] = LOWPART (work);
774 num [num_hi_sig] += carry;
777 /* Store the quotient digit. */
782 decode (quo, lquo, hquo);
785 /* If result is negative, make it so. */
787 neg_double (*lquo, *hquo, lquo, hquo);
789 /* Compute trial remainder: rem = num - (quo * den) */
790 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
791 neg_double (*lrem, *hrem, lrem, hrem);
792 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
797 case TRUNC_MOD_EXPR: /* round toward zero */
798 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
802 case FLOOR_MOD_EXPR: /* round toward negative infinity */
803 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
806 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
814 case CEIL_MOD_EXPR: /* round toward positive infinity */
815 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
825 case ROUND_MOD_EXPR: /* round to closest integer */
827 unsigned HOST_WIDE_INT labs_rem = *lrem;
828 HOST_WIDE_INT habs_rem = *hrem;
829 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
830 HOST_WIDE_INT habs_den = hden, htwice;
832 /* Get absolute values. */
834 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
836 neg_double (lden, hden, &labs_den, &habs_den);
838 /* If (2 * abs (lrem) >= abs (lden)) */
839 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
840 labs_rem, habs_rem, <wice, &htwice);
842 if (((unsigned HOST_WIDE_INT) habs_den
843 < (unsigned HOST_WIDE_INT) htwice)
844 || (((unsigned HOST_WIDE_INT) habs_den
845 == (unsigned HOST_WIDE_INT) htwice)
846 && (labs_den < ltwice)))
850 add_double (*lquo, *hquo,
851 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
854 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
866 /* Compute true remainder: rem = num - (quo * den) */
867 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
868 neg_double (*lrem, *hrem, lrem, hrem);
869 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
873 /* If ARG2 divides ARG1 with zero remainder, carries out the division
874 of type CODE and returns the quotient.
875 Otherwise returns NULL_TREE. */
878 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
880 unsigned HOST_WIDE_INT int1l, int2l;
881 HOST_WIDE_INT int1h, int2h;
882 unsigned HOST_WIDE_INT quol, reml;
883 HOST_WIDE_INT quoh, remh;
884 tree type = TREE_TYPE (arg1);
885 int uns = TYPE_UNSIGNED (type);
887 int1l = TREE_INT_CST_LOW (arg1);
888 int1h = TREE_INT_CST_HIGH (arg1);
889 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
890 &obj[some_exotic_number]. */
891 if (POINTER_TYPE_P (type))
894 type = signed_type_for (type);
895 fit_double_type (int1l, int1h, &int1l, &int1h,
899 fit_double_type (int1l, int1h, &int1l, &int1h, type);
900 int2l = TREE_INT_CST_LOW (arg2);
901 int2h = TREE_INT_CST_HIGH (arg2);
903 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904 &quol, &quoh, &reml, &remh);
905 if (remh != 0 || reml != 0)
908 return build_int_cst_wide (type, quol, quoh);
911 /* This is nonzero if we should defer warnings about undefined
912 overflow. This facility exists because these warnings are a
913 special case. The code to estimate loop iterations does not want
914 to issue any warnings, since it works with expressions which do not
915 occur in user code. Various bits of cleanup code call fold(), but
916 only use the result if it has certain characteristics (e.g., is a
917 constant); that code only wants to issue a warning if the result is
920 static int fold_deferring_overflow_warnings;
922 /* If a warning about undefined overflow is deferred, this is the
923 warning. Note that this may cause us to turn two warnings into
924 one, but that is fine since it is sufficient to only give one
925 warning per expression. */
927 static const char* fold_deferred_overflow_warning;
929 /* If a warning about undefined overflow is deferred, this is the
930 level at which the warning should be emitted. */
932 static enum warn_strict_overflow_code fold_deferred_overflow_code;
934 /* Start deferring overflow warnings. We could use a stack here to
935 permit nested calls, but at present it is not necessary. */
938 fold_defer_overflow_warnings (void)
940 ++fold_deferring_overflow_warnings;
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
962 if (fold_deferred_overflow_warning != NULL
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = code;
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
972 if (!issue || warnmsg == NULL)
975 if (gimple_no_warning_p (stmt))
978 /* Use the smallest code level when deciding to issue the
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
983 if (!issue_strict_overflow_warning (code))
987 locus = input_location;
989 locus = gimple_location (stmt);
990 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
993 /* Stop deferring overflow warnings, ignoring any deferred
997 fold_undefer_and_ignore_overflow_warnings (void)
999 fold_undefer_overflow_warnings (false, NULL, 0);
1002 /* Whether we are deferring overflow warnings. */
1005 fold_deferring_overflow_warnings_p (void)
1007 return fold_deferring_overflow_warnings > 0;
1010 /* This is called when we fold something based on the fact that signed
1011 overflow is undefined. */
1014 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1137 return negate_expr_p (TREE_REALPART (t))
1138 && negate_expr_p (TREE_IMAGPART (t));
1141 return negate_expr_p (TREE_OPERAND (t, 0))
1142 && negate_expr_p (TREE_OPERAND (t, 1));
1145 return negate_expr_p (TREE_OPERAND (t, 0));
1148 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1156 /* -(A + B) -> (-A) - B. */
1157 return negate_expr_p (TREE_OPERAND (t, 0));
1160 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1161 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1163 && reorder_operands_p (TREE_OPERAND (t, 0),
1164 TREE_OPERAND (t, 1));
1167 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1174 return negate_expr_p (TREE_OPERAND (t, 1))
1175 || negate_expr_p (TREE_OPERAND (t, 0));
1178 case TRUNC_DIV_EXPR:
1179 case ROUND_DIV_EXPR:
1180 case FLOOR_DIV_EXPR:
1182 case EXACT_DIV_EXPR:
1183 /* In general we can't negate A / B, because if A is INT_MIN and
1184 B is 1, we may turn this into INT_MIN / -1 which is undefined
1185 and actually traps on some architectures. But if overflow is
1186 undefined, we can negate, because - (INT_MIN / 1) is an
1188 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1189 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1191 return negate_expr_p (TREE_OPERAND (t, 1))
1192 || negate_expr_p (TREE_OPERAND (t, 0));
1195 /* Negate -((double)float) as (double)(-float). */
1196 if (TREE_CODE (type) == REAL_TYPE)
1198 tree tem = strip_float_extensions (t);
1200 return negate_expr_p (tem);
1205 /* Negate -f(x) as f(-x). */
1206 if (negate_mathfn_p (builtin_mathfn_code (t)))
1207 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1211 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1212 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1214 tree op1 = TREE_OPERAND (t, 1);
1215 if (TREE_INT_CST_HIGH (op1) == 0
1216 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1217 == TREE_INT_CST_LOW (op1))
1228 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1229 simplification is possible.
1230 If negate_expr_p would return true for T, NULL_TREE will never be
1234 fold_negate_expr (tree t)
1236 tree type = TREE_TYPE (t);
1239 switch (TREE_CODE (t))
1241 /* Convert - (~A) to A + 1. */
1243 if (INTEGRAL_TYPE_P (type))
1244 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1245 build_int_cst (type, 1));
1249 tem = fold_negate_const (t, type);
1250 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1251 || !TYPE_OVERFLOW_TRAPS (type))
1256 tem = fold_negate_const (t, type);
1257 /* Two's complement FP formats, such as c4x, may overflow. */
1258 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1263 tem = fold_negate_const (t, type);
1268 tree rpart = negate_expr (TREE_REALPART (t));
1269 tree ipart = negate_expr (TREE_IMAGPART (t));
1271 if ((TREE_CODE (rpart) == REAL_CST
1272 && TREE_CODE (ipart) == REAL_CST)
1273 || (TREE_CODE (rpart) == INTEGER_CST
1274 && TREE_CODE (ipart) == INTEGER_CST))
1275 return build_complex (type, rpart, ipart);
1280 if (negate_expr_p (t))
1281 return fold_build2 (COMPLEX_EXPR, type,
1282 fold_negate_expr (TREE_OPERAND (t, 0)),
1283 fold_negate_expr (TREE_OPERAND (t, 1)));
1287 if (negate_expr_p (t))
1288 return fold_build1 (CONJ_EXPR, type,
1289 fold_negate_expr (TREE_OPERAND (t, 0)));
1293 return TREE_OPERAND (t, 0);
1296 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1299 /* -(A + B) -> (-B) - A. */
1300 if (negate_expr_p (TREE_OPERAND (t, 1))
1301 && reorder_operands_p (TREE_OPERAND (t, 0),
1302 TREE_OPERAND (t, 1)))
1304 tem = negate_expr (TREE_OPERAND (t, 1));
1305 return fold_build2 (MINUS_EXPR, type,
1306 tem, TREE_OPERAND (t, 0));
1309 /* -(A + B) -> (-A) - B. */
1310 if (negate_expr_p (TREE_OPERAND (t, 0)))
1312 tem = negate_expr (TREE_OPERAND (t, 0));
1313 return fold_build2 (MINUS_EXPR, type,
1314 tem, TREE_OPERAND (t, 1));
1320 /* - (A - B) -> B - A */
1321 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1323 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1324 return fold_build2 (MINUS_EXPR, type,
1325 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1329 if (TYPE_UNSIGNED (type))
1335 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1337 tem = TREE_OPERAND (t, 1);
1338 if (negate_expr_p (tem))
1339 return fold_build2 (TREE_CODE (t), type,
1340 TREE_OPERAND (t, 0), negate_expr (tem));
1341 tem = TREE_OPERAND (t, 0);
1342 if (negate_expr_p (tem))
1343 return fold_build2 (TREE_CODE (t), type,
1344 negate_expr (tem), TREE_OPERAND (t, 1));
1348 case TRUNC_DIV_EXPR:
1349 case ROUND_DIV_EXPR:
1350 case FLOOR_DIV_EXPR:
1352 case EXACT_DIV_EXPR:
1353 /* In general we can't negate A / B, because if A is INT_MIN and
1354 B is 1, we may turn this into INT_MIN / -1 which is undefined
1355 and actually traps on some architectures. But if overflow is
1356 undefined, we can negate, because - (INT_MIN / 1) is an
1358 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1360 const char * const warnmsg = G_("assuming signed overflow does not "
1361 "occur when negating a division");
1362 tem = TREE_OPERAND (t, 1);
1363 if (negate_expr_p (tem))
1365 if (INTEGRAL_TYPE_P (type)
1366 && (TREE_CODE (tem) != INTEGER_CST
1367 || integer_onep (tem)))
1368 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1369 return fold_build2 (TREE_CODE (t), type,
1370 TREE_OPERAND (t, 0), negate_expr (tem));
1372 tem = TREE_OPERAND (t, 0);
1373 if (negate_expr_p (tem))
1375 if (INTEGRAL_TYPE_P (type)
1376 && (TREE_CODE (tem) != INTEGER_CST
1377 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1379 return fold_build2 (TREE_CODE (t), type,
1380 negate_expr (tem), TREE_OPERAND (t, 1));
1386 /* Convert -((double)float) into (double)(-float). */
1387 if (TREE_CODE (type) == REAL_TYPE)
1389 tem = strip_float_extensions (t);
1390 if (tem != t && negate_expr_p (tem))
1391 return fold_convert (type, negate_expr (tem));
1396 /* Negate -f(x) as f(-x). */
1397 if (negate_mathfn_p (builtin_mathfn_code (t))
1398 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1402 fndecl = get_callee_fndecl (t);
1403 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1404 return build_call_expr (fndecl, 1, arg);
1409 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1410 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1412 tree op1 = TREE_OPERAND (t, 1);
1413 if (TREE_INT_CST_HIGH (op1) == 0
1414 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1415 == TREE_INT_CST_LOW (op1))
1417 tree ntype = TYPE_UNSIGNED (type)
1418 ? signed_type_for (type)
1419 : unsigned_type_for (type);
1420 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1421 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1422 return fold_convert (type, temp);
1434 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1435 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1436 return NULL_TREE. */
1439 negate_expr (tree t)
1446 type = TREE_TYPE (t);
1447 STRIP_SIGN_NOPS (t);
1449 tem = fold_negate_expr (t);
1451 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1452 return fold_convert (type, tem);
1455 /* Split a tree IN into a constant, literal and variable parts that could be
1456 combined with CODE to make IN. "constant" means an expression with
1457 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1458 commutative arithmetic operation. Store the constant part into *CONP,
1459 the literal in *LITP and return the variable part. If a part isn't
1460 present, set it to null. If the tree does not decompose in this way,
1461 return the entire tree as the variable part and the other parts as null.
1463 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1464 case, we negate an operand that was subtracted. Except if it is a
1465 literal for which we use *MINUS_LITP instead.
1467 If NEGATE_P is true, we are negating all of IN, again except a literal
1468 for which we use *MINUS_LITP instead.
1470 If IN is itself a literal or constant, return it as appropriate.
1472 Note that we do not guarantee that any of the three values will be the
1473 same type as IN, but they will have the same signedness and mode. */
1476 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1477 tree *minus_litp, int negate_p)
1485 /* Strip any conversions that don't change the machine mode or signedness. */
1486 STRIP_SIGN_NOPS (in);
1488 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1489 || TREE_CODE (in) == FIXED_CST)
1491 else if (TREE_CODE (in) == code
1492 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1493 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1494 /* We can associate addition and subtraction together (even
1495 though the C standard doesn't say so) for integers because
1496 the value is not affected. For reals, the value might be
1497 affected, so we can't. */
1498 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1499 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1501 tree op0 = TREE_OPERAND (in, 0);
1502 tree op1 = TREE_OPERAND (in, 1);
1503 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1504 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1506 /* First see if either of the operands is a literal, then a constant. */
1507 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1508 || TREE_CODE (op0) == FIXED_CST)
1509 *litp = op0, op0 = 0;
1510 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1511 || TREE_CODE (op1) == FIXED_CST)
1512 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1514 if (op0 != 0 && TREE_CONSTANT (op0))
1515 *conp = op0, op0 = 0;
1516 else if (op1 != 0 && TREE_CONSTANT (op1))
1517 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1519 /* If we haven't dealt with either operand, this is not a case we can
1520 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1521 if (op0 != 0 && op1 != 0)
1526 var = op1, neg_var_p = neg1_p;
1528 /* Now do any needed negations. */
1530 *minus_litp = *litp, *litp = 0;
1532 *conp = negate_expr (*conp);
1534 var = negate_expr (var);
1536 else if (TREE_CONSTANT (in))
1544 *minus_litp = *litp, *litp = 0;
1545 else if (*minus_litp)
1546 *litp = *minus_litp, *minus_litp = 0;
1547 *conp = negate_expr (*conp);
1548 var = negate_expr (var);
1554 /* Re-associate trees split by the above function. T1 and T2 are either
1555 expressions to associate or null. Return the new expression, if any. If
1556 we build an operation, do it in TYPE and with CODE. */
1559 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1566 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1567 try to fold this since we will have infinite recursion. But do
1568 deal with any NEGATE_EXPRs. */
1569 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1570 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1572 if (code == PLUS_EXPR)
1574 if (TREE_CODE (t1) == NEGATE_EXPR)
1575 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1576 fold_convert (type, TREE_OPERAND (t1, 0)));
1577 else if (TREE_CODE (t2) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1579 fold_convert (type, TREE_OPERAND (t2, 0)));
1580 else if (integer_zerop (t2))
1581 return fold_convert (type, t1);
1583 else if (code == MINUS_EXPR)
1585 if (integer_zerop (t2))
1586 return fold_convert (type, t1);
1589 return build2 (code, type, fold_convert (type, t1),
1590 fold_convert (type, t2));
1593 return fold_build2 (code, type, fold_convert (type, t1),
1594 fold_convert (type, t2));
1597 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1598 for use in int_const_binop, size_binop and size_diffop. */
1601 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1603 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1605 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1620 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1621 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1622 && TYPE_MODE (type1) == TYPE_MODE (type2);
1626 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1627 to produce a new constant. Return NULL_TREE if we don't know how
1628 to evaluate CODE at compile-time.
1630 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1633 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1635 unsigned HOST_WIDE_INT int1l, int2l;
1636 HOST_WIDE_INT int1h, int2h;
1637 unsigned HOST_WIDE_INT low;
1639 unsigned HOST_WIDE_INT garbagel;
1640 HOST_WIDE_INT garbageh;
1642 tree type = TREE_TYPE (arg1);
1643 int uns = TYPE_UNSIGNED (type);
1645 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1648 int1l = TREE_INT_CST_LOW (arg1);
1649 int1h = TREE_INT_CST_HIGH (arg1);
1650 int2l = TREE_INT_CST_LOW (arg2);
1651 int2h = TREE_INT_CST_HIGH (arg2);
1656 low = int1l | int2l, hi = int1h | int2h;
1660 low = int1l ^ int2l, hi = int1h ^ int2h;
1664 low = int1l & int2l, hi = int1h & int2h;
1670 /* It's unclear from the C standard whether shifts can overflow.
1671 The following code ignores overflow; perhaps a C standard
1672 interpretation ruling is needed. */
1673 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1680 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1685 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1689 neg_double (int2l, int2h, &low, &hi);
1690 add_double (int1l, int1h, low, hi, &low, &hi);
1691 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1695 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1707 if (code == CEIL_DIV_EXPR)
1710 low = int1l / int2l, hi = 0;
1714 /* ... fall through ... */
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1719 if (int2h == 0 && int2l == 1)
1721 low = int1l, hi = int1h;
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1742 if (code == CEIL_MOD_EXPR)
1744 low = int1l % int2l, hi = 0;
1748 /* ... fall through ... */
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1773 low = int2l, hi = int2h;
1782 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1784 /* Propagate overflow flags ourselves. */
1785 if (((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1789 TREE_OVERFLOW (t) = 1;
1793 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1794 ((!uns || is_sizetype) && overflow)
1795 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1800 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1801 constant. We assume ARG1 and ARG2 have the same data type, or at least
1802 are the same kind of constant and the same machine mode. Return zero if
1803 combining the constants is not allowed in the current operating mode.
1805 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1808 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1810 /* Sanity check for the recursive cases. */
1817 if (TREE_CODE (arg1) == INTEGER_CST)
1818 return int_const_binop (code, arg1, arg2, notrunc);
1820 if (TREE_CODE (arg1) == REAL_CST)
1822 enum machine_mode mode;
1825 REAL_VALUE_TYPE value;
1826 REAL_VALUE_TYPE result;
1830 /* The following codes are handled by real_arithmetic. */
1845 d1 = TREE_REAL_CST (arg1);
1846 d2 = TREE_REAL_CST (arg2);
1848 type = TREE_TYPE (arg1);
1849 mode = TYPE_MODE (type);
1851 /* Don't perform operation if we honor signaling NaNs and
1852 either operand is a NaN. */
1853 if (HONOR_SNANS (mode)
1854 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1857 /* Don't perform operation if it would raise a division
1858 by zero exception. */
1859 if (code == RDIV_EXPR
1860 && REAL_VALUES_EQUAL (d2, dconst0)
1861 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1864 /* If either operand is a NaN, just return it. Otherwise, set up
1865 for floating-point trap; we return an overflow. */
1866 if (REAL_VALUE_ISNAN (d1))
1868 else if (REAL_VALUE_ISNAN (d2))
1871 inexact = real_arithmetic (&value, code, &d1, &d2);
1872 real_convert (&result, mode, &value);
1874 /* Don't constant fold this floating point operation if
1875 the result has overflowed and flag_trapping_math. */
1876 if (flag_trapping_math
1877 && MODE_HAS_INFINITIES (mode)
1878 && REAL_VALUE_ISINF (result)
1879 && !REAL_VALUE_ISINF (d1)
1880 && !REAL_VALUE_ISINF (d2))
1883 /* Don't constant fold this floating point operation if the
1884 result may dependent upon the run-time rounding mode and
1885 flag_rounding_math is set, or if GCC's software emulation
1886 is unable to accurately represent the result. */
1887 if ((flag_rounding_math
1888 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1907 /* The following codes are handled by fixed_arithmetic. */
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
2003 return build_complex (type, real, imag);
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with same signedness
2124 and the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2145 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2146 to an integer type. */
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2185 /* See if R is less than the lower bound or greater than the
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2202 tree ut = TYPE_MAX_VALUE (type);
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2230 double_int temp, temp_trunc;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0.
2257 By checking if the fractional bits are not zero to add 1 to temp. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2278 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2279 to another floating point type. */
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2295 to a floating point type. */
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2312 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2313 to another fixed-point type. */
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2337 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2338 to a fixed-point type. */
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2364 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2365 to a fixed-point type. */
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2400 || TREE_CODE (type) == OFFSET_TYPE)
2402 if (TREE_CODE (arg1) == INTEGER_CST)
2403 return fold_convert_const_int_from_int (type, arg1);
2404 else if (TREE_CODE (arg1) == REAL_CST)
2405 return fold_convert_const_int_from_real (code, type, arg1);
2406 else if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_int_from_fixed (type, arg1);
2409 else if (TREE_CODE (type) == REAL_TYPE)
2411 if (TREE_CODE (arg1) == INTEGER_CST)
2412 return build_real_from_int_cst (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_real_from_real (type, arg1);
2415 else if (TREE_CODE (arg1) == FIXED_CST)
2416 return fold_convert_const_real_from_fixed (type, arg1);
2418 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2420 if (TREE_CODE (arg1) == FIXED_CST)
2421 return fold_convert_const_fixed_from_fixed (type, arg1);
2422 else if (TREE_CODE (arg1) == INTEGER_CST)
2423 return fold_convert_const_fixed_from_int (type, arg1);
2424 else if (TREE_CODE (arg1) == REAL_CST)
2425 return fold_convert_const_fixed_from_real (type, arg1);
2430 /* Construct a vector of zero elements of vector type TYPE. */
2433 build_zero_vector (tree type)
2438 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2439 units = TYPE_VECTOR_SUBPARTS (type);
2442 for (i = 0; i < units; i++)
2443 list = tree_cons (NULL_TREE, elem, list);
2444 return build_vector (type, list);
2447 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2450 fold_convertible_p (const_tree type, const_tree arg)
2452 tree orig = TREE_TYPE (arg);
2457 if (TREE_CODE (arg) == ERROR_MARK
2458 || TREE_CODE (type) == ERROR_MARK
2459 || TREE_CODE (orig) == ERROR_MARK)
2462 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2465 switch (TREE_CODE (type))
2467 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2468 case POINTER_TYPE: case REFERENCE_TYPE:
2470 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2471 || TREE_CODE (orig) == OFFSET_TYPE)
2473 return (TREE_CODE (orig) == VECTOR_TYPE
2474 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2477 case FIXED_POINT_TYPE:
2481 return TREE_CODE (type) == TREE_CODE (orig);
2488 /* Convert expression ARG to type TYPE. Used by the middle-end for
2489 simple conversions in preference to calling the front-end's convert. */
2492 fold_convert (tree type, tree arg)
2494 tree orig = TREE_TYPE (arg);
2500 if (TREE_CODE (arg) == ERROR_MARK
2501 || TREE_CODE (type) == ERROR_MARK
2502 || TREE_CODE (orig) == ERROR_MARK)
2503 return error_mark_node;
2505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2506 return fold_build1 (NOP_EXPR, type, arg);
2508 switch (TREE_CODE (type))
2510 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2513 if (TREE_CODE (arg) == INTEGER_CST)
2515 tem = fold_convert_const (NOP_EXPR, type, arg);
2516 if (tem != NULL_TREE)
2519 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 || TREE_CODE (orig) == OFFSET_TYPE)
2521 return fold_build1 (NOP_EXPR, type, arg);
2522 if (TREE_CODE (orig) == COMPLEX_TYPE)
2524 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2525 return fold_convert (type, tem);
2527 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2528 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2529 return fold_build1 (NOP_EXPR, type, arg);
2532 if (TREE_CODE (arg) == INTEGER_CST)
2534 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2535 if (tem != NULL_TREE)
2538 else if (TREE_CODE (arg) == REAL_CST)
2540 tem = fold_convert_const (NOP_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2544 else if (TREE_CODE (arg) == FIXED_CST)
2546 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2551 switch (TREE_CODE (orig))
2554 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2555 case POINTER_TYPE: case REFERENCE_TYPE:
2556 return fold_build1 (FLOAT_EXPR, type, arg);
2559 return fold_build1 (NOP_EXPR, type, arg);
2561 case FIXED_POINT_TYPE:
2562 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2565 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2566 return fold_convert (type, tem);
2572 case FIXED_POINT_TYPE:
2573 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2574 || TREE_CODE (arg) == REAL_CST)
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2581 switch (TREE_CODE (orig))
2583 case FIXED_POINT_TYPE:
2588 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2591 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2592 return fold_convert (type, tem);
2599 switch (TREE_CODE (orig))
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2605 case FIXED_POINT_TYPE:
2606 return build2 (COMPLEX_EXPR, type,
2607 fold_convert (TREE_TYPE (type), arg),
2608 fold_convert (TREE_TYPE (type), integer_zero_node));
2613 if (TREE_CODE (arg) == COMPLEX_EXPR)
2615 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2616 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2617 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2620 arg = save_expr (arg);
2621 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2623 rpart = fold_convert (TREE_TYPE (type), rpart);
2624 ipart = fold_convert (TREE_TYPE (type), ipart);
2625 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2633 if (integer_zerop (arg))
2634 return build_zero_vector (type);
2635 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2636 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2637 || TREE_CODE (orig) == VECTOR_TYPE);
2638 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2641 tem = fold_ignored_result (arg);
2642 if (TREE_CODE (tem) == MODIFY_EXPR)
2644 return fold_build1 (NOP_EXPR, type, tem);
2651 /* Return false if expr can be assumed not to be an lvalue, true
2655 maybe_lvalue_p (const_tree x)
2657 /* We only need to wrap lvalue tree codes. */
2658 switch (TREE_CODE (x))
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2672 case ARRAY_RANGE_REF:
2678 case PREINCREMENT_EXPR:
2679 case PREDECREMENT_EXPR:
2681 case TRY_CATCH_EXPR:
2682 case WITH_CLEANUP_EXPR:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2712 if (! maybe_lvalue_p (x))
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2737 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2784 swap_tree_comparison (enum tree_code code)
2791 case UNORDERED_EXPR:
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2843 return COMPCODE_UNLT;
2845 return COMPCODE_UNEQ;
2847 return COMPCODE_UNLE;
2849 return COMPCODE_UNGT;
2851 return COMPCODE_LTGT;
2853 return COMPCODE_UNGE;
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3043 /* If both types don't have the same precision, then it is not safe
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3093 return tree_int_cst_equal (arg0, arg1);
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3151 if (flags & OEP_ONLY_CONST)
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, the both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3184 case tcc_comparison:
3186 if (OP_SAME (0) && OP_SAME (1))
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3203 switch (TREE_CODE (arg0))
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 Compare the array index by value if it is constant first as we
3216 may have different types but same value here. */
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3229 && OP_SAME_WITH_NULL (2);
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3242 case TRUTH_NOT_EXPR:
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3249 case TRUTH_AND_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3255 /* Otherwise take into account this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3262 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3269 switch (TREE_CODE (arg0))
3272 /* If the CALL_EXPRs call different functions, then they
3273 clearly can not be equal. */
3274 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3279 unsigned int cef = call_expr_flags (arg0);
3280 if (flags & OEP_PURE_SAME)
3281 cef &= ECF_CONST | ECF_PURE;
3288 /* Now see if all the arguments are the same. */
3290 const_call_expr_arg_iterator iter0, iter1;
3292 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3293 a1 = first_const_call_expr_arg (arg1, &iter1);
3295 a0 = next_const_call_expr_arg (&iter0),
3296 a1 = next_const_call_expr_arg (&iter1))
3297 if (! operand_equal_p (a0, a1, flags))
3300 /* If we get here and both argument lists are exhausted
3301 then the CALL_EXPRs are equal. */
3302 return ! (a0 || a1);
3308 case tcc_declaration:
3309 /* Consider __builtin_sqrt equal to sqrt. */
3310 return (TREE_CODE (arg0) == FUNCTION_DECL
3311 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3312 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3313 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3320 #undef OP_SAME_WITH_NULL
3323 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3324 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3326 When in doubt, return 0. */
3329 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3331 int unsignedp1, unsignedpo;
3332 tree primarg0, primarg1, primother;
3333 unsigned int correct_width;
3335 if (operand_equal_p (arg0, arg1, 0))
3338 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3339 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3342 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3343 and see if the inner values are the same. This removes any
3344 signedness comparison, which doesn't matter here. */
3345 primarg0 = arg0, primarg1 = arg1;
3346 STRIP_NOPS (primarg0);
3347 STRIP_NOPS (primarg1);
3348 if (operand_equal_p (primarg0, primarg1, 0))
3351 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3352 actual comparison operand, ARG0.
3354 First throw away any conversions to wider types
3355 already present in the operands. */
3357 primarg1 = get_narrower (arg1, &unsignedp1);
3358 primother = get_narrower (other, &unsignedpo);
3360 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3361 if (unsignedp1 == unsignedpo
3362 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3363 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3365 tree type = TREE_TYPE (arg0);
3367 /* Make sure shorter operand is extended the right way
3368 to match the longer operand. */
3369 primarg1 = fold_convert (signed_or_unsigned_type_for
3370 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3372 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3379 /* See if ARG is an expression that is either a comparison or is performing
3380 arithmetic on comparisons. The comparisons must only be comparing
3381 two different values, which will be stored in *CVAL1 and *CVAL2; if
3382 they are nonzero it means that some operands have already been found.
3383 No variables may be used anywhere else in the expression except in the
3384 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3385 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3387 If this is true, return 1. Otherwise, return zero. */
3390 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3392 enum tree_code code = TREE_CODE (arg);
3393 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3395 /* We can handle some of the tcc_expression cases here. */
3396 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3398 else if (tclass == tcc_expression
3399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3400 || code == COMPOUND_EXPR))
3401 tclass = tcc_binary;
3403 else if (tclass == tcc_expression && code == SAVE_EXPR
3404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3406 /* If we've already found a CVAL1 or CVAL2, this expression is
3407 two complex to handle. */
3408 if (*cval1 || *cval2)
3418 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3423 cval1, cval2, save_p));
3428 case tcc_expression:
3429 if (code == COND_EXPR)
3430 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3431 cval1, cval2, save_p)
3432 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3433 cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3435 cval1, cval2, save_p));
3438 case tcc_comparison:
3439 /* First see if we can handle the first operand, then the second. For
3440 the second operand, we know *CVAL1 can't be zero. It must be that
3441 one side of the comparison is each of the values; test for the
3442 case where this isn't true by failing if the two operands
3445 if (operand_equal_p (TREE_OPERAND (arg, 0),
3446 TREE_OPERAND (arg, 1), 0))
3450 *cval1 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 0);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3460 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3462 else if (*cval2 == 0)
3463 *cval2 = TREE_OPERAND (arg, 1);
3464 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3476 /* ARG is a tree that is known to contain just arithmetic operations and
3477 comparisons. Evaluate the operations in the tree substituting NEW0 for
3478 any occurrence of OLD0 as an operand of a comparison and likewise for
3482 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3484 tree type = TREE_TYPE (arg);
3485 enum tree_code code = TREE_CODE (arg);
3486 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3488 /* We can handle some of the tcc_expression cases here. */
3489 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3491 else if (tclass == tcc_expression
3492 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3493 tclass = tcc_binary;
3498 return fold_build1 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1));
3503 return fold_build2 (code, type,
3504 eval_subst (TREE_OPERAND (arg, 0),
3505 old0, new0, old1, new1),
3506 eval_subst (TREE_OPERAND (arg, 1),
3507 old0, new0, old1, new1));
3509 case tcc_expression:
3513 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3516 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3519 return fold_build3 (code, type,
3520 eval_subst (TREE_OPERAND (arg, 0),
3521 old0, new0, old1, new1),
3522 eval_subst (TREE_OPERAND (arg, 1),
3523 old0, new0, old1, new1),
3524 eval_subst (TREE_OPERAND (arg, 2),
3525 old0, new0, old1, new1));
3529 /* Fall through - ??? */
3531 case tcc_comparison:
3533 tree arg0 = TREE_OPERAND (arg, 0);
3534 tree arg1 = TREE_OPERAND (arg, 1);
3536 /* We need to check both for exact equality and tree equality. The
3537 former will be true if the operand has a side-effect. In that
3538 case, we know the operand occurred exactly once. */
3540 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3542 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3545 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3547 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3550 return fold_build2 (code, type, arg0, arg1);
3558 /* Return a tree for the case when the result of an expression is RESULT
3559 converted to TYPE and OMITTED was previously an operand of the expression
3560 but is now not needed (e.g., we folded OMITTED * 0).
3562 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3563 the conversion of RESULT to TYPE. */
3566 omit_one_operand (tree type, tree result, tree omitted)
3568 tree t = fold_convert (type, result);
3570 /* If the resulting operand is an empty statement, just return the omitted
3571 statement casted to void. */
3572 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3573 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3575 if (TREE_SIDE_EFFECTS (omitted))
3576 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3578 return non_lvalue (t);
3581 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3584 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3586 tree t = fold_convert (type, result);
3588 /* If the resulting operand is an empty statement, just return the omitted
3589 statement casted to void. */
3590 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3591 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3593 if (TREE_SIDE_EFFECTS (omitted))
3594 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3596 return pedantic_non_lvalue (t);
3599 /* Return a tree for the case when the result of an expression is RESULT
3600 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3601 of the expression but are now not needed.
3603 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3604 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3605 evaluated before OMITTED2. Otherwise, if neither has side effects,
3606 just do the conversion of RESULT to TYPE. */
3609 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3611 tree t = fold_convert (type, result);
3613 if (TREE_SIDE_EFFECTS (omitted2))
3614 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3615 if (TREE_SIDE_EFFECTS (omitted1))
3616 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3618 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3622 /* Return a simplified tree node for the truth-negation of ARG. This
3623 never alters ARG itself. We assume that ARG is an operation that
3624 returns a truth value (0 or 1).
3626 FIXME: one would think we would fold the result, but it causes
3627 problems with the dominator optimizer. */
3630 fold_truth_not_expr (tree arg)
3632 tree type = TREE_TYPE (arg);
3633 enum tree_code code = TREE_CODE (arg);
3635 /* If this is a comparison, we can simply invert it, except for
3636 floating-point non-equality comparisons, in which case we just
3637 enclose a TRUTH_NOT_EXPR around what we have. */
3639 if (TREE_CODE_CLASS (code) == tcc_comparison)
3641 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3642 if (FLOAT_TYPE_P (op_type)
3643 && flag_trapping_math
3644 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3645 && code != NE_EXPR && code != EQ_EXPR)
3649 code = invert_tree_comparison (code,
3650 HONOR_NANS (TYPE_MODE (op_type)));
3651 if (code == ERROR_MARK)
3654 return build2 (code, type,
3655 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3662 return constant_boolean_node (integer_zerop (arg), type);
3664 case TRUTH_AND_EXPR:
3665 return build2 (TRUTH_OR_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3670 return build2 (TRUTH_AND_EXPR, type,
3671 invert_truthvalue (TREE_OPERAND (arg, 0)),
3672 invert_truthvalue (TREE_OPERAND (arg, 1)));
3674 case TRUTH_XOR_EXPR:
3675 /* Here we can invert either operand. We invert the first operand
3676 unless the second operand is a TRUTH_NOT_EXPR in which case our
3677 result is the XOR of the first operand with the inside of the
3678 negation of the second operand. */
3680 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3681 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3682 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3684 return build2 (TRUTH_XOR_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 TREE_OPERAND (arg, 1));
3688 case TRUTH_ANDIF_EXPR:
3689 return build2 (TRUTH_ORIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_ORIF_EXPR:
3694 return build2 (TRUTH_ANDIF_EXPR, type,
3695 invert_truthvalue (TREE_OPERAND (arg, 0)),
3696 invert_truthvalue (TREE_OPERAND (arg, 1)));
3698 case TRUTH_NOT_EXPR:
3699 return TREE_OPERAND (arg, 0);
3703 tree arg1 = TREE_OPERAND (arg, 1);
3704 tree arg2 = TREE_OPERAND (arg, 2);
3705 /* A COND_EXPR may have a throw as one operand, which
3706 then has void type. Just leave void operands
3708 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3709 VOID_TYPE_P (TREE_TYPE (arg1))
3710 ? arg1 : invert_truthvalue (arg1),
3711 VOID_TYPE_P (TREE_TYPE (arg2))
3712 ? arg2 : invert_truthvalue (arg2));
3716 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3717 invert_truthvalue (TREE_OPERAND (arg, 1)));
3719 case NON_LVALUE_EXPR:
3720 return invert_truthvalue (TREE_OPERAND (arg, 0));
3723 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3724 return build1 (TRUTH_NOT_EXPR, type, arg);
3728 return build1 (TREE_CODE (arg), type,
3729 invert_truthvalue (TREE_OPERAND (arg, 0)));
3732 if (!integer_onep (TREE_OPERAND (arg, 1)))
3734 return build2 (EQ_EXPR, type, arg,
3735 build_int_cst (type, 0));
3738 return build1 (TRUTH_NOT_EXPR, type, arg);
3740 case CLEANUP_POINT_EXPR:
3741 return build1 (CLEANUP_POINT_EXPR, type,
3742 invert_truthvalue (TREE_OPERAND (arg, 0)));
3751 /* Return a simplified tree node for the truth-negation of ARG. This
3752 never alters ARG itself. We assume that ARG is an operation that
3753 returns a truth value (0 or 1).
3755 FIXME: one would think we would fold the result, but it causes
3756 problems with the dominator optimizer. */
3759 invert_truthvalue (tree arg)
3763 if (TREE_CODE (arg) == ERROR_MARK)
3766 tem = fold_truth_not_expr (arg);
3768 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3773 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3774 operands are another bit-wise operation with a common input. If so,
3775 distribute the bit operations to save an operation and possibly two if
3776 constants are involved. For example, convert
3777 (A | B) & (A | C) into A | (B & C)
3778 Further simplification will occur if B and C are constants.
3780 If this optimization cannot be done, 0 will be returned. */
3783 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3788 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3789 || TREE_CODE (arg0) == code
3790 || (TREE_CODE (arg0) != BIT_AND_EXPR
3791 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3794 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3796 common = TREE_OPERAND (arg0, 0);
3797 left = TREE_OPERAND (arg0, 1);
3798 right = TREE_OPERAND (arg1, 1);
3800 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3802 common = TREE_OPERAND (arg0, 0);
3803 left = TREE_OPERAND (arg0, 1);
3804 right = TREE_OPERAND (arg1, 0);
3806 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3808 common = TREE_OPERAND (arg0, 1);
3809 left = TREE_OPERAND (arg0, 0);
3810 right = TREE_OPERAND (arg1, 1);
3812 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3814 common = TREE_OPERAND (arg0, 1);
3815 left = TREE_OPERAND (arg0, 0);
3816 right = TREE_OPERAND (arg1, 0);
3821 return fold_build2 (TREE_CODE (arg0), type, common,
3822 fold_build2 (code, type, left, right));
3825 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3826 with code CODE. This optimization is unsafe. */
3828 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3830 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3831 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3833 /* (A / C) +- (B / C) -> (A +- B) / C. */
3835 && operand_equal_p (TREE_OPERAND (arg0, 1),
3836 TREE_OPERAND (arg1, 1), 0))
3837 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3838 fold_build2 (code, type,
3839 TREE_OPERAND (arg0, 0),
3840 TREE_OPERAND (arg1, 0)),
3841 TREE_OPERAND (arg0, 1));
3843 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3844 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3845 TREE_OPERAND (arg1, 0), 0)
3846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3847 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3849 REAL_VALUE_TYPE r0, r1;
3850 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3851 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3853 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3855 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3856 real_arithmetic (&r0, code, &r0, &r1);
3857 return fold_build2 (MULT_EXPR, type,
3858 TREE_OPERAND (arg0, 0),
3859 build_real (type, r0));
3865 /* Subroutine for fold_truthop: decode a field reference.
3867 If EXP is a comparison reference, we return the innermost reference.
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3875 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3876 otherwise it is not changed.
3878 *PUNSIGNEDP is set to the signedness of the field.
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
3889 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3894 tree outer_type = 0;
3896 tree mask, inner, offset;
3898 unsigned int precision;
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3940 mask = build_int_cst_type (unsigned_type, -1);
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3947 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3948 fold_convert (unsigned_type, and_mask), mask);
3951 *pand_mask = and_mask;
3955 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3956 represents the sign bit of EXP's type. If EXP represents a sign
3957 or zero extension, also test VAL against the unextended type.
3958 The return value is the (sub)expression whose sign bit is VAL,
3959 or NULL_TREE otherwise. */
3962 sign_bit_p (tree exp, const_tree val)
3964 unsigned HOST_WIDE_INT mask_lo, lo;
3965 HOST_WIDE_INT mask_hi, hi;
3969 /* Tree EXP must have an integral type. */
3970 t = TREE_TYPE (exp);
3971 if (! INTEGRAL_TYPE_P (t))
3974 /* Tree VAL must be an integer constant. */
3975 if (TREE_CODE (val) != INTEGER_CST
3976 || TREE_OVERFLOW (val))
3979 width = TYPE_PRECISION (t);
3980 if (width > HOST_BITS_PER_WIDE_INT)
3982 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3985 mask_hi = ((unsigned HOST_WIDE_INT) -1
3986 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3992 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3995 mask_lo = ((unsigned HOST_WIDE_INT) -1
3996 >> (HOST_BITS_PER_WIDE_INT - width));
3999 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4000 treat VAL as if it were unsigned. */
4001 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4002 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4005 /* Handle extension from a narrower type. */
4006 if (TREE_CODE (exp) == NOP_EXPR
4007 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4008 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4013 /* Subroutine for fold_truthop: determine if an operand is simple enough
4014 to be evaluated unconditionally. */
4017 simple_operand_p (const_tree exp)
4019 /* Strip any conversions that don't change the machine mode. */
4022 return (CONSTANT_CLASS_P (exp)
4023 || TREE_CODE (exp) == SSA_NAME
4025 && ! TREE_ADDRESSABLE (exp)
4026 && ! TREE_THIS_VOLATILE (exp)
4027 && ! DECL_NONLOCAL (exp)
4028 /* Don't regard global variables as simple. They may be
4029 allocated in ways unknown to the compiler (shared memory,
4030 #pragma weak, etc). */
4031 && ! TREE_PUBLIC (exp)
4032 && ! DECL_EXTERNAL (exp)
4033 /* Loading a static variable is unduly expensive, but global
4034 registers aren't expensive. */
4035 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4038 /* The following functions are subroutines to fold_range_test and allow it to
4039 try to change a logical combination of comparisons into a range test.
4042 X == 2 || X == 3 || X == 4 || X == 5
4046 (unsigned) (X - 2) <= 3
4048 We describe each set of comparisons as being either inside or outside
4049 a range, using a variable named like IN_P, and then describe the
4050 range with a lower and upper bound. If one of the bounds is omitted,
4051 it represents either the highest or lowest value of the type.
4053 In the comments below, we represent a range by two numbers in brackets
4054 preceded by a "+" to designate being inside that range, or a "-" to
4055 designate being outside that range, so the condition can be inverted by
4056 flipping the prefix. An omitted bound is represented by a "-". For
4057 example, "- [-, 10]" means being outside the range starting at the lowest
4058 possible value and ending at 10, in other words, being greater than 10.
4059 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4062 We set up things so that the missing bounds are handled in a consistent
4063 manner so neither a missing bound nor "true" and "false" need to be
4064 handled using a special case. */
4066 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4067 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4068 and UPPER1_P are nonzero if the respective argument is an upper bound
4069 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4070 must be specified for a comparison. ARG1 will be converted to ARG0's
4071 type if both are specified. */
4074 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4075 tree arg1, int upper1_p)
4081 /* If neither arg represents infinity, do the normal operation.
4082 Else, if not a comparison, return infinity. Else handle the special
4083 comparison rules. Note that most of the cases below won't occur, but
4084 are handled for consistency. */
4086 if (arg0 != 0 && arg1 != 0)
4088 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4089 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4091 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4094 if (TREE_CODE_CLASS (code) != tcc_comparison)
4097 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4098 for neither. In real maths, we cannot assume open ended ranges are
4099 the same. But, this is computer arithmetic, where numbers are finite.
4100 We can therefore make the transformation of any unbounded range with
4101 the value Z, Z being greater than any representable number. This permits
4102 us to treat unbounded ranges as equal. */
4103 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4104 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4108 result = sgn0 == sgn1;
4111 result = sgn0 != sgn1;
4114 result = sgn0 < sgn1;
4117 result = sgn0 <= sgn1;
4120 result = sgn0 > sgn1;
4123 result = sgn0 >= sgn1;
4129 return constant_boolean_node (result, type);
4132 /* Given EXP, a logical expression, set the range it is testing into
4133 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4134 actually being tested. *PLOW and *PHIGH will be made of the same
4135 type as the returned expression. If EXP is not a comparison, we
4136 will most likely not be returning a useful value and range. Set
4137 *STRICT_OVERFLOW_P to true if the return value is only valid
4138 because signed overflow is undefined; otherwise, do not change
4139 *STRICT_OVERFLOW_P. */
4142 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4143 bool *strict_overflow_p)
4145 enum tree_code code;
4146 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4147 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4149 tree low, high, n_low, n_high;
4151 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4152 and see if we can refine the range. Some of the cases below may not
4153 happen, but it doesn't seem worth worrying about this. We "continue"
4154 the outer loop when we've changed something; otherwise we "break"
4155 the switch, which will "break" the while. */
4158 low = high = build_int_cst (TREE_TYPE (exp), 0);
4162 code = TREE_CODE (exp);
4163 exp_type = TREE_TYPE (exp);
4165 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4167 if (TREE_OPERAND_LENGTH (exp) > 0)
4168 arg0 = TREE_OPERAND (exp, 0);
4169 if (TREE_CODE_CLASS (code) == tcc_comparison
4170 || TREE_CODE_CLASS (code) == tcc_unary
4171 || TREE_CODE_CLASS (code) == tcc_binary)
4172 arg0_type = TREE_TYPE (arg0);
4173 if (TREE_CODE_CLASS (code) == tcc_binary
4174 || TREE_CODE_CLASS (code) == tcc_comparison
4175 || (TREE_CODE_CLASS (code) == tcc_expression
4176 && TREE_OPERAND_LENGTH (exp) > 1))
4177 arg1 = TREE_OPERAND (exp, 1);
4182 case TRUTH_NOT_EXPR:
4183 in_p = ! in_p, exp = arg0;
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 /* We can only do something if the range is testing for zero
4189 and if the second operand is an integer constant. Note that
4190 saying something is "in" the range we make is done by
4191 complementing IN_P since it will set in the initial case of
4192 being not equal to zero; "out" is leaving it alone. */
4193 if (low == 0 || high == 0
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4200 case NE_EXPR: /* - [c, c] */
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4222 /* If this is an unsigned comparison, we also know that EXP is
4223 greater than or equal to zero. We base the range tests we make
4224 on that fact, so we record it here so we can parse existing
4225 range tests. We test arg0_type since often the return type
4226 of, e.g. EQ_EXPR, is boolean. */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4231 build_int_cst (arg0_type, 0),
4235 in_p = n_in_p, low = n_low, high = n_high;
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound
4240 if (high == 0 && low && ! integer_zerop (low))
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 integer_one_node, 0);
4245 low = build_int_cst (arg0_type, 0);
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4257 n_high = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4260 low = n_low, high = n_high;
4266 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4267 build_int_cst (exp_type, 1));
4270 case PLUS_EXPR: case MINUS_EXPR:
4271 if (TREE_CODE (arg1) != INTEGER_CST)
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, low, 0, arg1, 0);
4286 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4287 arg0_type, high, 1, arg1, 0);
4288 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4289 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4293 *strict_overflow_p = true;
4295 /* Check for an unsigned range which has wrapped around the maximum
4296 value thus making n_high < n_low, and normalize it. */
4297 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4299 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4300 integer_one_node, 0);
4301 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4302 integer_one_node, 0);
4304 /* If the range is of the form +/- [ x+1, x ], we won't
4305 be able to normalize it. But then, it represents the
4306 whole range or the empty set, so make it
4308 if (tree_int_cst_equal (n_low, low)
4309 && tree_int_cst_equal (n_high, high))
4315 low = n_low, high = n_high;
4320 CASE_CONVERT: case NON_LVALUE_EXPR:
4321 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4324 if (! INTEGRAL_TYPE_P (arg0_type)
4325 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4326 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4329 n_low = low, n_high = high;
4332 n_low = fold_convert (arg0_type, n_low);
4335 n_high = fold_convert (arg0_type, n_high);
4338 /* If we're converting arg0 from an unsigned type, to exp,
4339 a signed type, we will be doing the comparison as unsigned.
4340 The tests above have already verified that LOW and HIGH
4343 So we have to ensure that we will handle large unsigned
4344 values the same way that the current signed bounds treat
4347 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4351 /* For fixed-point modes, we need to pass the saturating flag
4352 as the 2nd parameter. */
4353 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4354 equiv_type = lang_hooks.types.type_for_mode
4355 (TYPE_MODE (arg0_type),
4356 TYPE_SATURATING (arg0_type));
4358 equiv_type = lang_hooks.types.type_for_mode
4359 (TYPE_MODE (arg0_type), 1);
4361 /* A range without an upper bound is, naturally, unbounded.
4362 Since convert would have cropped a very large value, use
4363 the max value for the destination type. */
4365 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4366 : TYPE_MAX_VALUE (arg0_type);
4368 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4369 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4370 fold_convert (arg0_type,
4372 build_int_cst (arg0_type, 1));
4374 /* If the low bound is specified, "and" the range with the
4375 range for which the original unsigned value will be
4379 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4380 1, n_low, n_high, 1,
4381 fold_convert (arg0_type,
4386 in_p = (n_in_p == in_p);
4390 /* Otherwise, "or" the range with the range of the input
4391 that will be interpreted as negative. */
4392 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4393 0, n_low, n_high, 1,
4394 fold_convert (arg0_type,
4399 in_p = (in_p != n_in_p);
4404 low = n_low, high = n_high;
4414 /* If EXP is a constant, we can evaluate whether this is true or false. */
4415 if (TREE_CODE (exp) == INTEGER_CST)
4417 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4419 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4425 *pin_p = in_p, *plow = low, *phigh = high;
4429 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4430 type, TYPE, return an expression to test if EXP is in (or out of, depending
4431 on IN_P) the range. Return 0 if the test couldn't be created. */
4434 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4436 tree etype = TREE_TYPE (exp);
4439 #ifdef HAVE_canonicalize_funcptr_for_compare
4440 /* Disable this optimization for function pointer expressions
4441 on targets that require function pointer canonicalization. */
4442 if (HAVE_canonicalize_funcptr_for_compare
4443 && TREE_CODE (etype) == POINTER_TYPE
4444 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4450 value = build_range_check (type, exp, 1, low, high);
4452 return invert_truthvalue (value);
4457 if (low == 0 && high == 0)
4458 return build_int_cst (type, 1);
4461 return fold_build2 (LE_EXPR, type, exp,
4462 fold_convert (etype, high));
4465 return fold_build2 (GE_EXPR, type, exp,
4466 fold_convert (etype, low));
4468 if (operand_equal_p (low, high, 0))
4469 return fold_build2 (EQ_EXPR, type, exp,
4470 fold_convert (etype, low));
4472 if (integer_zerop (low))
4474 if (! TYPE_UNSIGNED (etype))
4476 etype = unsigned_type_for (etype);
4477 high = fold_convert (etype, high);
4478 exp = fold_convert (etype, exp);
4480 return build_range_check (type, exp, 1, 0, high);
4483 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4484 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4486 unsigned HOST_WIDE_INT lo;
4490 prec = TYPE_PRECISION (etype);
4491 if (prec <= HOST_BITS_PER_WIDE_INT)
4494 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4498 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4499 lo = (unsigned HOST_WIDE_INT) -1;
4502 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4504 if (TYPE_UNSIGNED (etype))
4506 etype = signed_type_for (etype);
4507 exp = fold_convert (etype, exp);
4509 return fold_build2 (GT_EXPR, type, exp,
4510 build_int_cst (etype, 0));
4514 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4515 This requires wrap-around arithmetics for the type of the expression. */
4516 switch (TREE_CODE (etype))
4519 /* There is no requirement that LOW be within the range of ETYPE
4520 if the latter is a subtype. It must, however, be within the base
4521 type of ETYPE. So be sure we do the subtraction in that type. */
4522 if (TREE_TYPE (etype))
4523 etype = TREE_TYPE (etype);
4528 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4529 TYPE_UNSIGNED (etype));
4536 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4537 if (TREE_CODE (etype) == INTEGER_TYPE
4538 && !TYPE_OVERFLOW_WRAPS (etype))
4540 tree utype, minv, maxv;
4542 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4543 for the type in question, as we rely on this here. */
4544 utype = unsigned_type_for (etype);
4545 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4546 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4547 integer_one_node, 1);
4548 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4550 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4557 high = fold_convert (etype, high);
4558 low = fold_convert (etype, low);
4559 exp = fold_convert (etype, exp);
4561 value = const_binop (MINUS_EXPR, high, low, 0);
4564 if (POINTER_TYPE_P (etype))
4566 if (value != 0 && !TREE_OVERFLOW (value))
4568 low = fold_convert (sizetype, low);
4569 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4570 return build_range_check (type,
4571 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4572 1, build_int_cst (etype, 0), value);
4577 if (value != 0 && !TREE_OVERFLOW (value))
4578 return build_range_check (type,
4579 fold_build2 (MINUS_EXPR, etype, exp, low),
4580 1, build_int_cst (etype, 0), value);
4585 /* Return the predecessor of VAL in its type, handling the infinite case. */
4588 range_predecessor (tree val)
4590 tree type = TREE_TYPE (val);
4592 if (INTEGRAL_TYPE_P (type)
4593 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4596 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4599 /* Return the successor of VAL in its type, handling the infinite case. */
4602 range_successor (tree val)
4604 tree type = TREE_TYPE (val);
4606 if (INTEGRAL_TYPE_P (type)
4607 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4610 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4613 /* Given two ranges, see if we can merge them into one. Return 1 if we
4614 can, 0 if we can't. Set the output range into the specified parameters. */
4617 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4618 tree high0, int in1_p, tree low1, tree high1)
4626 int lowequal = ((low0 == 0 && low1 == 0)
4627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4628 low0, 0, low1, 0)));
4629 int highequal = ((high0 == 0 && high1 == 0)
4630 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4631 high0, 1, high1, 1)));
4633 /* Make range 0 be the range that starts first, or ends last if they
4634 start at the same value. Swap them if it isn't. */
4635 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4638 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4639 high1, 1, high0, 1))))
4641 temp = in0_p, in0_p = in1_p, in1_p = temp;
4642 tem = low0, low0 = low1, low1 = tem;
4643 tem = high0, high0 = high1, high1 = tem;
4646 /* Now flag two cases, whether the ranges are disjoint or whether the
4647 second range is totally subsumed in the first. Note that the tests
4648 below are simplified by the ones above. */
4649 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4650 high0, 1, low1, 0));
4651 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4652 high1, 1, high0, 1));
4654 /* We now have four cases, depending on whether we are including or
4655 excluding the two ranges. */
4658 /* If they don't overlap, the result is false. If the second range
4659 is a subset it is the result. Otherwise, the range is from the start
4660 of the second to the end of the first. */
4662 in_p = 0, low = high = 0;
4664 in_p = 1, low = low1, high = high1;
4666 in_p = 1, low = low1, high = high0;
4669 else if (in0_p && ! in1_p)
4671 /* If they don't overlap, the result is the first range. If they are
4672 equal, the result is false. If the second range is a subset of the
4673 first, and the ranges begin at the same place, we go from just after
4674 the end of the second range to the end of the first. If the second
4675 range is not a subset of the first, or if it is a subset and both
4676 ranges end at the same place, the range starts at the start of the
4677 first range and ends just before the second range.
4678 Otherwise, we can't describe this as a single range. */
4680 in_p = 1, low = low0, high = high0;
4681 else if (lowequal && highequal)
4682 in_p = 0, low = high = 0;
4683 else if (subset && lowequal)
4685 low = range_successor (high1);
4690 /* We are in the weird situation where high0 > high1 but
4691 high1 has no successor. Punt. */
4695 else if (! subset || highequal)
4698 high = range_predecessor (low1);
4702 /* low0 < low1 but low1 has no predecessor. Punt. */
4710 else if (! in0_p && in1_p)
4712 /* If they don't overlap, the result is the second range. If the second
4713 is a subset of the first, the result is false. Otherwise,
4714 the range starts just after the first range and ends at the
4715 end of the second. */
4717 in_p = 1, low = low1, high = high1;
4718 else if (subset || highequal)
4719 in_p = 0, low = high = 0;
4722 low = range_successor (high0);
4727 /* high1 > high0 but high0 has no successor. Punt. */
4735 /* The case where we are excluding both ranges. Here the complex case
4736 is if they don't overlap. In that case, the only time we have a
4737 range is if they are adjacent. If the second is a subset of the
4738 first, the result is the first. Otherwise, the range to exclude
4739 starts at the beginning of the first range and ends at the end of the
4743 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4744 range_successor (high0),
4746 in_p = 0, low = low0, high = high1;
4749 /* Canonicalize - [min, x] into - [-, x]. */
4750 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4751 switch (TREE_CODE (TREE_TYPE (low0)))
4754 if (TYPE_PRECISION (TREE_TYPE (low0))
4755 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4759 if (tree_int_cst_equal (low0,
4760 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4764 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4765 && integer_zerop (low0))
4772 /* Canonicalize - [x, max] into - [x, -]. */
4773 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4774 switch (TREE_CODE (TREE_TYPE (high1)))
4777 if (TYPE_PRECISION (TREE_TYPE (high1))
4778 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4782 if (tree_int_cst_equal (high1,
4783 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4787 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4788 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4790 integer_one_node, 1)))
4797 /* The ranges might be also adjacent between the maximum and
4798 minimum values of the given type. For
4799 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4800 return + [x + 1, y - 1]. */
4801 if (low0 == 0 && high1 == 0)
4803 low = range_successor (high0);
4804 high = range_predecessor (low1);
4805 if (low == 0 || high == 0)
4815 in_p = 0, low = low0, high = high0;
4817 in_p = 0, low = low0, high = high1;
4820 *pin_p = in_p, *plow = low, *phigh = high;
4825 /* Subroutine of fold, looking inside expressions of the form
4826 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4827 of the COND_EXPR. This function is being used also to optimize
4828 A op B ? C : A, by reversing the comparison first.
4830 Return a folded expression whose code is not a COND_EXPR
4831 anymore, or NULL_TREE if no folding opportunity is found. */
4834 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4836 enum tree_code comp_code = TREE_CODE (arg0);
4837 tree arg00 = TREE_OPERAND (arg0, 0);
4838 tree arg01 = TREE_OPERAND (arg0, 1);
4839 tree arg1_type = TREE_TYPE (arg1);
4845 /* If we have A op 0 ? A : -A, consider applying the following
4848 A == 0? A : -A same as -A
4849 A != 0? A : -A same as A
4850 A >= 0? A : -A same as abs (A)
4851 A > 0? A : -A same as abs (A)
4852 A <= 0? A : -A same as -abs (A)
4853 A < 0? A : -A same as -abs (A)
4855 None of these transformations work for modes with signed
4856 zeros. If A is +/-0, the first two transformations will
4857 change the sign of the result (from +0 to -0, or vice
4858 versa). The last four will fix the sign of the result,
4859 even though the original expressions could be positive or
4860 negative, depending on the sign of A.
4862 Note that all these transformations are correct if A is
4863 NaN, since the two alternatives (A and -A) are also NaNs. */
4864 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4865 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4866 ? real_zerop (arg01)
4867 : integer_zerop (arg01))
4868 && ((TREE_CODE (arg2) == NEGATE_EXPR
4869 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4870 /* In the case that A is of the form X-Y, '-A' (arg2) may
4871 have already been folded to Y-X, check for that. */
4872 || (TREE_CODE (arg1) == MINUS_EXPR
4873 && TREE_CODE (arg2) == MINUS_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg1, 0),
4875 TREE_OPERAND (arg2, 1), 0)
4876 && operand_equal_p (TREE_OPERAND (arg1, 1),
4877 TREE_OPERAND (arg2, 0), 0))))
4882 tem = fold_convert (arg1_type, arg1);
4883 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4886 return pedantic_non_lvalue (fold_convert (type, arg1));
4889 if (flag_trapping_math)
4894 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4895 arg1 = fold_convert (signed_type_for
4896 (TREE_TYPE (arg1)), arg1);
4897 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4898 return pedantic_non_lvalue (fold_convert (type, tem));
4901 if (flag_trapping_math)
4905 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4906 arg1 = fold_convert (signed_type_for
4907 (TREE_TYPE (arg1)), arg1);
4908 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4909 return negate_expr (fold_convert (type, tem));
4911 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4915 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4916 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4917 both transformations are correct when A is NaN: A != 0
4918 is then true, and A == 0 is false. */
4920 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4921 && integer_zerop (arg01) && integer_zerop (arg2))
4923 if (comp_code == NE_EXPR)
4924 return pedantic_non_lvalue (fold_convert (type, arg1));
4925 else if (comp_code == EQ_EXPR)
4926 return build_int_cst (type, 0);
4929 /* Try some transformations of A op B ? A : B.
4931 A == B? A : B same as B
4932 A != B? A : B same as A
4933 A >= B? A : B same as max (A, B)
4934 A > B? A : B same as max (B, A)
4935 A <= B? A : B same as min (A, B)
4936 A < B? A : B same as min (B, A)
4938 As above, these transformations don't work in the presence
4939 of signed zeros. For example, if A and B are zeros of
4940 opposite sign, the first two transformations will change
4941 the sign of the result. In the last four, the original
4942 expressions give different results for (A=+0, B=-0) and
4943 (A=-0, B=+0), but the transformed expressions do not.
4945 The first two transformations are correct if either A or B
4946 is a NaN. In the first transformation, the condition will
4947 be false, and B will indeed be chosen. In the case of the
4948 second transformation, the condition A != B will be true,
4949 and A will be chosen.
4951 The conversions to max() and min() are not correct if B is
4952 a number and A is not. The conditions in the original
4953 expressions will be false, so all four give B. The min()
4954 and max() versions would give a NaN instead. */
4955 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4956 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4957 /* Avoid these transformations if the COND_EXPR may be used
4958 as an lvalue in the C++ front-end. PR c++/19199. */
4960 || (strcmp (lang_hooks.name, "GNU C++") != 0
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4980 return pedantic_non_lvalue (fold_convert (type, arg2));
4982 return pedantic_non_lvalue (fold_convert (type, arg1));
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4993 comp_op0 = fold_convert (comp_type, comp_op0);
4994 comp_op1 = fold_convert (comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4998 return pedantic_non_lvalue (fold_convert (type, tem));
5005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5007 comp_op0 = fold_convert (comp_type, comp_op0);
5008 comp_op1 = fold_convert (comp_type, comp_op1);
5009 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5010 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5011 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5012 return pedantic_non_lvalue (fold_convert (type, tem));
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5017 return pedantic_non_lvalue (fold_convert (type, arg2));
5020 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5021 return pedantic_non_lvalue (fold_convert (type, arg1));
5024 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5029 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5030 we might still be able to simplify this. For example,
5031 if C1 is one less or one more than C2, this might have started
5032 out as a MIN or MAX and been transformed by this function.
5033 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5035 if (INTEGRAL_TYPE_P (type)
5036 && TREE_CODE (arg01) == INTEGER_CST
5037 && TREE_CODE (arg2) == INTEGER_CST)
5041 /* We can replace A with C1 in this case. */
5042 arg1 = fold_convert (type, arg01);
5043 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5046 /* If C1 is C2 + 1, this is min(A, C2). */
5047 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5049 && operand_equal_p (arg01,
5050 const_binop (PLUS_EXPR, arg2,
5051 build_int_cst (type, 1), 0),
5053 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5055 fold_convert (type, arg1),
5060 /* If C1 is C2 - 1, this is min(A, C2). */
5061 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5063 && operand_equal_p (arg01,
5064 const_binop (MINUS_EXPR, arg2,
5065 build_int_cst (type, 1), 0),
5067 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5069 fold_convert (type, arg1),
5074 /* If C1 is C2 - 1, this is max(A, C2). */
5075 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5077 && operand_equal_p (arg01,
5078 const_binop (MINUS_EXPR, arg2,
5079 build_int_cst (type, 1), 0),
5081 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5083 fold_convert (type, arg1),
5088 /* If C1 is C2 + 1, this is max(A, C2). */
5089 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5091 && operand_equal_p (arg01,
5092 const_binop (PLUS_EXPR, arg2,
5093 build_int_cst (type, 1), 0),
5095 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5097 fold_convert (type, arg1),
5111 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5112 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5115 /* EXP is some logical combination of boolean tests. See if we can
5116 merge it into some range test. Return the new tree if so. */
5119 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5121 int or_op = (code == TRUTH_ORIF_EXPR
5122 || code == TRUTH_OR_EXPR);
5123 int in0_p, in1_p, in_p;
5124 tree low0, low1, low, high0, high1, high;
5125 bool strict_overflow_p = false;
5126 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5127 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5129 const char * const warnmsg = G_("assuming signed overflow does not occur "
5130 "when simplifying range test");
5132 /* If this is an OR operation, invert both sides; we will invert
5133 again at the end. */
5135 in0_p = ! in0_p, in1_p = ! in1_p;
5137 /* If both expressions are the same, if we can merge the ranges, and we
5138 can build the range test, return it or it inverted. If one of the
5139 ranges is always true or always false, consider it to be the same
5140 expression as the other. */
5141 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5142 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5144 && 0 != (tem = (build_range_check (type,
5146 : rhs != 0 ? rhs : integer_zero_node,
5149 if (strict_overflow_p)
5150 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5151 return or_op ? invert_truthvalue (tem) : tem;
5154 /* On machines where the branch cost is expensive, if this is a
5155 short-circuited branch and the underlying object on both sides
5156 is the same, make a non-short-circuit operation. */
5157 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5158 && lhs != 0 && rhs != 0
5159 && (code == TRUTH_ANDIF_EXPR
5160 || code == TRUTH_ORIF_EXPR)
5161 && operand_equal_p (lhs, rhs, 0))
5163 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5164 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5165 which cases we can't do this. */
5166 if (simple_operand_p (lhs))
5167 return build2 (code == TRUTH_ANDIF_EXPR
5168 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5171 else if (lang_hooks.decls.global_bindings_p () == 0
5172 && ! CONTAINS_PLACEHOLDER_P (lhs))
5174 tree common = save_expr (lhs);
5176 if (0 != (lhs = build_range_check (type, common,
5177 or_op ? ! in0_p : in0_p,
5179 && (0 != (rhs = build_range_check (type, common,
5180 or_op ? ! in1_p : in1_p,
5183 if (strict_overflow_p)
5184 fold_overflow_warning (warnmsg,
5185 WARN_STRICT_OVERFLOW_COMPARISON);
5186 return build2 (code == TRUTH_ANDIF_EXPR
5187 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5196 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5197 bit value. Arrange things so the extra bits will be set to zero if and
5198 only if C is signed-extended to its full width. If MASK is nonzero,
5199 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5202 unextend (tree c, int p, int unsignedp, tree mask)
5204 tree type = TREE_TYPE (c);
5205 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5208 if (p == modesize || unsignedp)
5211 /* We work by getting just the sign bit into the low-order bit, then
5212 into the high-order bit, then sign-extend. We then XOR that value
5214 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5215 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5217 /* We must use a signed type in order to get an arithmetic right shift.
5218 However, we must also avoid introducing accidental overflows, so that
5219 a subsequent call to integer_zerop will work. Hence we must
5220 do the type conversion here. At this point, the constant is either
5221 zero or one, and the conversion to a signed type can never overflow.
5222 We could get an overflow if this conversion is done anywhere else. */
5223 if (TYPE_UNSIGNED (type))
5224 temp = fold_convert (signed_type_for (type), temp);
5226 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5227 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5229 temp = const_binop (BIT_AND_EXPR, temp,
5230 fold_convert (TREE_TYPE (c), mask), 0);
5231 /* If necessary, convert the type back to match the type of C. */
5232 if (TYPE_UNSIGNED (type))
5233 temp = fold_convert (type, temp);
5235 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5238 /* Find ways of folding logical expressions of LHS and RHS:
5239 Try to merge two comparisons to the same innermost item.
5240 Look for range tests like "ch >= '0' && ch <= '9'".
5241 Look for combinations of simple terms on machines with expensive branches
5242 and evaluate the RHS unconditionally.
5244 For example, if we have p->a == 2 && p->b == 4 and we can make an
5245 object large enough to span both A and B, we can do this with a comparison
5246 against the object ANDed with the a mask.
5248 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5249 operations to do this with one comparison.
5251 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5252 function and the one above.
5254 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5255 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5257 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5260 We return the simplified tree or 0 if no optimization is possible. */
5263 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5265 /* If this is the "or" of two comparisons, we can do something if
5266 the comparisons are NE_EXPR. If this is the "and", we can do something
5267 if the comparisons are EQ_EXPR. I.e.,
5268 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5270 WANTED_CODE is this operation code. For single bit fields, we can
5271 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5272 comparison for one-bit fields. */
5274 enum tree_code wanted_code;
5275 enum tree_code lcode, rcode;
5276 tree ll_arg, lr_arg, rl_arg, rr_arg;
5277 tree ll_inner, lr_inner, rl_inner, rr_inner;
5278 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5279 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5280 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5281 HOST_WIDE_INT lnbitsize, lnbitpos;
5282 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5283 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5284 enum machine_mode lnmode;
5285 tree ll_mask, lr_mask, rl_mask, rr_mask;
5286 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5287 tree l_const, r_const;
5288 tree lntype, result;
5289 int first_bit, end_bit;
5291 tree orig_lhs = lhs, orig_rhs = rhs;
5292 enum tree_code orig_code = code;
5294 /* Start by getting the comparison codes. Fail if anything is volatile.
5295 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5296 it were surrounded with a NE_EXPR. */
5298 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5301 lcode = TREE_CODE (lhs);
5302 rcode = TREE_CODE (rhs);
5304 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5306 lhs = build2 (NE_EXPR, truth_type, lhs,
5307 build_int_cst (TREE_TYPE (lhs), 0));
5311 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5313 rhs = build2 (NE_EXPR, truth_type, rhs,
5314 build_int_cst (TREE_TYPE (rhs), 0));
5318 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5319 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5322 ll_arg = TREE_OPERAND (lhs, 0);
5323 lr_arg = TREE_OPERAND (lhs, 1);
5324 rl_arg = TREE_OPERAND (rhs, 0);
5325 rr_arg = TREE_OPERAND (rhs, 1);
5327 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5328 if (simple_operand_p (ll_arg)
5329 && simple_operand_p (lr_arg))
5332 if (operand_equal_p (ll_arg, rl_arg, 0)
5333 && operand_equal_p (lr_arg, rr_arg, 0))
5335 result = combine_comparisons (code, lcode, rcode,
5336 truth_type, ll_arg, lr_arg);
5340 else if (operand_equal_p (ll_arg, rr_arg, 0)
5341 && operand_equal_p (lr_arg, rl_arg, 0))
5343 result = combine_comparisons (code, lcode,
5344 swap_tree_comparison (rcode),
5345 truth_type, ll_arg, lr_arg);
5351 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5354 /* If the RHS can be evaluated unconditionally and its operands are
5355 simple, it wins to evaluate the RHS unconditionally on machines
5356 with expensive branches. In this case, this isn't a comparison
5357 that can be merged. Avoid doing this if the RHS is a floating-point
5358 comparison since those can trap. */
5360 if (BRANCH_COST >= 2
5361 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5362 && simple_operand_p (rl_arg)
5363 && simple_operand_p (rr_arg))
5365 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5366 if (code == TRUTH_OR_EXPR
5367 && lcode == NE_EXPR && integer_zerop (lr_arg)
5368 && rcode == NE_EXPR && integer_zerop (rr_arg)
5369 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5370 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5371 return build2 (NE_EXPR, truth_type,
5372 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5374 build_int_cst (TREE_TYPE (ll_arg), 0));
5376 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5377 if (code == TRUTH_AND_EXPR
5378 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5379 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5380 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5381 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5382 return build2 (EQ_EXPR, truth_type,
5383 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5385 build_int_cst (TREE_TYPE (ll_arg), 0));
5387 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5389 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5390 return build2 (code, truth_type, lhs, rhs);
5395 /* See if the comparisons can be merged. Then get all the parameters for
5398 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5399 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5403 ll_inner = decode_field_reference (ll_arg,
5404 &ll_bitsize, &ll_bitpos, &ll_mode,
5405 &ll_unsignedp, &volatilep, &ll_mask,
5407 lr_inner = decode_field_reference (lr_arg,
5408 &lr_bitsize, &lr_bitpos, &lr_mode,
5409 &lr_unsignedp, &volatilep, &lr_mask,
5411 rl_inner = decode_field_reference (rl_arg,
5412 &rl_bitsize, &rl_bitpos, &rl_mode,
5413 &rl_unsignedp, &volatilep, &rl_mask,
5415 rr_inner = decode_field_reference (rr_arg,
5416 &rr_bitsize, &rr_bitpos, &rr_mode,
5417 &rr_unsignedp, &volatilep, &rr_mask,
5420 /* It must be true that the inner operation on the lhs of each
5421 comparison must be the same if we are to be able to do anything.
5422 Then see if we have constants. If not, the same must be true for
5424 if (volatilep || ll_inner == 0 || rl_inner == 0
5425 || ! operand_equal_p (ll_inner, rl_inner, 0))
5428 if (TREE_CODE (lr_arg) == INTEGER_CST
5429 && TREE_CODE (rr_arg) == INTEGER_CST)
5430 l_const = lr_arg, r_const = rr_arg;
5431 else if (lr_inner == 0 || rr_inner == 0
5432 || ! operand_equal_p (lr_inner, rr_inner, 0))
5435 l_const = r_const = 0;
5437 /* If either comparison code is not correct for our logical operation,
5438 fail. However, we can convert a one-bit comparison against zero into
5439 the opposite comparison against that bit being set in the field. */
5441 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5442 if (lcode != wanted_code)
5444 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5446 /* Make the left operand unsigned, since we are only interested
5447 in the value of one bit. Otherwise we are doing the wrong
5456 /* This is analogous to the code for l_const above. */
5457 if (rcode != wanted_code)
5459 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5468 /* See if we can find a mode that contains both fields being compared on
5469 the left. If we can't, fail. Otherwise, update all constants and masks
5470 to be relative to a field of that size. */
5471 first_bit = MIN (ll_bitpos, rl_bitpos);
5472 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5473 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5474 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5476 if (lnmode == VOIDmode)
5479 lnbitsize = GET_MODE_BITSIZE (lnmode);
5480 lnbitpos = first_bit & ~ (lnbitsize - 1);
5481 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5482 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5484 if (BYTES_BIG_ENDIAN)
5486 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5487 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5490 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5491 size_int (xll_bitpos), 0);
5492 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5493 size_int (xrl_bitpos), 0);
5497 l_const = fold_convert (lntype, l_const);
5498 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5499 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5500 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5501 fold_build1 (BIT_NOT_EXPR,
5505 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5507 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5512 r_const = fold_convert (lntype, r_const);
5513 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5514 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5515 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5516 fold_build1 (BIT_NOT_EXPR,
5520 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5522 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5526 /* Handle the case of comparisons with constants. If there is something in
5527 common between the masks, those bits of the constants must be the same.
5528 If not, the condition is always false. Test for this to avoid generating
5529 incorrect code below. */
5530 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5531 if (! integer_zerop (result)
5532 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5533 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5535 if (wanted_code == NE_EXPR)
5537 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5538 return constant_boolean_node (true, truth_type);
5542 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5543 return constant_boolean_node (false, truth_type);
5550 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5554 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5557 enum tree_code op_code;
5560 int consts_equal, consts_lt;
5563 STRIP_SIGN_NOPS (arg0);
5565 op_code = TREE_CODE (arg0);
5566 minmax_const = TREE_OPERAND (arg0, 1);
5567 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5568 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5569 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5570 inner = TREE_OPERAND (arg0, 0);
5572 /* If something does not permit us to optimize, return the original tree. */
5573 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5574 || TREE_CODE (comp_const) != INTEGER_CST
5575 || TREE_OVERFLOW (comp_const)
5576 || TREE_CODE (minmax_const) != INTEGER_CST
5577 || TREE_OVERFLOW (minmax_const))
5580 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5581 and GT_EXPR, doing the rest with recursive calls using logical
5585 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5587 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5590 return invert_truthvalue (tem);
5596 fold_build2 (TRUTH_ORIF_EXPR, type,
5597 optimize_minmax_comparison
5598 (EQ_EXPR, type, arg0, comp_const),
5599 optimize_minmax_comparison
5600 (GT_EXPR, type, arg0, comp_const));
5603 if (op_code == MAX_EXPR && consts_equal)
5604 /* MAX (X, 0) == 0 -> X <= 0 */
5605 return fold_build2 (LE_EXPR, type, inner, comp_const);
5607 else if (op_code == MAX_EXPR && consts_lt)
5608 /* MAX (X, 0) == 5 -> X == 5 */
5609 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5611 else if (op_code == MAX_EXPR)
5612 /* MAX (X, 0) == -1 -> false */
5613 return omit_one_operand (type, integer_zero_node, inner);
5615 else if (consts_equal)
5616 /* MIN (X, 0) == 0 -> X >= 0 */
5617 return fold_build2 (GE_EXPR, type, inner, comp_const);
5620 /* MIN (X, 0) == 5 -> false */
5621 return omit_one_operand (type, integer_zero_node, inner);
5624 /* MIN (X, 0) == -1 -> X == -1 */
5625 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5628 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5629 /* MAX (X, 0) > 0 -> X > 0
5630 MAX (X, 0) > 5 -> X > 5 */
5631 return fold_build2 (GT_EXPR, type, inner, comp_const);
5633 else if (op_code == MAX_EXPR)
5634 /* MAX (X, 0) > -1 -> true */
5635 return omit_one_operand (type, integer_one_node, inner);
5637 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5638 /* MIN (X, 0) > 0 -> false
5639 MIN (X, 0) > 5 -> false */
5640 return omit_one_operand (type, integer_zero_node, inner);
5643 /* MIN (X, 0) > -1 -> X > -1 */
5644 return fold_build2 (GT_EXPR, type, inner, comp_const);
5651 /* T is an integer expression that is being multiplied, divided, or taken a
5652 modulus (CODE says which and what kind of divide or modulus) by a
5653 constant C. See if we can eliminate that operation by folding it with
5654 other operations already in T. WIDE_TYPE, if non-null, is a type that
5655 should be used for the computation if wider than our type.
5657 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5658 (X * 2) + (Y * 4). We must, however, be assured that either the original
5659 expression would not overflow or that overflow is undefined for the type
5660 in the language in question.
5662 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5663 the machine has a multiply-accumulate insn or that this is part of an
5664 addressing calculation.
5666 If we return a non-null expression, it is an equivalent form of the
5667 original computation, but need not be in the original type.
5669 We set *STRICT_OVERFLOW_P to true if the return values depends on
5670 signed overflow being undefined. Otherwise we do not change
5671 *STRICT_OVERFLOW_P. */
5674 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5675 bool *strict_overflow_p)
5677 /* To avoid exponential search depth, refuse to allow recursion past
5678 three levels. Beyond that (1) it's highly unlikely that we'll find
5679 something interesting and (2) we've probably processed it before
5680 when we built the inner expression. */
5689 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5696 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5697 bool *strict_overflow_p)
5699 tree type = TREE_TYPE (t);
5700 enum tree_code tcode = TREE_CODE (t);
5701 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5702 > GET_MODE_SIZE (TYPE_MODE (type)))
5703 ? wide_type : type);
5705 int same_p = tcode == code;
5706 tree op0 = NULL_TREE, op1 = NULL_TREE;
5707 bool sub_strict_overflow_p;
5709 /* Don't deal with constants of zero here; they confuse the code below. */
5710 if (integer_zerop (c))
5713 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5714 op0 = TREE_OPERAND (t, 0);
5716 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5717 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5719 /* Note that we need not handle conditional operations here since fold
5720 already handles those cases. So just do arithmetic here. */
5724 /* For a constant, we can always simplify if we are a multiply
5725 or (for divide and modulus) if it is a multiple of our constant. */
5726 if (code == MULT_EXPR
5727 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5728 return const_binop (code, fold_convert (ctype, t),
5729 fold_convert (ctype, c), 0);
5732 CASE_CONVERT: case NON_LVALUE_EXPR:
5733 /* If op0 is an expression ... */
5734 if ((COMPARISON_CLASS_P (op0)
5735 || UNARY_CLASS_P (op0)
5736 || BINARY_CLASS_P (op0)
5737 || VL_EXP_CLASS_P (op0)
5738 || EXPRESSION_CLASS_P (op0))
5739 /* ... and has wrapping overflow, and its type is smaller
5740 than ctype, then we cannot pass through as widening. */
5741 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5742 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5743 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5744 && (TYPE_PRECISION (ctype)
5745 > TYPE_PRECISION (TREE_TYPE (op0))))
5746 /* ... or this is a truncation (t is narrower than op0),
5747 then we cannot pass through this narrowing. */
5748 || (TYPE_PRECISION (type)
5749 < TYPE_PRECISION (TREE_TYPE (op0)))
5750 /* ... or signedness changes for division or modulus,
5751 then we cannot pass through this conversion. */
5752 || (code != MULT_EXPR
5753 && (TYPE_UNSIGNED (ctype)
5754 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5755 /* ... or has undefined overflow while the converted to
5756 type has not, we cannot do the operation in the inner type
5757 as that would introduce undefined overflow. */
5758 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5759 && !TYPE_OVERFLOW_UNDEFINED (type))))
5762 /* Pass the constant down and see if we can make a simplification. If
5763 we can, replace this expression with the inner simplification for
5764 possible later conversion to our or some other type. */
5765 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5766 && TREE_CODE (t2) == INTEGER_CST
5767 && !TREE_OVERFLOW (t2)
5768 && (0 != (t1 = extract_muldiv (op0, t2, code,
5770 ? ctype : NULL_TREE,
5771 strict_overflow_p))))
5776 /* If widening the type changes it from signed to unsigned, then we
5777 must avoid building ABS_EXPR itself as unsigned. */
5778 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5780 tree cstype = (*signed_type_for) (ctype);
5781 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5784 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5785 return fold_convert (ctype, t1);
5789 /* If the constant is negative, we cannot simplify this. */
5790 if (tree_int_cst_sgn (c) == -1)
5794 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5796 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5799 case MIN_EXPR: case MAX_EXPR:
5800 /* If widening the type changes the signedness, then we can't perform
5801 this optimization as that changes the result. */
5802 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5805 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5806 sub_strict_overflow_p = false;
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5808 &sub_strict_overflow_p)) != 0
5809 && (t2 = extract_muldiv (op1, c, code, wide_type,
5810 &sub_strict_overflow_p)) != 0)
5812 if (tree_int_cst_sgn (c) < 0)
5813 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5814 if (sub_strict_overflow_p)
5815 *strict_overflow_p = true;
5816 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5817 fold_convert (ctype, t2));
5821 case LSHIFT_EXPR: case RSHIFT_EXPR:
5822 /* If the second operand is constant, this is a multiplication
5823 or floor division, by a power of two, so we can treat it that
5824 way unless the multiplier or divisor overflows. Signed
5825 left-shift overflow is implementation-defined rather than
5826 undefined in C90, so do not convert signed left shift into
5828 if (TREE_CODE (op1) == INTEGER_CST
5829 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5830 /* const_binop may not detect overflow correctly,
5831 so check for it explicitly here. */
5832 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5833 && TREE_INT_CST_HIGH (op1) == 0
5834 && 0 != (t1 = fold_convert (ctype,
5835 const_binop (LSHIFT_EXPR,
5838 && !TREE_OVERFLOW (t1))
5839 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5840 ? MULT_EXPR : FLOOR_DIV_EXPR,
5841 ctype, fold_convert (ctype, op0), t1),
5842 c, code, wide_type, strict_overflow_p);
5845 case PLUS_EXPR: case MINUS_EXPR:
5846 /* See if we can eliminate the operation on both sides. If we can, we
5847 can return a new PLUS or MINUS. If we can't, the only remaining
5848 cases where we can do anything are if the second operand is a
5850 sub_strict_overflow_p = false;
5851 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5852 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5853 if (t1 != 0 && t2 != 0
5854 && (code == MULT_EXPR
5855 /* If not multiplication, we can only do this if both operands
5856 are divisible by c. */
5857 || (multiple_of_p (ctype, op0, c)
5858 && multiple_of_p (ctype, op1, c))))
5860 if (sub_strict_overflow_p)
5861 *strict_overflow_p = true;
5862 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5863 fold_convert (ctype, t2));
5866 /* If this was a subtraction, negate OP1 and set it to be an addition.
5867 This simplifies the logic below. */
5868 if (tcode == MINUS_EXPR)
5869 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5871 if (TREE_CODE (op1) != INTEGER_CST)
5874 /* If either OP1 or C are negative, this optimization is not safe for
5875 some of the division and remainder types while for others we need
5876 to change the code. */
5877 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5879 if (code == CEIL_DIV_EXPR)
5880 code = FLOOR_DIV_EXPR;
5881 else if (code == FLOOR_DIV_EXPR)
5882 code = CEIL_DIV_EXPR;
5883 else if (code != MULT_EXPR
5884 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5888 /* If it's a multiply or a division/modulus operation of a multiple
5889 of our constant, do the operation and verify it doesn't overflow. */
5890 if (code == MULT_EXPR
5891 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5893 op1 = const_binop (code, fold_convert (ctype, op1),
5894 fold_convert (ctype, c), 0);
5895 /* We allow the constant to overflow with wrapping semantics. */
5897 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5903 /* If we have an unsigned type is not a sizetype, we cannot widen
5904 the operation since it will change the result if the original
5905 computation overflowed. */
5906 if (TYPE_UNSIGNED (ctype)
5907 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5911 /* If we were able to eliminate our operation from the first side,
5912 apply our operation to the second side and reform the PLUS. */
5913 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5916 /* The last case is if we are a multiply. In that case, we can
5917 apply the distributive law to commute the multiply and addition
5918 if the multiplication of the constants doesn't overflow. */
5919 if (code == MULT_EXPR)
5920 return fold_build2 (tcode, ctype,
5921 fold_build2 (code, ctype,
5922 fold_convert (ctype, op0),
5923 fold_convert (ctype, c)),
5929 /* We have a special case here if we are doing something like
5930 (C * 8) % 4 since we know that's zero. */
5931 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5932 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5933 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5934 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5935 return omit_one_operand (type, integer_zero_node, op0);
5937 /* ... fall through ... */
5939 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5940 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5941 /* If we can extract our operation from the LHS, do so and return a
5942 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5943 do something only if the second operand is a constant. */
5945 && (t1 = extract_muldiv (op0, c, code, wide_type,
5946 strict_overflow_p)) != 0)
5947 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5948 fold_convert (ctype, op1));
5949 else if (tcode == MULT_EXPR && code == MULT_EXPR
5950 && (t1 = extract_muldiv (op1, c, code, wide_type,
5951 strict_overflow_p)) != 0)
5952 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5953 fold_convert (ctype, t1));
5954 else if (TREE_CODE (op1) != INTEGER_CST)
5957 /* If these are the same operation types, we can associate them
5958 assuming no overflow. */
5960 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5961 fold_convert (ctype, c), 1))
5962 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5963 TREE_INT_CST_HIGH (t1),
5964 (TYPE_UNSIGNED (ctype)
5965 && tcode != MULT_EXPR) ? -1 : 1,
5966 TREE_OVERFLOW (t1)))
5967 && !TREE_OVERFLOW (t1))
5968 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5970 /* If these operations "cancel" each other, we have the main
5971 optimizations of this pass, which occur when either constant is a
5972 multiple of the other, in which case we replace this with either an
5973 operation or CODE or TCODE.
5975 If we have an unsigned type that is not a sizetype, we cannot do
5976 this since it will change the result if the original computation
5978 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5979 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5980 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5981 || (tcode == MULT_EXPR
5982 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5983 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5984 && code != MULT_EXPR)))
5986 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5989 *strict_overflow_p = true;
5990 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5991 fold_convert (ctype,
5992 const_binop (TRUNC_DIV_EXPR,
5995 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6014 /* Return a node which has the indicated constant VALUE (either 0 or
6015 1), and is of the indicated TYPE. */
6018 constant_boolean_node (int value, tree type)
6020 if (type == integer_type_node)
6021 return value ? integer_one_node : integer_zero_node;
6022 else if (type == boolean_type_node)
6023 return value ? boolean_true_node : boolean_false_node;
6025 return build_int_cst (type, value);
6029 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6030 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6031 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6032 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6033 COND is the first argument to CODE; otherwise (as in the example
6034 given here), it is the second argument. TYPE is the type of the
6035 original expression. Return NULL_TREE if no simplification is
6039 fold_binary_op_with_conditional_arg (enum tree_code code,
6040 tree type, tree op0, tree op1,
6041 tree cond, tree arg, int cond_first_p)
6043 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6044 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6045 tree test, true_value, false_value;
6046 tree lhs = NULL_TREE;
6047 tree rhs = NULL_TREE;
6049 /* This transformation is only worthwhile if we don't have to wrap
6050 arg in a SAVE_EXPR, and the operation can be simplified on at least
6051 one of the branches once its pushed inside the COND_EXPR. */
6052 if (!TREE_CONSTANT (arg))
6055 if (TREE_CODE (cond) == COND_EXPR)
6057 test = TREE_OPERAND (cond, 0);
6058 true_value = TREE_OPERAND (cond, 1);
6059 false_value = TREE_OPERAND (cond, 2);
6060 /* If this operand throws an expression, then it does not make
6061 sense to try to perform a logical or arithmetic operation
6063 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6065 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6070 tree testtype = TREE_TYPE (cond);
6072 true_value = constant_boolean_node (true, testtype);
6073 false_value = constant_boolean_node (false, testtype);
6076 arg = fold_convert (arg_type, arg);
6079 true_value = fold_convert (cond_type, true_value);
6081 lhs = fold_build2 (code, type, true_value, arg);
6083 lhs = fold_build2 (code, type, arg, true_value);
6087 false_value = fold_convert (cond_type, false_value);
6089 rhs = fold_build2 (code, type, false_value, arg);
6091 rhs = fold_build2 (code, type, arg, false_value);
6094 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6095 return fold_convert (type, test);
6099 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6101 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6102 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6103 ADDEND is the same as X.
6105 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6106 and finite. The problematic cases are when X is zero, and its mode
6107 has signed zeros. In the case of rounding towards -infinity,
6108 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6109 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6112 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6114 if (!real_zerop (addend))
6117 /* Don't allow the fold with -fsignaling-nans. */
6118 if (HONOR_SNANS (TYPE_MODE (type)))
6121 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6122 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6125 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6126 if (TREE_CODE (addend) == REAL_CST
6127 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6130 /* The mode has signed zeros, and we have to honor their sign.
6131 In this situation, there is only one case we can return true for.
6132 X - 0 is the same as X unless rounding towards -infinity is
6134 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6137 /* Subroutine of fold() that checks comparisons of built-in math
6138 functions against real constants.
6140 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6141 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6142 is the type of the result and ARG0 and ARG1 are the operands of the
6143 comparison. ARG1 must be a TREE_REAL_CST.
6145 The function returns the constant folded tree if a simplification
6146 can be made, and NULL_TREE otherwise. */
6149 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6150 tree type, tree arg0, tree arg1)
6154 if (BUILTIN_SQRT_P (fcode))
6156 tree arg = CALL_EXPR_ARG (arg0, 0);
6157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6159 c = TREE_REAL_CST (arg1);
6160 if (REAL_VALUE_NEGATIVE (c))
6162 /* sqrt(x) < y is always false, if y is negative. */
6163 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6164 return omit_one_operand (type, integer_zero_node, arg);
6166 /* sqrt(x) > y is always true, if y is negative and we
6167 don't care about NaNs, i.e. negative values of x. */
6168 if (code == NE_EXPR || !HONOR_NANS (mode))
6169 return omit_one_operand (type, integer_one_node, arg);
6171 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6172 return fold_build2 (GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg), dconst0));
6175 else if (code == GT_EXPR || code == GE_EXPR)
6179 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6180 real_convert (&c2, mode, &c2);
6182 if (REAL_VALUE_ISINF (c2))
6184 /* sqrt(x) > y is x == +Inf, when y is very large. */
6185 if (HONOR_INFINITIES (mode))
6186 return fold_build2 (EQ_EXPR, type, arg,
6187 build_real (TREE_TYPE (arg), c2));
6189 /* sqrt(x) > y is always false, when y is very large
6190 and we don't care about infinities. */
6191 return omit_one_operand (type, integer_zero_node, arg);
6194 /* sqrt(x) > c is the same as x > c*c. */
6195 return fold_build2 (code, type, arg,
6196 build_real (TREE_TYPE (arg), c2));
6198 else if (code == LT_EXPR || code == LE_EXPR)
6202 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6203 real_convert (&c2, mode, &c2);
6205 if (REAL_VALUE_ISINF (c2))
6207 /* sqrt(x) < y is always true, when y is a very large
6208 value and we don't care about NaNs or Infinities. */
6209 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6210 return omit_one_operand (type, integer_one_node, arg);
6212 /* sqrt(x) < y is x != +Inf when y is very large and we
6213 don't care about NaNs. */
6214 if (! HONOR_NANS (mode))
6215 return fold_build2 (NE_EXPR, type, arg,
6216 build_real (TREE_TYPE (arg), c2));
6218 /* sqrt(x) < y is x >= 0 when y is very large and we
6219 don't care about Infinities. */
6220 if (! HONOR_INFINITIES (mode))
6221 return fold_build2 (GE_EXPR, type, arg,
6222 build_real (TREE_TYPE (arg), dconst0));
6224 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6225 if (lang_hooks.decls.global_bindings_p () != 0
6226 || CONTAINS_PLACEHOLDER_P (arg))
6229 arg = save_expr (arg);
6230 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6231 fold_build2 (GE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6234 fold_build2 (NE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6239 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2 (code, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6244 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6245 if (lang_hooks.decls.global_bindings_p () == 0
6246 && ! CONTAINS_PLACEHOLDER_P (arg))
6248 arg = save_expr (arg);
6249 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6250 fold_build2 (GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6253 fold_build2 (code, type, arg,
6254 build_real (TREE_TYPE (arg),
6263 /* Subroutine of fold() that optimizes comparisons against Infinities,
6264 either +Inf or -Inf.
6266 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6267 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6268 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6270 The function returns the constant folded tree if a simplification
6271 can be made, and NULL_TREE otherwise. */
6274 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6276 enum machine_mode mode;
6277 REAL_VALUE_TYPE max;
6281 mode = TYPE_MODE (TREE_TYPE (arg0));
6283 /* For negative infinity swap the sense of the comparison. */
6284 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6286 code = swap_tree_comparison (code);
6291 /* x > +Inf is always false, if with ignore sNANs. */
6292 if (HONOR_SNANS (mode))
6294 return omit_one_operand (type, integer_zero_node, arg0);
6297 /* x <= +Inf is always true, if we don't case about NaNs. */
6298 if (! HONOR_NANS (mode))
6299 return omit_one_operand (type, integer_one_node, arg0);
6301 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6302 if (lang_hooks.decls.global_bindings_p () == 0
6303 && ! CONTAINS_PLACEHOLDER_P (arg0))
6305 arg0 = save_expr (arg0);
6306 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6312 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6313 real_maxval (&max, neg, mode);
6314 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6315 arg0, build_real (TREE_TYPE (arg0), max));
6318 /* x < +Inf is always equal to x <= DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6324 /* x != +Inf is always equal to !(x > DBL_MAX). */
6325 real_maxval (&max, neg, mode);
6326 if (! HONOR_NANS (mode))
6327 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6330 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6341 /* Subroutine of fold() that optimizes comparisons of a division by
6342 a nonzero integer constant against an integer constant, i.e.
6345 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6346 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6347 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6349 The function returns the constant folded tree if a simplification
6350 can be made, and NULL_TREE otherwise. */
6353 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6355 tree prod, tmp, hi, lo;
6356 tree arg00 = TREE_OPERAND (arg0, 0);
6357 tree arg01 = TREE_OPERAND (arg0, 1);
6358 unsigned HOST_WIDE_INT lpart;
6359 HOST_WIDE_INT hpart;
6360 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6364 /* We have to do this the hard way to detect unsigned overflow.
6365 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6366 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6367 TREE_INT_CST_HIGH (arg01),
6368 TREE_INT_CST_LOW (arg1),
6369 TREE_INT_CST_HIGH (arg1),
6370 &lpart, &hpart, unsigned_p);
6371 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6373 neg_overflow = false;
6377 tmp = int_const_binop (MINUS_EXPR, arg01,
6378 build_int_cst (TREE_TYPE (arg01), 1), 0);
6381 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6382 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6383 TREE_INT_CST_HIGH (prod),
6384 TREE_INT_CST_LOW (tmp),
6385 TREE_INT_CST_HIGH (tmp),
6386 &lpart, &hpart, unsigned_p);
6387 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6388 -1, overflow | TREE_OVERFLOW (prod));
6390 else if (tree_int_cst_sgn (arg01) >= 0)
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1), 0);
6394 switch (tree_int_cst_sgn (arg1))
6397 neg_overflow = true;
6398 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6403 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6408 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6418 /* A negative divisor reverses the relational operators. */
6419 code = swap_tree_comparison (code);
6421 tmp = int_const_binop (PLUS_EXPR, arg01,
6422 build_int_cst (TREE_TYPE (arg01), 1), 0);
6423 switch (tree_int_cst_sgn (arg1))
6426 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6431 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6436 neg_overflow = true;
6437 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6449 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6450 return omit_one_operand (type, integer_zero_node, arg00);
6451 if (TREE_OVERFLOW (hi))
6452 return fold_build2 (GE_EXPR, type, arg00, lo);
6453 if (TREE_OVERFLOW (lo))
6454 return fold_build2 (LE_EXPR, type, arg00, hi);
6455 return build_range_check (type, arg00, 1, lo, hi);
6458 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6459 return omit_one_operand (type, integer_one_node, arg00);
6460 if (TREE_OVERFLOW (hi))
6461 return fold_build2 (LT_EXPR, type, arg00, lo);
6462 if (TREE_OVERFLOW (lo))
6463 return fold_build2 (GT_EXPR, type, arg00, hi);
6464 return build_range_check (type, arg00, 0, lo, hi);
6467 if (TREE_OVERFLOW (lo))
6469 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6470 return omit_one_operand (type, tmp, arg00);
6472 return fold_build2 (LT_EXPR, type, arg00, lo);
6475 if (TREE_OVERFLOW (hi))
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand (type, tmp, arg00);
6480 return fold_build2 (LE_EXPR, type, arg00, hi);
6483 if (TREE_OVERFLOW (hi))
6485 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6486 return omit_one_operand (type, tmp, arg00);
6488 return fold_build2 (GT_EXPR, type, arg00, hi);
6491 if (TREE_OVERFLOW (lo))
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand (type, tmp, arg00);
6496 return fold_build2 (GE_EXPR, type, arg00, lo);
6506 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6507 equality/inequality test, then return a simplified form of the test
6508 using a sign testing. Otherwise return NULL. TYPE is the desired
6512 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6530 tree stype = signed_type_for (TREE_TYPE (arg00));
6531 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6532 result_type, fold_convert (stype, arg00),
6533 build_int_cst (stype, 0));
6540 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6541 equality/inequality test, then return a simplified form of
6542 the test using shifts and logical operations. Otherwise return
6543 NULL. TYPE is the desired result type. */
6546 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6549 /* If this is testing a single bit, we can optimize the test. */
6550 if ((code == NE_EXPR || code == EQ_EXPR)
6551 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6552 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6554 tree inner = TREE_OPERAND (arg0, 0);
6555 tree type = TREE_TYPE (arg0);
6556 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6557 enum machine_mode operand_mode = TYPE_MODE (type);
6559 tree signed_type, unsigned_type, intermediate_type;
6562 /* First, see if we can fold the single bit test into a sign-bit
6564 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6569 /* Otherwise we have (A & C) != 0 where C is a single bit,
6570 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6571 Similarly for (A & C) == 0. */
6573 /* If INNER is a right shift of a constant and it plus BITNUM does
6574 not overflow, adjust BITNUM and INNER. */
6575 if (TREE_CODE (inner) == RSHIFT_EXPR
6576 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6577 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6578 && bitnum < TYPE_PRECISION (type)
6579 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6580 bitnum - TYPE_PRECISION (type)))
6582 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6583 inner = TREE_OPERAND (inner, 0);
6586 /* If we are going to be able to omit the AND below, we must do our
6587 operations as unsigned. If we must use the AND, we have a choice.
6588 Normally unsigned is faster, but for some machines signed is. */
6589 #ifdef LOAD_EXTEND_OP
6590 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6591 && !flag_syntax_only) ? 0 : 1;
6596 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6597 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6598 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6599 inner = fold_convert (intermediate_type, inner);
6602 inner = build2 (RSHIFT_EXPR, intermediate_type,
6603 inner, size_int (bitnum));
6605 one = build_int_cst (intermediate_type, 1);
6607 if (code == EQ_EXPR)
6608 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6610 /* Put the AND last so it can combine with more things. */
6611 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6613 /* Make sure to return the proper type. */
6614 inner = fold_convert (result_type, inner);
6621 /* Check whether we are allowed to reorder operands arg0 and arg1,
6622 such that the evaluation of arg1 occurs before arg0. */
6625 reorder_operands_p (const_tree arg0, const_tree arg1)
6627 if (! flag_evaluation_order)
6629 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6631 return ! TREE_SIDE_EFFECTS (arg0)
6632 && ! TREE_SIDE_EFFECTS (arg1);
6635 /* Test whether it is preferable two swap two operands, ARG0 and
6636 ARG1, for example because ARG0 is an integer constant and ARG1
6637 isn't. If REORDER is true, only recommend swapping if we can
6638 evaluate the operands in reverse order. */
6641 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6643 STRIP_SIGN_NOPS (arg0);
6644 STRIP_SIGN_NOPS (arg1);
6646 if (TREE_CODE (arg1) == INTEGER_CST)
6648 if (TREE_CODE (arg0) == INTEGER_CST)
6651 if (TREE_CODE (arg1) == REAL_CST)
6653 if (TREE_CODE (arg0) == REAL_CST)
6656 if (TREE_CODE (arg1) == FIXED_CST)
6658 if (TREE_CODE (arg0) == FIXED_CST)
6661 if (TREE_CODE (arg1) == COMPLEX_CST)
6663 if (TREE_CODE (arg0) == COMPLEX_CST)
6666 if (TREE_CONSTANT (arg1))
6668 if (TREE_CONSTANT (arg0))
6674 if (reorder && flag_evaluation_order
6675 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6678 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6679 for commutative and comparison operators. Ensuring a canonical
6680 form allows the optimizers to find additional redundancies without
6681 having to explicitly check for both orderings. */
6682 if (TREE_CODE (arg0) == SSA_NAME
6683 && TREE_CODE (arg1) == SSA_NAME
6684 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6687 /* Put SSA_NAMEs last. */
6688 if (TREE_CODE (arg1) == SSA_NAME)
6690 if (TREE_CODE (arg0) == SSA_NAME)
6693 /* Put variables last. */
6702 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6703 ARG0 is extended to a wider type. */
6706 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6708 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6710 tree shorter_type, outer_type;
6714 if (arg0_unw == arg0)
6716 shorter_type = TREE_TYPE (arg0_unw);
6718 #ifdef HAVE_canonicalize_funcptr_for_compare
6719 /* Disable this optimization if we're casting a function pointer
6720 type on targets that require function pointer canonicalization. */
6721 if (HAVE_canonicalize_funcptr_for_compare
6722 && TREE_CODE (shorter_type) == POINTER_TYPE
6723 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6727 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6730 arg1_unw = get_unwidened (arg1, NULL_TREE);
6732 /* If possible, express the comparison in the shorter mode. */
6733 if ((code == EQ_EXPR || code == NE_EXPR
6734 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6735 && (TREE_TYPE (arg1_unw) == shorter_type
6736 || (TYPE_PRECISION (shorter_type)
6737 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6738 || ((TYPE_PRECISION (shorter_type)
6739 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6745 && int_fits_type_p (arg1_unw, shorter_type))))
6746 return fold_build2 (code, type, arg0_unw,
6747 fold_convert (shorter_type, arg1_unw));
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
6754 /* If we are comparing with the integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6769 return omit_one_operand (type, integer_zero_node, arg0);
6774 return omit_one_operand (type, integer_one_node, arg0);
6780 return omit_one_operand (type, integer_one_node, arg0);
6782 return omit_one_operand (type, integer_zero_node, arg0);
6787 return omit_one_operand (type, integer_zero_node, arg0);
6789 return omit_one_operand (type, integer_one_node, arg0);
6798 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6802 fold_sign_changed_comparison (enum tree_code code, tree type,
6803 tree arg0, tree arg1)
6806 tree inner_type, outer_type;
6808 if (!CONVERT_EXPR_P (arg0))
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6827 /* If the conversion is from an integral subtype to its basetype
6829 if (TREE_TYPE (inner_type) == outer_type)
6832 if (TREE_CODE (arg1) != INTEGER_CST
6833 && !(CONVERT_EXPR_P (arg1)
6834 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6837 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6838 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6843 if (TREE_CODE (arg1) == INTEGER_CST)
6844 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6845 TREE_INT_CST_HIGH (arg1), 0,
6846 TREE_OVERFLOW (arg1));
6848 arg1 = fold_convert (inner_type, arg1);
6850 return fold_build2 (code, type, arg0_inner, arg1);
6853 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6854 step of the array. Reconstructs s and delta in the case of s * delta
6855 being an integer constant (and thus already folded).
6856 ADDR is the address. MULT is the multiplicative expression.
6857 If the function succeeds, the new address expression is returned. Otherwise
6858 NULL_TREE is returned. */
6861 try_move_mult_to_index (tree addr, tree op1)
6863 tree s, delta, step;
6864 tree ref = TREE_OPERAND (addr, 0), pref;
6869 /* Strip the nops that might be added when converting op1 to sizetype. */
6872 /* Canonicalize op1 into a possibly non-constant delta
6873 and an INTEGER_CST s. */
6874 if (TREE_CODE (op1) == MULT_EXPR)
6876 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6881 if (TREE_CODE (arg0) == INTEGER_CST)
6886 else if (TREE_CODE (arg1) == INTEGER_CST)
6894 else if (TREE_CODE (op1) == INTEGER_CST)
6901 /* Simulate we are delta * 1. */
6903 s = integer_one_node;
6906 for (;; ref = TREE_OPERAND (ref, 0))
6908 if (TREE_CODE (ref) == ARRAY_REF)
6910 /* Remember if this was a multi-dimensional array. */
6911 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6914 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6918 step = array_ref_element_size (ref);
6919 if (TREE_CODE (step) != INTEGER_CST)
6924 if (! tree_int_cst_equal (step, s))
6929 /* Try if delta is a multiple of step. */
6930 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6936 /* Only fold here if we can verify we do not overflow one
6937 dimension of a multi-dimensional array. */
6942 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6943 || !INTEGRAL_TYPE_P (itype)
6944 || !TYPE_MAX_VALUE (itype)
6945 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6948 tmp = fold_binary (PLUS_EXPR, itype,
6949 fold_convert (itype,
6950 TREE_OPERAND (ref, 1)),
6951 fold_convert (itype, delta));
6953 || TREE_CODE (tmp) != INTEGER_CST
6954 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6963 if (!handled_component_p (ref))
6967 /* We found the suitable array reference. So copy everything up to it,
6968 and replace the index. */
6970 pref = TREE_OPERAND (addr, 0);
6971 ret = copy_node (pref);
6976 pref = TREE_OPERAND (pref, 0);
6977 TREE_OPERAND (pos, 0) = copy_node (pref);
6978 pos = TREE_OPERAND (pos, 0);
6981 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6982 fold_convert (itype,
6983 TREE_OPERAND (pos, 1)),
6984 fold_convert (itype, delta));
6986 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6990 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6991 means A >= Y && A != MAX, but in this case we know that
6992 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6995 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6997 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6999 if (TREE_CODE (bound) == LT_EXPR)
7000 a = TREE_OPERAND (bound, 0);
7001 else if (TREE_CODE (bound) == GT_EXPR)
7002 a = TREE_OPERAND (bound, 1);
7006 typea = TREE_TYPE (a);
7007 if (!INTEGRAL_TYPE_P (typea)
7008 && !POINTER_TYPE_P (typea))
7011 if (TREE_CODE (ineq) == LT_EXPR)
7013 a1 = TREE_OPERAND (ineq, 1);
7014 y = TREE_OPERAND (ineq, 0);
7016 else if (TREE_CODE (ineq) == GT_EXPR)
7018 a1 = TREE_OPERAND (ineq, 0);
7019 y = TREE_OPERAND (ineq, 1);
7024 if (TREE_TYPE (a1) != typea)
7027 if (POINTER_TYPE_P (typea))
7029 /* Convert the pointer types into integer before taking the difference. */
7030 tree ta = fold_convert (ssizetype, a);
7031 tree ta1 = fold_convert (ssizetype, a1);
7032 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7035 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7037 if (!diff || !integer_onep (diff))
7040 return fold_build2 (GE_EXPR, type, a, y);
7043 /* Fold a sum or difference of at least one multiplication.
7044 Returns the folded tree or NULL if no simplification could be made. */
7047 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7049 tree arg00, arg01, arg10, arg11;
7050 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7052 /* (A * C) +- (B * C) -> (A+-B) * C.
7053 (A * C) +- A -> A * (C+-1).
7054 We are most concerned about the case where C is a constant,
7055 but other combinations show up during loop reduction. Since
7056 it is not difficult, try all four possibilities. */
7058 if (TREE_CODE (arg0) == MULT_EXPR)
7060 arg00 = TREE_OPERAND (arg0, 0);
7061 arg01 = TREE_OPERAND (arg0, 1);
7063 else if (TREE_CODE (arg0) == INTEGER_CST)
7065 arg00 = build_one_cst (type);
7070 /* We cannot generate constant 1 for fract. */
7071 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7074 arg01 = build_one_cst (type);
7076 if (TREE_CODE (arg1) == MULT_EXPR)
7078 arg10 = TREE_OPERAND (arg1, 0);
7079 arg11 = TREE_OPERAND (arg1, 1);
7081 else if (TREE_CODE (arg1) == INTEGER_CST)
7083 arg10 = build_one_cst (type);
7088 /* We cannot generate constant 1 for fract. */
7089 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7092 arg11 = build_one_cst (type);
7096 if (operand_equal_p (arg01, arg11, 0))
7097 same = arg01, alt0 = arg00, alt1 = arg10;
7098 else if (operand_equal_p (arg00, arg10, 0))
7099 same = arg00, alt0 = arg01, alt1 = arg11;
7100 else if (operand_equal_p (arg00, arg11, 0))
7101 same = arg00, alt0 = arg01, alt1 = arg10;
7102 else if (operand_equal_p (arg01, arg10, 0))
7103 same = arg01, alt0 = arg00, alt1 = arg11;
7105 /* No identical multiplicands; see if we can find a common
7106 power-of-two factor in non-power-of-two multiplies. This
7107 can help in multi-dimensional array access. */
7108 else if (host_integerp (arg01, 0)
7109 && host_integerp (arg11, 0))
7111 HOST_WIDE_INT int01, int11, tmp;
7114 int01 = TREE_INT_CST_LOW (arg01);
7115 int11 = TREE_INT_CST_LOW (arg11);
7117 /* Move min of absolute values to int11. */
7118 if ((int01 >= 0 ? int01 : -int01)
7119 < (int11 >= 0 ? int11 : -int11))
7121 tmp = int01, int01 = int11, int11 = tmp;
7122 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7129 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7131 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7132 build_int_cst (TREE_TYPE (arg00),
7137 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7142 return fold_build2 (MULT_EXPR, type,
7143 fold_build2 (code, type,
7144 fold_convert (type, alt0),
7145 fold_convert (type, alt1)),
7146 fold_convert (type, same));
7151 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7157 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7159 tree type = TREE_TYPE (expr);
7160 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7161 int byte, offset, word, words;
7162 unsigned char value;
7164 if (total_bytes > len)
7166 words = total_bytes / UNITS_PER_WORD;
7168 for (byte = 0; byte < total_bytes; byte++)
7170 int bitpos = byte * BITS_PER_UNIT;
7171 if (bitpos < HOST_BITS_PER_WIDE_INT)
7172 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7174 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7175 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7177 if (total_bytes > UNITS_PER_WORD)
7179 word = byte / UNITS_PER_WORD;
7180 if (WORDS_BIG_ENDIAN)
7181 word = (words - 1) - word;
7182 offset = word * UNITS_PER_WORD;
7183 if (BYTES_BIG_ENDIAN)
7184 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7186 offset += byte % UNITS_PER_WORD;
7189 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7190 ptr[offset] = value;
7196 /* Subroutine of native_encode_expr. Encode the REAL_CST
7197 specified by EXPR into the buffer PTR of length LEN bytes.
7198 Return the number of bytes placed in the buffer, or zero
7202 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7204 tree type = TREE_TYPE (expr);
7205 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7206 int byte, offset, word, words, bitpos;
7207 unsigned char value;
7209 /* There are always 32 bits in each long, no matter the size of
7210 the hosts long. We handle floating point representations with
7214 if (total_bytes > len)
7216 words = 32 / UNITS_PER_WORD;
7218 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7220 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7221 bitpos += BITS_PER_UNIT)
7223 byte = (bitpos / BITS_PER_UNIT) & 3;
7224 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7226 if (UNITS_PER_WORD < 4)
7228 word = byte / UNITS_PER_WORD;
7229 if (WORDS_BIG_ENDIAN)
7230 word = (words - 1) - word;
7231 offset = word * UNITS_PER_WORD;
7232 if (BYTES_BIG_ENDIAN)
7233 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7235 offset += byte % UNITS_PER_WORD;
7238 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7239 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7244 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
7246 Return the number of bytes placed in the buffer, or zero
7250 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7255 part = TREE_REALPART (expr);
7256 rsize = native_encode_expr (part, ptr, len);
7259 part = TREE_IMAGPART (expr);
7260 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7263 return rsize + isize;
7267 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
7269 Return the number of bytes placed in the buffer, or zero
7273 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7275 int i, size, offset, count;
7276 tree itype, elem, elements;
7279 elements = TREE_VECTOR_CST_ELTS (expr);
7280 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7281 itype = TREE_TYPE (TREE_TYPE (expr));
7282 size = GET_MODE_SIZE (TYPE_MODE (itype));
7283 for (i = 0; i < count; i++)
7287 elem = TREE_VALUE (elements);
7288 elements = TREE_CHAIN (elements);
7295 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7300 if (offset + size > len)
7302 memset (ptr+offset, 0, size);
7310 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7311 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7312 buffer PTR of length LEN bytes. Return the number of bytes
7313 placed in the buffer, or zero upon failure. */
7316 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7318 switch (TREE_CODE (expr))
7321 return native_encode_int (expr, ptr, len);
7324 return native_encode_real (expr, ptr, len);
7327 return native_encode_complex (expr, ptr, len);
7330 return native_encode_vector (expr, ptr, len);
7338 /* Subroutine of native_interpret_expr. Interpret the contents of
7339 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7340 If the buffer cannot be interpreted, return NULL_TREE. */
7343 native_interpret_int (tree type, const unsigned char *ptr, int len)
7345 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7346 int byte, offset, word, words;
7347 unsigned char value;
7348 unsigned int HOST_WIDE_INT lo = 0;
7349 HOST_WIDE_INT hi = 0;
7351 if (total_bytes > len)
7353 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7355 words = total_bytes / UNITS_PER_WORD;
7357 for (byte = 0; byte < total_bytes; byte++)
7359 int bitpos = byte * BITS_PER_UNIT;
7360 if (total_bytes > UNITS_PER_WORD)
7362 word = byte / UNITS_PER_WORD;
7363 if (WORDS_BIG_ENDIAN)
7364 word = (words - 1) - word;
7365 offset = word * UNITS_PER_WORD;
7366 if (BYTES_BIG_ENDIAN)
7367 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7369 offset += byte % UNITS_PER_WORD;
7372 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7373 value = ptr[offset];
7375 if (bitpos < HOST_BITS_PER_WIDE_INT)
7376 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7378 hi |= (unsigned HOST_WIDE_INT) value
7379 << (bitpos - HOST_BITS_PER_WIDE_INT);
7382 return build_int_cst_wide_type (type, lo, hi);
7386 /* Subroutine of native_interpret_expr. Interpret the contents of
7387 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7388 If the buffer cannot be interpreted, return NULL_TREE. */
7391 native_interpret_real (tree type, const unsigned char *ptr, int len)
7393 enum machine_mode mode = TYPE_MODE (type);
7394 int total_bytes = GET_MODE_SIZE (mode);
7395 int byte, offset, word, words, bitpos;
7396 unsigned char value;
7397 /* There are always 32 bits in each long, no matter the size of
7398 the hosts long. We handle floating point representations with
7403 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7404 if (total_bytes > len || total_bytes > 24)
7406 words = 32 / UNITS_PER_WORD;
7408 memset (tmp, 0, sizeof (tmp));
7409 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7410 bitpos += BITS_PER_UNIT)
7412 byte = (bitpos / BITS_PER_UNIT) & 3;
7413 if (UNITS_PER_WORD < 4)
7415 word = byte / UNITS_PER_WORD;
7416 if (WORDS_BIG_ENDIAN)
7417 word = (words - 1) - word;
7418 offset = word * UNITS_PER_WORD;
7419 if (BYTES_BIG_ENDIAN)
7420 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7422 offset += byte % UNITS_PER_WORD;
7425 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7426 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7428 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7431 real_from_target (&r, tmp, mode);
7432 return build_real (type, r);
7436 /* Subroutine of native_interpret_expr. Interpret the contents of
7437 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7438 If the buffer cannot be interpreted, return NULL_TREE. */
7441 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7443 tree etype, rpart, ipart;
7446 etype = TREE_TYPE (type);
7447 size = GET_MODE_SIZE (TYPE_MODE (etype));
7450 rpart = native_interpret_expr (etype, ptr, size);
7453 ipart = native_interpret_expr (etype, ptr+size, size);
7456 return build_complex (type, rpart, ipart);
7460 /* Subroutine of native_interpret_expr. Interpret the contents of
7461 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7462 If the buffer cannot be interpreted, return NULL_TREE. */
7465 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7467 tree etype, elem, elements;
7470 etype = TREE_TYPE (type);
7471 size = GET_MODE_SIZE (TYPE_MODE (etype));
7472 count = TYPE_VECTOR_SUBPARTS (type);
7473 if (size * count > len)
7476 elements = NULL_TREE;
7477 for (i = count - 1; i >= 0; i--)
7479 elem = native_interpret_expr (etype, ptr+(i*size), size);
7482 elements = tree_cons (NULL_TREE, elem, elements);
7484 return build_vector (type, elements);
7488 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a constant of type TYPE. For
7490 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7491 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7492 return NULL_TREE. */
7495 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7497 switch (TREE_CODE (type))
7502 return native_interpret_int (type, ptr, len);
7505 return native_interpret_real (type, ptr, len);
7508 return native_interpret_complex (type, ptr, len);
7511 return native_interpret_vector (type, ptr, len);
7519 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7520 TYPE at compile-time. If we're unable to perform the conversion
7521 return NULL_TREE. */
7524 fold_view_convert_expr (tree type, tree expr)
7526 /* We support up to 512-bit values (for V8DFmode). */
7527 unsigned char buffer[64];
7530 /* Check that the host and target are sane. */
7531 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7534 len = native_encode_expr (expr, buffer, sizeof (buffer));
7538 return native_interpret_expr (type, buffer, len);
7541 /* Build an expression for the address of T. Folds away INDIRECT_REF
7542 to avoid confusing the gimplify process. When IN_FOLD is true
7543 avoid modifications of T. */
7546 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7548 /* The size of the object is not relevant when talking about its address. */
7549 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7550 t = TREE_OPERAND (t, 0);
7552 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7553 if (TREE_CODE (t) == INDIRECT_REF
7554 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7556 t = TREE_OPERAND (t, 0);
7558 if (TREE_TYPE (t) != ptrtype)
7559 t = build1 (NOP_EXPR, ptrtype, t);
7565 while (handled_component_p (base))
7566 base = TREE_OPERAND (base, 0);
7569 TREE_ADDRESSABLE (base) = 1;
7571 t = build1 (ADDR_EXPR, ptrtype, t);
7574 t = build1 (ADDR_EXPR, ptrtype, t);
7579 /* Build an expression for the address of T with type PTRTYPE. This
7580 function modifies the input parameter 'T' by sometimes setting the
7581 TREE_ADDRESSABLE flag. */
7584 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7586 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7589 /* Build an expression for the address of T. This function modifies
7590 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7591 flag. When called from fold functions, use fold_addr_expr instead. */
7594 build_fold_addr_expr (tree t)
7596 return build_fold_addr_expr_with_type_1 (t,
7597 build_pointer_type (TREE_TYPE (t)),
7601 /* Same as build_fold_addr_expr, builds an expression for the address
7602 of T, but avoids touching the input node 't'. Fold functions
7603 should use this version. */
7606 fold_addr_expr (tree t)
7608 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7610 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7613 /* Fold a unary expression of code CODE and type TYPE with operand
7614 OP0. Return the folded expression if folding is successful.
7615 Otherwise, return NULL_TREE. */
7618 fold_unary (enum tree_code code, tree type, tree op0)
7622 enum tree_code_class kind = TREE_CODE_CLASS (code);
7624 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7625 && TREE_CODE_LENGTH (code) == 1);
7630 if (code == NOP_EXPR || code == CONVERT_EXPR
7631 || code == FLOAT_EXPR || code == ABS_EXPR)
7633 /* Don't use STRIP_NOPS, because signedness of argument type
7635 STRIP_SIGN_NOPS (arg0);
7639 /* Strip any conversions that don't change the mode. This
7640 is safe for every expression, except for a comparison
7641 expression because its signedness is derived from its
7644 Note that this is done as an internal manipulation within
7645 the constant folder, in order to find the simplest
7646 representation of the arguments so that their form can be
7647 studied. In any cases, the appropriate type conversions
7648 should be put back in the tree that will get out of the
7654 if (TREE_CODE_CLASS (code) == tcc_unary)
7656 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7657 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7658 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7659 else if (TREE_CODE (arg0) == COND_EXPR)
7661 tree arg01 = TREE_OPERAND (arg0, 1);
7662 tree arg02 = TREE_OPERAND (arg0, 2);
7663 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7664 arg01 = fold_build1 (code, type, arg01);
7665 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7666 arg02 = fold_build1 (code, type, arg02);
7667 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7670 /* If this was a conversion, and all we did was to move into
7671 inside the COND_EXPR, bring it back out. But leave it if
7672 it is a conversion from integer to integer and the
7673 result precision is no wider than a word since such a
7674 conversion is cheap and may be optimized away by combine,
7675 while it couldn't if it were outside the COND_EXPR. Then return
7676 so we don't get into an infinite recursion loop taking the
7677 conversion out and then back in. */
7679 if ((code == NOP_EXPR || code == CONVERT_EXPR
7680 || code == NON_LVALUE_EXPR)
7681 && TREE_CODE (tem) == COND_EXPR
7682 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7683 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7684 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7688 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7690 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7691 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7692 || flag_syntax_only))
7693 tem = build1 (code, type,
7695 TREE_TYPE (TREE_OPERAND
7696 (TREE_OPERAND (tem, 1), 0)),
7697 TREE_OPERAND (tem, 0),
7698 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7699 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7702 else if (COMPARISON_CLASS_P (arg0))
7704 if (TREE_CODE (type) == BOOLEAN_TYPE)
7706 arg0 = copy_node (arg0);
7707 TREE_TYPE (arg0) = type;
7710 else if (TREE_CODE (type) != INTEGER_TYPE)
7711 return fold_build3 (COND_EXPR, type, arg0,
7712 fold_build1 (code, type,
7714 fold_build1 (code, type,
7715 integer_zero_node));
7722 /* Re-association barriers around constants and other re-association
7723 barriers can be removed. */
7724 if (CONSTANT_CLASS_P (op0)
7725 || TREE_CODE (op0) == PAREN_EXPR)
7726 return fold_convert (type, op0);
7731 case FIX_TRUNC_EXPR:
7732 if (TREE_TYPE (op0) == type)
7735 /* If we have (type) (a CMP b) and type is an integral type, return
7736 new expression involving the new type. */
7737 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7738 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7739 TREE_OPERAND (op0, 1));
7741 /* Handle cases of two conversions in a row. */
7742 if (CONVERT_EXPR_P (op0))
7744 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7745 tree inter_type = TREE_TYPE (op0);
7746 int inside_int = INTEGRAL_TYPE_P (inside_type);
7747 int inside_ptr = POINTER_TYPE_P (inside_type);
7748 int inside_float = FLOAT_TYPE_P (inside_type);
7749 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7750 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7751 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7752 int inter_int = INTEGRAL_TYPE_P (inter_type);
7753 int inter_ptr = POINTER_TYPE_P (inter_type);
7754 int inter_float = FLOAT_TYPE_P (inter_type);
7755 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7756 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7757 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7758 int final_int = INTEGRAL_TYPE_P (type);
7759 int final_ptr = POINTER_TYPE_P (type);
7760 int final_float = FLOAT_TYPE_P (type);
7761 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7762 unsigned int final_prec = TYPE_PRECISION (type);
7763 int final_unsignedp = TYPE_UNSIGNED (type);
7765 /* In addition to the cases of two conversions in a row
7766 handled below, if we are converting something to its own
7767 type via an object of identical or wider precision, neither
7768 conversion is needed. */
7769 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7770 && (((inter_int || inter_ptr) && final_int)
7771 || (inter_float && final_float))
7772 && inter_prec >= final_prec)
7773 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7775 /* Likewise, if the intermediate and final types are either both
7776 float or both integer, we don't need the middle conversion if
7777 it is wider than the final type and doesn't change the signedness
7778 (for integers). Avoid this if the final type is a pointer
7779 since then we sometimes need the inner conversion. Likewise if
7780 the outer has a precision not equal to the size of its mode. */
7781 if (((inter_int && inside_int)
7782 || (inter_float && inside_float)
7783 || (inter_vec && inside_vec))
7784 && inter_prec >= inside_prec
7785 && (inter_float || inter_vec
7786 || inter_unsignedp == inside_unsignedp)
7787 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7788 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7790 && (! final_vec || inter_prec == inside_prec))
7791 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7793 /* If we have a sign-extension of a zero-extended value, we can
7794 replace that by a single zero-extension. */
7795 if (inside_int && inter_int && final_int
7796 && inside_prec < inter_prec && inter_prec < final_prec
7797 && inside_unsignedp && !inter_unsignedp)
7798 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7800 /* Two conversions in a row are not needed unless:
7801 - some conversion is floating-point (overstrict for now), or
7802 - some conversion is a vector (overstrict for now), or
7803 - the intermediate type is narrower than both initial and
7805 - the intermediate type and innermost type differ in signedness,
7806 and the outermost type is wider than the intermediate, or
7807 - the initial type is a pointer type and the precisions of the
7808 intermediate and final types differ, or
7809 - the final type is a pointer type and the precisions of the
7810 initial and intermediate types differ. */
7811 if (! inside_float && ! inter_float && ! final_float
7812 && ! inside_vec && ! inter_vec && ! final_vec
7813 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7814 && ! (inside_int && inter_int
7815 && inter_unsignedp != inside_unsignedp
7816 && inter_prec < final_prec)
7817 && ((inter_unsignedp && inter_prec > inside_prec)
7818 == (final_unsignedp && final_prec > inter_prec))
7819 && ! (inside_ptr && inter_prec != final_prec)
7820 && ! (final_ptr && inside_prec != inter_prec)
7821 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7822 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7823 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7826 /* Handle (T *)&A.B.C for A being of type T and B and C
7827 living at offset zero. This occurs frequently in
7828 C++ upcasting and then accessing the base. */
7829 if (TREE_CODE (op0) == ADDR_EXPR
7830 && POINTER_TYPE_P (type)
7831 && handled_component_p (TREE_OPERAND (op0, 0)))
7833 HOST_WIDE_INT bitsize, bitpos;
7835 enum machine_mode mode;
7836 int unsignedp, volatilep;
7837 tree base = TREE_OPERAND (op0, 0);
7838 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7839 &mode, &unsignedp, &volatilep, false);
7840 /* If the reference was to a (constant) zero offset, we can use
7841 the address of the base if it has the same base type
7842 as the result type. */
7843 if (! offset && bitpos == 0
7844 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7845 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7846 return fold_convert (type, fold_addr_expr (base));
7849 if (TREE_CODE (op0) == MODIFY_EXPR
7850 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7851 /* Detect assigning a bitfield. */
7852 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7854 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7856 /* Don't leave an assignment inside a conversion
7857 unless assigning a bitfield. */
7858 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7859 /* First do the assignment, then return converted constant. */
7860 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7861 TREE_NO_WARNING (tem) = 1;
7862 TREE_USED (tem) = 1;
7866 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7867 constants (if x has signed type, the sign bit cannot be set
7868 in c). This folds extension into the BIT_AND_EXPR.
7869 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7870 very likely don't have maximal range for their precision and this
7871 transformation effectively doesn't preserve non-maximal ranges. */
7872 if (TREE_CODE (type) == INTEGER_TYPE
7873 && TREE_CODE (op0) == BIT_AND_EXPR
7874 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7877 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7880 if (TYPE_UNSIGNED (TREE_TYPE (and))
7881 || (TYPE_PRECISION (type)
7882 <= TYPE_PRECISION (TREE_TYPE (and))))
7884 else if (TYPE_PRECISION (TREE_TYPE (and1))
7885 <= HOST_BITS_PER_WIDE_INT
7886 && host_integerp (and1, 1))
7888 unsigned HOST_WIDE_INT cst;
7890 cst = tree_low_cst (and1, 1);
7891 cst &= (HOST_WIDE_INT) -1
7892 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7893 change = (cst == 0);
7894 #ifdef LOAD_EXTEND_OP
7896 && !flag_syntax_only
7897 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7900 tree uns = unsigned_type_for (TREE_TYPE (and0));
7901 and0 = fold_convert (uns, and0);
7902 and1 = fold_convert (uns, and1);
7908 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7909 TREE_INT_CST_HIGH (and1), 0,
7910 TREE_OVERFLOW (and1));
7911 return fold_build2 (BIT_AND_EXPR, type,
7912 fold_convert (type, and0), tem);
7916 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7917 when one of the new casts will fold away. Conservatively we assume
7918 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7919 if (POINTER_TYPE_P (type)
7920 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7921 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7922 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7923 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7925 tree arg00 = TREE_OPERAND (arg0, 0);
7926 tree arg01 = TREE_OPERAND (arg0, 1);
7928 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7929 fold_convert (sizetype, arg01));
7932 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7933 of the same precision, and X is an integer type not narrower than
7934 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7935 if (INTEGRAL_TYPE_P (type)
7936 && TREE_CODE (op0) == BIT_NOT_EXPR
7937 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7938 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7939 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7941 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7942 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7943 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7944 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7947 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7948 type of X and Y (integer types only). */
7949 if (INTEGRAL_TYPE_P (type)
7950 && TREE_CODE (op0) == MULT_EXPR
7951 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7952 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7954 /* Be careful not to introduce new overflows. */
7956 if (TYPE_OVERFLOW_WRAPS (type))
7959 mult_type = unsigned_type_for (type);
7961 tem = fold_build2 (MULT_EXPR, mult_type,
7962 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7963 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7964 return fold_convert (type, tem);
7967 tem = fold_convert_const (code, type, op0);
7968 return tem ? tem : NULL_TREE;
7970 case FIXED_CONVERT_EXPR:
7971 tem = fold_convert_const (code, type, arg0);
7972 return tem ? tem : NULL_TREE;
7974 case VIEW_CONVERT_EXPR:
7975 if (TREE_TYPE (op0) == type)
7977 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7978 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7980 /* For integral conversions with the same precision or pointer
7981 conversions use a NOP_EXPR instead. */
7982 if ((INTEGRAL_TYPE_P (type)
7983 || POINTER_TYPE_P (type))
7984 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7985 || POINTER_TYPE_P (TREE_TYPE (op0)))
7986 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7987 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7988 a sub-type to its base type as generated by the Ada FE. */
7989 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7990 && TREE_TYPE (TREE_TYPE (op0))))
7991 return fold_convert (type, op0);
7993 /* Strip inner integral conversions that do not change the precision. */
7994 if (CONVERT_EXPR_P (op0)
7995 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7996 || POINTER_TYPE_P (TREE_TYPE (op0)))
7997 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7998 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7999 && (TYPE_PRECISION (TREE_TYPE (op0))
8000 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8001 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8003 return fold_view_convert_expr (type, op0);
8006 tem = fold_negate_expr (arg0);
8008 return fold_convert (type, tem);
8012 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8013 return fold_abs_const (arg0, type);
8014 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8015 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8016 /* Convert fabs((double)float) into (double)fabsf(float). */
8017 else if (TREE_CODE (arg0) == NOP_EXPR
8018 && TREE_CODE (type) == REAL_TYPE)
8020 tree targ0 = strip_float_extensions (arg0);
8022 return fold_convert (type, fold_build1 (ABS_EXPR,
8026 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8027 else if (TREE_CODE (arg0) == ABS_EXPR)
8029 else if (tree_expr_nonnegative_p (arg0))
8032 /* Strip sign ops from argument. */
8033 if (TREE_CODE (type) == REAL_TYPE)
8035 tem = fold_strip_sign_ops (arg0);
8037 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8042 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8043 return fold_convert (type, arg0);
8044 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8046 tree itype = TREE_TYPE (type);
8047 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8048 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8049 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8051 if (TREE_CODE (arg0) == COMPLEX_CST)
8053 tree itype = TREE_TYPE (type);
8054 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8055 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8056 return build_complex (type, rpart, negate_expr (ipart));
8058 if (TREE_CODE (arg0) == CONJ_EXPR)
8059 return fold_convert (type, TREE_OPERAND (arg0, 0));
8063 if (TREE_CODE (arg0) == INTEGER_CST)
8064 return fold_not_const (arg0, type);
8065 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8066 return fold_convert (type, TREE_OPERAND (arg0, 0));
8067 /* Convert ~ (-A) to A - 1. */
8068 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8069 return fold_build2 (MINUS_EXPR, type,
8070 fold_convert (type, TREE_OPERAND (arg0, 0)),
8071 build_int_cst (type, 1));
8072 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8073 else if (INTEGRAL_TYPE_P (type)
8074 && ((TREE_CODE (arg0) == MINUS_EXPR
8075 && integer_onep (TREE_OPERAND (arg0, 1)))
8076 || (TREE_CODE (arg0) == PLUS_EXPR
8077 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8078 return fold_build1 (NEGATE_EXPR, type,
8079 fold_convert (type, TREE_OPERAND (arg0, 0)));
8080 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8081 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8082 && (tem = fold_unary (BIT_NOT_EXPR, type,
8084 TREE_OPERAND (arg0, 0)))))
8085 return fold_build2 (BIT_XOR_EXPR, type, tem,
8086 fold_convert (type, TREE_OPERAND (arg0, 1)));
8087 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8088 && (tem = fold_unary (BIT_NOT_EXPR, type,
8090 TREE_OPERAND (arg0, 1)))))
8091 return fold_build2 (BIT_XOR_EXPR, type,
8092 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8093 /* Perform BIT_NOT_EXPR on each element individually. */
8094 else if (TREE_CODE (arg0) == VECTOR_CST)
8096 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8097 int count = TYPE_VECTOR_SUBPARTS (type), i;
8099 for (i = 0; i < count; i++)
8103 elem = TREE_VALUE (elements);
8104 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8105 if (elem == NULL_TREE)
8107 elements = TREE_CHAIN (elements);
8110 elem = build_int_cst (TREE_TYPE (type), -1);
8111 list = tree_cons (NULL_TREE, elem, list);
8114 return build_vector (type, nreverse (list));
8119 case TRUTH_NOT_EXPR:
8120 /* The argument to invert_truthvalue must have Boolean type. */
8121 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8122 arg0 = fold_convert (boolean_type_node, arg0);
8124 /* Note that the operand of this must be an int
8125 and its values must be 0 or 1.
8126 ("true" is a fixed value perhaps depending on the language,
8127 but we don't handle values other than 1 correctly yet.) */
8128 tem = fold_truth_not_expr (arg0);
8131 return fold_convert (type, tem);
8134 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8135 return fold_convert (type, arg0);
8136 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8137 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8138 TREE_OPERAND (arg0, 1));
8139 if (TREE_CODE (arg0) == COMPLEX_CST)
8140 return fold_convert (type, TREE_REALPART (arg0));
8141 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8144 tem = fold_build2 (TREE_CODE (arg0), itype,
8145 fold_build1 (REALPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 0)),
8147 fold_build1 (REALPART_EXPR, itype,
8148 TREE_OPERAND (arg0, 1)));
8149 return fold_convert (type, tem);
8151 if (TREE_CODE (arg0) == CONJ_EXPR)
8153 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8154 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8155 return fold_convert (type, tem);
8157 if (TREE_CODE (arg0) == CALL_EXPR)
8159 tree fn = get_callee_fndecl (arg0);
8160 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8161 switch (DECL_FUNCTION_CODE (fn))
8163 CASE_FLT_FN (BUILT_IN_CEXPI):
8164 fn = mathfn_built_in (type, BUILT_IN_COS);
8166 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8176 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8177 return fold_convert (type, integer_zero_node);
8178 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8179 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8180 TREE_OPERAND (arg0, 0));
8181 if (TREE_CODE (arg0) == COMPLEX_CST)
8182 return fold_convert (type, TREE_IMAGPART (arg0));
8183 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build2 (TREE_CODE (arg0), itype,
8187 fold_build1 (IMAGPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 0)),
8189 fold_build1 (IMAGPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 1)));
8191 return fold_convert (type, tem);
8193 if (TREE_CODE (arg0) == CONJ_EXPR)
8195 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8196 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8197 return fold_convert (type, negate_expr (tem));
8199 if (TREE_CODE (arg0) == CALL_EXPR)
8201 tree fn = get_callee_fndecl (arg0);
8202 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8203 switch (DECL_FUNCTION_CODE (fn))
8205 CASE_FLT_FN (BUILT_IN_CEXPI):
8206 fn = mathfn_built_in (type, BUILT_IN_SIN);
8208 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8219 } /* switch (code) */
8222 /* Fold a binary expression of code CODE and type TYPE with operands
8223 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8224 Return the folded expression if folding is successful. Otherwise,
8225 return NULL_TREE. */
8228 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8230 enum tree_code compl_code;
8232 if (code == MIN_EXPR)
8233 compl_code = MAX_EXPR;
8234 else if (code == MAX_EXPR)
8235 compl_code = MIN_EXPR;
8239 /* MIN (MAX (a, b), b) == b. */
8240 if (TREE_CODE (op0) == compl_code
8241 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8242 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8244 /* MIN (MAX (b, a), b) == b. */
8245 if (TREE_CODE (op0) == compl_code
8246 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8247 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8248 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8250 /* MIN (a, MAX (a, b)) == a. */
8251 if (TREE_CODE (op1) == compl_code
8252 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8253 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8254 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8256 /* MIN (a, MAX (b, a)) == a. */
8257 if (TREE_CODE (op1) == compl_code
8258 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8259 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8260 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8265 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8266 by changing CODE to reduce the magnitude of constants involved in
8267 ARG0 of the comparison.
8268 Returns a canonicalized comparison tree if a simplification was
8269 possible, otherwise returns NULL_TREE.
8270 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8271 valid if signed overflow is undefined. */
8274 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8275 tree arg0, tree arg1,
8276 bool *strict_overflow_p)
8278 enum tree_code code0 = TREE_CODE (arg0);
8279 tree t, cst0 = NULL_TREE;
8283 /* Match A +- CST code arg1 and CST code arg1. */
8284 if (!(((code0 == MINUS_EXPR
8285 || code0 == PLUS_EXPR)
8286 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8287 || code0 == INTEGER_CST))
8290 /* Identify the constant in arg0 and its sign. */
8291 if (code0 == INTEGER_CST)
8294 cst0 = TREE_OPERAND (arg0, 1);
8295 sgn0 = tree_int_cst_sgn (cst0);
8297 /* Overflowed constants and zero will cause problems. */
8298 if (integer_zerop (cst0)
8299 || TREE_OVERFLOW (cst0))
8302 /* See if we can reduce the magnitude of the constant in
8303 arg0 by changing the comparison code. */
8304 if (code0 == INTEGER_CST)
8306 /* CST <= arg1 -> CST-1 < arg1. */
8307 if (code == LE_EXPR && sgn0 == 1)
8309 /* -CST < arg1 -> -CST-1 <= arg1. */
8310 else if (code == LT_EXPR && sgn0 == -1)
8312 /* CST > arg1 -> CST-1 >= arg1. */
8313 else if (code == GT_EXPR && sgn0 == 1)
8315 /* -CST >= arg1 -> -CST-1 > arg1. */
8316 else if (code == GE_EXPR && sgn0 == -1)
8320 /* arg1 code' CST' might be more canonical. */
8325 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8327 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8329 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8330 else if (code == GT_EXPR
8331 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8333 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8334 else if (code == LE_EXPR
8335 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8337 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8338 else if (code == GE_EXPR
8339 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8343 *strict_overflow_p = true;
8346 /* Now build the constant reduced in magnitude. */
8347 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8348 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8349 if (code0 != INTEGER_CST)
8350 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8352 /* If swapping might yield to a more canonical form, do so. */
8354 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8356 return fold_build2 (code, type, t, arg1);
8359 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8360 overflow further. Try to decrease the magnitude of constants involved
8361 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8362 and put sole constants at the second argument position.
8363 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8366 maybe_canonicalize_comparison (enum tree_code code, tree type,
8367 tree arg0, tree arg1)
8370 bool strict_overflow_p;
8371 const char * const warnmsg = G_("assuming signed overflow does not occur "
8372 "when reducing constant in comparison");
8374 /* In principle pointers also have undefined overflow behavior,
8375 but that causes problems elsewhere. */
8376 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8377 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8380 /* Try canonicalization by simplifying arg0. */
8381 strict_overflow_p = false;
8382 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8383 &strict_overflow_p);
8386 if (strict_overflow_p)
8387 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8391 /* Try canonicalization by simplifying arg1 using the swapped
8393 code = swap_tree_comparison (code);
8394 strict_overflow_p = false;
8395 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8396 &strict_overflow_p);
8397 if (t && strict_overflow_p)
8398 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8402 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8403 space. This is used to avoid issuing overflow warnings for
8404 expressions like &p->x which can not wrap. */
8407 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8409 unsigned HOST_WIDE_INT offset_low, total_low;
8410 HOST_WIDE_INT size, offset_high, total_high;
8412 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8418 if (offset == NULL_TREE)
8423 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8427 offset_low = TREE_INT_CST_LOW (offset);
8428 offset_high = TREE_INT_CST_HIGH (offset);
8431 if (add_double_with_sign (offset_low, offset_high,
8432 bitpos / BITS_PER_UNIT, 0,
8433 &total_low, &total_high,
8437 if (total_high != 0)
8440 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8444 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8446 if (TREE_CODE (base) == ADDR_EXPR)
8448 HOST_WIDE_INT base_size;
8450 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8451 if (base_size > 0 && size < base_size)
8455 return total_low > (unsigned HOST_WIDE_INT) size;
8458 /* Subroutine of fold_binary. This routine performs all of the
8459 transformations that are common to the equality/inequality
8460 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8461 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8462 fold_binary should call fold_binary. Fold a comparison with
8463 tree code CODE and type TYPE with operands OP0 and OP1. Return
8464 the folded comparison or NULL_TREE. */
8467 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8469 tree arg0, arg1, tem;
8474 STRIP_SIGN_NOPS (arg0);
8475 STRIP_SIGN_NOPS (arg1);
8477 tem = fold_relational_const (code, type, arg0, arg1);
8478 if (tem != NULL_TREE)
8481 /* If one arg is a real or integer constant, put it last. */
8482 if (tree_swap_operands_p (arg0, arg1, true))
8483 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8485 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8486 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8487 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8488 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8489 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8490 && (TREE_CODE (arg1) == INTEGER_CST
8491 && !TREE_OVERFLOW (arg1)))
8493 tree const1 = TREE_OPERAND (arg0, 1);
8495 tree variable = TREE_OPERAND (arg0, 0);
8498 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8500 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8501 TREE_TYPE (arg1), const2, const1);
8503 /* If the constant operation overflowed this can be
8504 simplified as a comparison against INT_MAX/INT_MIN. */
8505 if (TREE_CODE (lhs) == INTEGER_CST
8506 && TREE_OVERFLOW (lhs))
8508 int const1_sgn = tree_int_cst_sgn (const1);
8509 enum tree_code code2 = code;
8511 /* Get the sign of the constant on the lhs if the
8512 operation were VARIABLE + CONST1. */
8513 if (TREE_CODE (arg0) == MINUS_EXPR)
8514 const1_sgn = -const1_sgn;
8516 /* The sign of the constant determines if we overflowed
8517 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8518 Canonicalize to the INT_MIN overflow by swapping the comparison
8520 if (const1_sgn == -1)
8521 code2 = swap_tree_comparison (code);
8523 /* We now can look at the canonicalized case
8524 VARIABLE + 1 CODE2 INT_MIN
8525 and decide on the result. */
8526 if (code2 == LT_EXPR
8528 || code2 == EQ_EXPR)
8529 return omit_one_operand (type, boolean_false_node, variable);
8530 else if (code2 == NE_EXPR
8532 || code2 == GT_EXPR)
8533 return omit_one_operand (type, boolean_true_node, variable);
8536 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8537 && (TREE_CODE (lhs) != INTEGER_CST
8538 || !TREE_OVERFLOW (lhs)))
8540 fold_overflow_warning (("assuming signed overflow does not occur "
8541 "when changing X +- C1 cmp C2 to "
8543 WARN_STRICT_OVERFLOW_COMPARISON);
8544 return fold_build2 (code, type, variable, lhs);
8548 /* For comparisons of pointers we can decompose it to a compile time
8549 comparison of the base objects and the offsets into the object.
8550 This requires at least one operand being an ADDR_EXPR or a
8551 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8552 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8553 && (TREE_CODE (arg0) == ADDR_EXPR
8554 || TREE_CODE (arg1) == ADDR_EXPR
8555 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8556 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8558 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8559 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8560 enum machine_mode mode;
8561 int volatilep, unsignedp;
8562 bool indirect_base0 = false, indirect_base1 = false;
8564 /* Get base and offset for the access. Strip ADDR_EXPR for
8565 get_inner_reference, but put it back by stripping INDIRECT_REF
8566 off the base object if possible. indirect_baseN will be true
8567 if baseN is not an address but refers to the object itself. */
8569 if (TREE_CODE (arg0) == ADDR_EXPR)
8571 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8572 &bitsize, &bitpos0, &offset0, &mode,
8573 &unsignedp, &volatilep, false);
8574 if (TREE_CODE (base0) == INDIRECT_REF)
8575 base0 = TREE_OPERAND (base0, 0);
8577 indirect_base0 = true;
8579 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8581 base0 = TREE_OPERAND (arg0, 0);
8582 offset0 = TREE_OPERAND (arg0, 1);
8586 if (TREE_CODE (arg1) == ADDR_EXPR)
8588 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8589 &bitsize, &bitpos1, &offset1, &mode,
8590 &unsignedp, &volatilep, false);
8591 if (TREE_CODE (base1) == INDIRECT_REF)
8592 base1 = TREE_OPERAND (base1, 0);
8594 indirect_base1 = true;
8596 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8598 base1 = TREE_OPERAND (arg1, 0);
8599 offset1 = TREE_OPERAND (arg1, 1);
8602 /* If we have equivalent bases we might be able to simplify. */
8603 if (indirect_base0 == indirect_base1
8604 && operand_equal_p (base0, base1, 0))
8606 /* We can fold this expression to a constant if the non-constant
8607 offset parts are equal. */
8608 if ((offset0 == offset1
8609 || (offset0 && offset1
8610 && operand_equal_p (offset0, offset1, 0)))
8613 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8618 && bitpos0 != bitpos1
8619 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8620 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8621 fold_overflow_warning (("assuming pointer wraparound does not "
8622 "occur when comparing P +- C1 with "
8624 WARN_STRICT_OVERFLOW_CONDITIONAL);
8629 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8631 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8633 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8635 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8637 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8639 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8643 /* We can simplify the comparison to a comparison of the variable
8644 offset parts if the constant offset parts are equal.
8645 Be careful to use signed size type here because otherwise we
8646 mess with array offsets in the wrong way. This is possible
8647 because pointer arithmetic is restricted to retain within an
8648 object and overflow on pointer differences is undefined as of
8649 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8650 else if (bitpos0 == bitpos1
8651 && ((code == EQ_EXPR || code == NE_EXPR)
8652 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8654 tree signed_size_type_node;
8655 signed_size_type_node = signed_type_for (size_type_node);
8657 /* By converting to signed size type we cover middle-end pointer
8658 arithmetic which operates on unsigned pointer types of size
8659 type size and ARRAY_REF offsets which are properly sign or
8660 zero extended from their type in case it is narrower than
8662 if (offset0 == NULL_TREE)
8663 offset0 = build_int_cst (signed_size_type_node, 0);
8665 offset0 = fold_convert (signed_size_type_node, offset0);
8666 if (offset1 == NULL_TREE)
8667 offset1 = build_int_cst (signed_size_type_node, 0);
8669 offset1 = fold_convert (signed_size_type_node, offset1);
8673 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8674 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8675 fold_overflow_warning (("assuming pointer wraparound does not "
8676 "occur when comparing P +- C1 with "
8678 WARN_STRICT_OVERFLOW_COMPARISON);
8680 return fold_build2 (code, type, offset0, offset1);
8683 /* For non-equal bases we can simplify if they are addresses
8684 of local binding decls or constants. */
8685 else if (indirect_base0 && indirect_base1
8686 /* We know that !operand_equal_p (base0, base1, 0)
8687 because the if condition was false. But make
8688 sure two decls are not the same. */
8690 && TREE_CODE (arg0) == ADDR_EXPR
8691 && TREE_CODE (arg1) == ADDR_EXPR
8692 && (((TREE_CODE (base0) == VAR_DECL
8693 || TREE_CODE (base0) == PARM_DECL)
8694 && (targetm.binds_local_p (base0)
8695 || CONSTANT_CLASS_P (base1)))
8696 || CONSTANT_CLASS_P (base0))
8697 && (((TREE_CODE (base1) == VAR_DECL
8698 || TREE_CODE (base1) == PARM_DECL)
8699 && (targetm.binds_local_p (base1)
8700 || CONSTANT_CLASS_P (base0)))
8701 || CONSTANT_CLASS_P (base1)))
8703 if (code == EQ_EXPR)
8704 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8705 else if (code == NE_EXPR)
8706 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8708 /* For equal offsets we can simplify to a comparison of the
8710 else if (bitpos0 == bitpos1
8712 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8714 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8715 && ((offset0 == offset1)
8716 || (offset0 && offset1
8717 && operand_equal_p (offset0, offset1, 0))))
8720 base0 = fold_addr_expr (base0);
8722 base1 = fold_addr_expr (base1);
8723 return fold_build2 (code, type, base0, base1);
8727 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8728 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8729 the resulting offset is smaller in absolute value than the
8731 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8732 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8733 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8734 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8735 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8736 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8737 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8739 tree const1 = TREE_OPERAND (arg0, 1);
8740 tree const2 = TREE_OPERAND (arg1, 1);
8741 tree variable1 = TREE_OPERAND (arg0, 0);
8742 tree variable2 = TREE_OPERAND (arg1, 0);
8744 const char * const warnmsg = G_("assuming signed overflow does not "
8745 "occur when combining constants around "
8748 /* Put the constant on the side where it doesn't overflow and is
8749 of lower absolute value than before. */
8750 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8751 ? MINUS_EXPR : PLUS_EXPR,
8753 if (!TREE_OVERFLOW (cst)
8754 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8756 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8757 return fold_build2 (code, type,
8759 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8763 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8764 ? MINUS_EXPR : PLUS_EXPR,
8766 if (!TREE_OVERFLOW (cst)
8767 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8769 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8770 return fold_build2 (code, type,
8771 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8777 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8778 signed arithmetic case. That form is created by the compiler
8779 often enough for folding it to be of value. One example is in
8780 computing loop trip counts after Operator Strength Reduction. */
8781 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8782 && TREE_CODE (arg0) == MULT_EXPR
8783 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8784 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8785 && integer_zerop (arg1))
8787 tree const1 = TREE_OPERAND (arg0, 1);
8788 tree const2 = arg1; /* zero */
8789 tree variable1 = TREE_OPERAND (arg0, 0);
8790 enum tree_code cmp_code = code;
8792 gcc_assert (!integer_zerop (const1));
8794 fold_overflow_warning (("assuming signed overflow does not occur when "
8795 "eliminating multiplication in comparison "
8797 WARN_STRICT_OVERFLOW_COMPARISON);
8799 /* If const1 is negative we swap the sense of the comparison. */
8800 if (tree_int_cst_sgn (const1) < 0)
8801 cmp_code = swap_tree_comparison (cmp_code);
8803 return fold_build2 (cmp_code, type, variable1, const2);
8806 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8810 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8812 tree targ0 = strip_float_extensions (arg0);
8813 tree targ1 = strip_float_extensions (arg1);
8814 tree newtype = TREE_TYPE (targ0);
8816 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8817 newtype = TREE_TYPE (targ1);
8819 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8820 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8821 return fold_build2 (code, type, fold_convert (newtype, targ0),
8822 fold_convert (newtype, targ1));
8824 /* (-a) CMP (-b) -> b CMP a */
8825 if (TREE_CODE (arg0) == NEGATE_EXPR
8826 && TREE_CODE (arg1) == NEGATE_EXPR)
8827 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8828 TREE_OPERAND (arg0, 0));
8830 if (TREE_CODE (arg1) == REAL_CST)
8832 REAL_VALUE_TYPE cst;
8833 cst = TREE_REAL_CST (arg1);
8835 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8836 if (TREE_CODE (arg0) == NEGATE_EXPR)
8837 return fold_build2 (swap_tree_comparison (code), type,
8838 TREE_OPERAND (arg0, 0),
8839 build_real (TREE_TYPE (arg1),
8840 REAL_VALUE_NEGATE (cst)));
8842 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8843 /* a CMP (-0) -> a CMP 0 */
8844 if (REAL_VALUE_MINUS_ZERO (cst))
8845 return fold_build2 (code, type, arg0,
8846 build_real (TREE_TYPE (arg1), dconst0));
8848 /* x != NaN is always true, other ops are always false. */
8849 if (REAL_VALUE_ISNAN (cst)
8850 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8852 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8853 return omit_one_operand (type, tem, arg0);
8856 /* Fold comparisons against infinity. */
8857 if (REAL_VALUE_ISINF (cst))
8859 tem = fold_inf_compare (code, type, arg0, arg1);
8860 if (tem != NULL_TREE)
8865 /* If this is a comparison of a real constant with a PLUS_EXPR
8866 or a MINUS_EXPR of a real constant, we can convert it into a
8867 comparison with a revised real constant as long as no overflow
8868 occurs when unsafe_math_optimizations are enabled. */
8869 if (flag_unsafe_math_optimizations
8870 && TREE_CODE (arg1) == REAL_CST
8871 && (TREE_CODE (arg0) == PLUS_EXPR
8872 || TREE_CODE (arg0) == MINUS_EXPR)
8873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8874 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8875 ? MINUS_EXPR : PLUS_EXPR,
8876 arg1, TREE_OPERAND (arg0, 1), 0))
8877 && !TREE_OVERFLOW (tem))
8878 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8880 /* Likewise, we can simplify a comparison of a real constant with
8881 a MINUS_EXPR whose first operand is also a real constant, i.e.
8882 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8883 floating-point types only if -fassociative-math is set. */
8884 if (flag_associative_math
8885 && TREE_CODE (arg1) == REAL_CST
8886 && TREE_CODE (arg0) == MINUS_EXPR
8887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8888 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8890 && !TREE_OVERFLOW (tem))
8891 return fold_build2 (swap_tree_comparison (code), type,
8892 TREE_OPERAND (arg0, 1), tem);
8894 /* Fold comparisons against built-in math functions. */
8895 if (TREE_CODE (arg1) == REAL_CST
8896 && flag_unsafe_math_optimizations
8897 && ! flag_errno_math)
8899 enum built_in_function fcode = builtin_mathfn_code (arg0);
8901 if (fcode != END_BUILTINS)
8903 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8904 if (tem != NULL_TREE)
8910 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8911 && CONVERT_EXPR_P (arg0))
8913 /* If we are widening one operand of an integer comparison,
8914 see if the other operand is similarly being widened. Perhaps we
8915 can do the comparison in the narrower type. */
8916 tem = fold_widened_comparison (code, type, arg0, arg1);
8920 /* Or if we are changing signedness. */
8921 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8926 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8927 constant, we can simplify it. */
8928 if (TREE_CODE (arg1) == INTEGER_CST
8929 && (TREE_CODE (arg0) == MIN_EXPR
8930 || TREE_CODE (arg0) == MAX_EXPR)
8931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8933 tem = optimize_minmax_comparison (code, type, op0, op1);
8938 /* Simplify comparison of something with itself. (For IEEE
8939 floating-point, we can only do some of these simplifications.) */
8940 if (operand_equal_p (arg0, arg1, 0))
8945 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8946 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8947 return constant_boolean_node (1, type);
8952 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8953 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8954 return constant_boolean_node (1, type);
8955 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8958 /* For NE, we can only do this simplification if integer
8959 or we don't honor IEEE floating point NaNs. */
8960 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8961 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8963 /* ... fall through ... */
8966 return constant_boolean_node (0, type);
8972 /* If we are comparing an expression that just has comparisons
8973 of two integer values, arithmetic expressions of those comparisons,
8974 and constants, we can simplify it. There are only three cases
8975 to check: the two values can either be equal, the first can be
8976 greater, or the second can be greater. Fold the expression for
8977 those three values. Since each value must be 0 or 1, we have
8978 eight possibilities, each of which corresponds to the constant 0
8979 or 1 or one of the six possible comparisons.
8981 This handles common cases like (a > b) == 0 but also handles
8982 expressions like ((x > y) - (y > x)) > 0, which supposedly
8983 occur in macroized code. */
8985 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8987 tree cval1 = 0, cval2 = 0;
8990 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8991 /* Don't handle degenerate cases here; they should already
8992 have been handled anyway. */
8993 && cval1 != 0 && cval2 != 0
8994 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8995 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8996 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8997 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8998 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8999 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9000 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9002 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9003 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9005 /* We can't just pass T to eval_subst in case cval1 or cval2
9006 was the same as ARG1. */
9009 = fold_build2 (code, type,
9010 eval_subst (arg0, cval1, maxval,
9014 = fold_build2 (code, type,
9015 eval_subst (arg0, cval1, maxval,
9019 = fold_build2 (code, type,
9020 eval_subst (arg0, cval1, minval,
9024 /* All three of these results should be 0 or 1. Confirm they are.
9025 Then use those values to select the proper code to use. */
9027 if (TREE_CODE (high_result) == INTEGER_CST
9028 && TREE_CODE (equal_result) == INTEGER_CST
9029 && TREE_CODE (low_result) == INTEGER_CST)
9031 /* Make a 3-bit mask with the high-order bit being the
9032 value for `>', the next for '=', and the low for '<'. */
9033 switch ((integer_onep (high_result) * 4)
9034 + (integer_onep (equal_result) * 2)
9035 + integer_onep (low_result))
9039 return omit_one_operand (type, integer_zero_node, arg0);
9060 return omit_one_operand (type, integer_one_node, arg0);
9064 return save_expr (build2 (code, type, cval1, cval2));
9065 return fold_build2 (code, type, cval1, cval2);
9070 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9071 into a single range test. */
9072 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9073 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9074 && TREE_CODE (arg1) == INTEGER_CST
9075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9076 && !integer_zerop (TREE_OPERAND (arg0, 1))
9077 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9078 && !TREE_OVERFLOW (arg1))
9080 tem = fold_div_compare (code, type, arg0, arg1);
9081 if (tem != NULL_TREE)
9085 /* Fold ~X op ~Y as Y op X. */
9086 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9087 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9089 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9090 return fold_build2 (code, type,
9091 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9092 TREE_OPERAND (arg0, 0));
9095 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9097 && TREE_CODE (arg1) == INTEGER_CST)
9099 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9100 return fold_build2 (swap_tree_comparison (code), type,
9101 TREE_OPERAND (arg0, 0),
9102 fold_build1 (BIT_NOT_EXPR, cmp_type,
9103 fold_convert (cmp_type, arg1)));
9110 /* Subroutine of fold_binary. Optimize complex multiplications of the
9111 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9112 argument EXPR represents the expression "z" of type TYPE. */
9115 fold_mult_zconjz (tree type, tree expr)
9117 tree itype = TREE_TYPE (type);
9118 tree rpart, ipart, tem;
9120 if (TREE_CODE (expr) == COMPLEX_EXPR)
9122 rpart = TREE_OPERAND (expr, 0);
9123 ipart = TREE_OPERAND (expr, 1);
9125 else if (TREE_CODE (expr) == COMPLEX_CST)
9127 rpart = TREE_REALPART (expr);
9128 ipart = TREE_IMAGPART (expr);
9132 expr = save_expr (expr);
9133 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9134 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9137 rpart = save_expr (rpart);
9138 ipart = save_expr (ipart);
9139 tem = fold_build2 (PLUS_EXPR, itype,
9140 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9141 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9142 return fold_build2 (COMPLEX_EXPR, type, tem,
9143 fold_convert (itype, integer_zero_node));
9147 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9148 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9149 guarantees that P and N have the same least significant log2(M) bits.
9150 N is not otherwise constrained. In particular, N is not normalized to
9151 0 <= N < M as is common. In general, the precise value of P is unknown.
9152 M is chosen as large as possible such that constant N can be determined.
9154 Returns M and sets *RESIDUE to N. */
9156 static unsigned HOST_WIDE_INT
9157 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9159 enum tree_code code;
9163 code = TREE_CODE (expr);
9164 if (code == ADDR_EXPR)
9166 expr = TREE_OPERAND (expr, 0);
9167 if (handled_component_p (expr))
9169 HOST_WIDE_INT bitsize, bitpos;
9171 enum machine_mode mode;
9172 int unsignedp, volatilep;
9174 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9175 &mode, &unsignedp, &volatilep, false);
9176 *residue = bitpos / BITS_PER_UNIT;
9179 if (TREE_CODE (offset) == INTEGER_CST)
9180 *residue += TREE_INT_CST_LOW (offset);
9182 /* We don't handle more complicated offset expressions. */
9187 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9188 return DECL_ALIGN_UNIT (expr);
9190 else if (code == POINTER_PLUS_EXPR)
9193 unsigned HOST_WIDE_INT modulus;
9194 enum tree_code inner_code;
9196 op0 = TREE_OPERAND (expr, 0);
9198 modulus = get_pointer_modulus_and_residue (op0, residue);
9200 op1 = TREE_OPERAND (expr, 1);
9202 inner_code = TREE_CODE (op1);
9203 if (inner_code == INTEGER_CST)
9205 *residue += TREE_INT_CST_LOW (op1);
9208 else if (inner_code == MULT_EXPR)
9210 op1 = TREE_OPERAND (op1, 1);
9211 if (TREE_CODE (op1) == INTEGER_CST)
9213 unsigned HOST_WIDE_INT align;
9215 /* Compute the greatest power-of-2 divisor of op1. */
9216 align = TREE_INT_CST_LOW (op1);
9219 /* If align is non-zero and less than *modulus, replace
9220 *modulus with align., If align is 0, then either op1 is 0
9221 or the greatest power-of-2 divisor of op1 doesn't fit in an
9222 unsigned HOST_WIDE_INT. In either case, no additional
9223 constraint is imposed. */
9225 modulus = MIN (modulus, align);
9232 /* If we get here, we were unable to determine anything useful about the
9238 /* Fold a binary expression of code CODE and type TYPE with operands
9239 OP0 and OP1. Return the folded expression if folding is
9240 successful. Otherwise, return NULL_TREE. */
9243 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9245 enum tree_code_class kind = TREE_CODE_CLASS (code);
9246 tree arg0, arg1, tem;
9247 tree t1 = NULL_TREE;
9248 bool strict_overflow_p;
9250 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9251 && TREE_CODE_LENGTH (code) == 2
9253 && op1 != NULL_TREE);
9258 /* Strip any conversions that don't change the mode. This is
9259 safe for every expression, except for a comparison expression
9260 because its signedness is derived from its operands. So, in
9261 the latter case, only strip conversions that don't change the
9262 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9265 Note that this is done as an internal manipulation within the
9266 constant folder, in order to find the simplest representation
9267 of the arguments so that their form can be studied. In any
9268 cases, the appropriate type conversions should be put back in
9269 the tree that will get out of the constant folder. */
9271 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9273 STRIP_SIGN_NOPS (arg0);
9274 STRIP_SIGN_NOPS (arg1);
9282 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9283 constant but we can't do arithmetic on them. */
9284 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9285 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9286 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9287 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9288 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9289 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9291 if (kind == tcc_binary)
9293 /* Make sure type and arg0 have the same saturating flag. */
9294 gcc_assert (TYPE_SATURATING (type)
9295 == TYPE_SATURATING (TREE_TYPE (arg0)));
9296 tem = const_binop (code, arg0, arg1, 0);
9298 else if (kind == tcc_comparison)
9299 tem = fold_relational_const (code, type, arg0, arg1);
9303 if (tem != NULL_TREE)
9305 if (TREE_TYPE (tem) != type)
9306 tem = fold_convert (type, tem);
9311 /* If this is a commutative operation, and ARG0 is a constant, move it
9312 to ARG1 to reduce the number of tests below. */
9313 if (commutative_tree_code (code)
9314 && tree_swap_operands_p (arg0, arg1, true))
9315 return fold_build2 (code, type, op1, op0);
9317 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9319 First check for cases where an arithmetic operation is applied to a
9320 compound, conditional, or comparison operation. Push the arithmetic
9321 operation inside the compound or conditional to see if any folding
9322 can then be done. Convert comparison to conditional for this purpose.
9323 The also optimizes non-constant cases that used to be done in
9326 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9327 one of the operands is a comparison and the other is a comparison, a
9328 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9329 code below would make the expression more complex. Change it to a
9330 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9331 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9333 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9334 || code == EQ_EXPR || code == NE_EXPR)
9335 && ((truth_value_p (TREE_CODE (arg0))
9336 && (truth_value_p (TREE_CODE (arg1))
9337 || (TREE_CODE (arg1) == BIT_AND_EXPR
9338 && integer_onep (TREE_OPERAND (arg1, 1)))))
9339 || (truth_value_p (TREE_CODE (arg1))
9340 && (truth_value_p (TREE_CODE (arg0))
9341 || (TREE_CODE (arg0) == BIT_AND_EXPR
9342 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9344 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9345 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9348 fold_convert (boolean_type_node, arg0),
9349 fold_convert (boolean_type_node, arg1));
9351 if (code == EQ_EXPR)
9352 tem = invert_truthvalue (tem);
9354 return fold_convert (type, tem);
9357 if (TREE_CODE_CLASS (code) == tcc_binary
9358 || TREE_CODE_CLASS (code) == tcc_comparison)
9360 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9361 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9362 fold_build2 (code, type,
9363 fold_convert (TREE_TYPE (op0),
9364 TREE_OPERAND (arg0, 1)),
9366 if (TREE_CODE (arg1) == COMPOUND_EXPR
9367 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9368 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9369 fold_build2 (code, type, op0,
9370 fold_convert (TREE_TYPE (op1),
9371 TREE_OPERAND (arg1, 1))));
9373 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9375 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9377 /*cond_first_p=*/1);
9378 if (tem != NULL_TREE)
9382 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9384 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9386 /*cond_first_p=*/0);
9387 if (tem != NULL_TREE)
9394 case POINTER_PLUS_EXPR:
9395 /* 0 +p index -> (type)index */
9396 if (integer_zerop (arg0))
9397 return non_lvalue (fold_convert (type, arg1));
9399 /* PTR +p 0 -> PTR */
9400 if (integer_zerop (arg1))
9401 return non_lvalue (fold_convert (type, arg0));
9403 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9404 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9405 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9406 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9407 fold_convert (sizetype, arg1),
9408 fold_convert (sizetype, arg0)));
9410 /* index +p PTR -> PTR +p index */
9411 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9412 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9413 return fold_build2 (POINTER_PLUS_EXPR, type,
9414 fold_convert (type, arg1),
9415 fold_convert (sizetype, arg0));
9417 /* (PTR +p B) +p A -> PTR +p (B + A) */
9418 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9421 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9422 tree arg00 = TREE_OPERAND (arg0, 0);
9423 inner = fold_build2 (PLUS_EXPR, sizetype,
9424 arg01, fold_convert (sizetype, arg1));
9425 return fold_convert (type,
9426 fold_build2 (POINTER_PLUS_EXPR,
9427 TREE_TYPE (arg00), arg00, inner));
9430 /* PTR_CST +p CST -> CST1 */
9431 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9432 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9434 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9435 of the array. Loop optimizer sometimes produce this type of
9437 if (TREE_CODE (arg0) == ADDR_EXPR)
9439 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9441 return fold_convert (type, tem);
9447 /* PTR + INT -> (INT)(PTR p+ INT) */
9448 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9449 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9450 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9453 fold_convert (sizetype, arg1)));
9454 /* INT + PTR -> (INT)(PTR p+ INT) */
9455 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9456 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9457 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9460 fold_convert (sizetype, arg0)));
9461 /* A + (-B) -> A - B */
9462 if (TREE_CODE (arg1) == NEGATE_EXPR)
9463 return fold_build2 (MINUS_EXPR, type,
9464 fold_convert (type, arg0),
9465 fold_convert (type, TREE_OPERAND (arg1, 0)));
9466 /* (-A) + B -> B - A */
9467 if (TREE_CODE (arg0) == NEGATE_EXPR
9468 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9469 return fold_build2 (MINUS_EXPR, type,
9470 fold_convert (type, arg1),
9471 fold_convert (type, TREE_OPERAND (arg0, 0)));
9473 if (INTEGRAL_TYPE_P (type))
9475 /* Convert ~A + 1 to -A. */
9476 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9477 && integer_onep (arg1))
9478 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9481 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9482 && !TYPE_OVERFLOW_TRAPS (type))
9484 tree tem = TREE_OPERAND (arg0, 0);
9487 if (operand_equal_p (tem, arg1, 0))
9489 t1 = build_int_cst_type (type, -1);
9490 return omit_one_operand (type, t1, arg1);
9495 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9496 && !TYPE_OVERFLOW_TRAPS (type))
9498 tree tem = TREE_OPERAND (arg1, 0);
9501 if (operand_equal_p (arg0, tem, 0))
9503 t1 = build_int_cst_type (type, -1);
9504 return omit_one_operand (type, t1, arg0);
9508 /* X + (X / CST) * -CST is X % CST. */
9509 if (TREE_CODE (arg1) == MULT_EXPR
9510 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9511 && operand_equal_p (arg0,
9512 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9514 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9515 tree cst1 = TREE_OPERAND (arg1, 1);
9516 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9517 if (sum && integer_zerop (sum))
9518 return fold_convert (type,
9519 fold_build2 (TRUNC_MOD_EXPR,
9520 TREE_TYPE (arg0), arg0, cst0));
9524 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9525 same or one. Make sure type is not saturating.
9526 fold_plusminus_mult_expr will re-associate. */
9527 if ((TREE_CODE (arg0) == MULT_EXPR
9528 || TREE_CODE (arg1) == MULT_EXPR)
9529 && !TYPE_SATURATING (type)
9530 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9532 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9537 if (! FLOAT_TYPE_P (type))
9539 if (integer_zerop (arg1))
9540 return non_lvalue (fold_convert (type, arg0));
9542 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9543 with a constant, and the two constants have no bits in common,
9544 we should treat this as a BIT_IOR_EXPR since this may produce more
9546 if (TREE_CODE (arg0) == BIT_AND_EXPR
9547 && TREE_CODE (arg1) == BIT_AND_EXPR
9548 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9549 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9550 && integer_zerop (const_binop (BIT_AND_EXPR,
9551 TREE_OPERAND (arg0, 1),
9552 TREE_OPERAND (arg1, 1), 0)))
9554 code = BIT_IOR_EXPR;
9558 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9559 (plus (plus (mult) (mult)) (foo)) so that we can
9560 take advantage of the factoring cases below. */
9561 if (((TREE_CODE (arg0) == PLUS_EXPR
9562 || TREE_CODE (arg0) == MINUS_EXPR)
9563 && TREE_CODE (arg1) == MULT_EXPR)
9564 || ((TREE_CODE (arg1) == PLUS_EXPR
9565 || TREE_CODE (arg1) == MINUS_EXPR)
9566 && TREE_CODE (arg0) == MULT_EXPR))
9568 tree parg0, parg1, parg, marg;
9569 enum tree_code pcode;
9571 if (TREE_CODE (arg1) == MULT_EXPR)
9572 parg = arg0, marg = arg1;
9574 parg = arg1, marg = arg0;
9575 pcode = TREE_CODE (parg);
9576 parg0 = TREE_OPERAND (parg, 0);
9577 parg1 = TREE_OPERAND (parg, 1);
9581 if (TREE_CODE (parg0) == MULT_EXPR
9582 && TREE_CODE (parg1) != MULT_EXPR)
9583 return fold_build2 (pcode, type,
9584 fold_build2 (PLUS_EXPR, type,
9585 fold_convert (type, parg0),
9586 fold_convert (type, marg)),
9587 fold_convert (type, parg1));
9588 if (TREE_CODE (parg0) != MULT_EXPR
9589 && TREE_CODE (parg1) == MULT_EXPR)
9590 return fold_build2 (PLUS_EXPR, type,
9591 fold_convert (type, parg0),
9592 fold_build2 (pcode, type,
9593 fold_convert (type, marg),
9600 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9601 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9602 return non_lvalue (fold_convert (type, arg0));
9604 /* Likewise if the operands are reversed. */
9605 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9606 return non_lvalue (fold_convert (type, arg1));
9608 /* Convert X + -C into X - C. */
9609 if (TREE_CODE (arg1) == REAL_CST
9610 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9612 tem = fold_negate_const (arg1, type);
9613 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9614 return fold_build2 (MINUS_EXPR, type,
9615 fold_convert (type, arg0),
9616 fold_convert (type, tem));
9619 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9620 to __complex__ ( x, y ). This is not the same for SNaNs or
9621 if signed zeros are involved. */
9622 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9624 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9626 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9627 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9628 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9629 bool arg0rz = false, arg0iz = false;
9630 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9631 || (arg0i && (arg0iz = real_zerop (arg0i))))
9633 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9634 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9635 if (arg0rz && arg1i && real_zerop (arg1i))
9637 tree rp = arg1r ? arg1r
9638 : build1 (REALPART_EXPR, rtype, arg1);
9639 tree ip = arg0i ? arg0i
9640 : build1 (IMAGPART_EXPR, rtype, arg0);
9641 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9643 else if (arg0iz && arg1r && real_zerop (arg1r))
9645 tree rp = arg0r ? arg0r
9646 : build1 (REALPART_EXPR, rtype, arg0);
9647 tree ip = arg1i ? arg1i
9648 : build1 (IMAGPART_EXPR, rtype, arg1);
9649 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9654 if (flag_unsafe_math_optimizations
9655 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9656 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9657 && (tem = distribute_real_division (code, type, arg0, arg1)))
9660 /* Convert x+x into x*2.0. */
9661 if (operand_equal_p (arg0, arg1, 0)
9662 && SCALAR_FLOAT_TYPE_P (type))
9663 return fold_build2 (MULT_EXPR, type, arg0,
9664 build_real (type, dconst2));
9666 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9667 We associate floats only if the user has specified
9668 -fassociative-math. */
9669 if (flag_associative_math
9670 && TREE_CODE (arg1) == PLUS_EXPR
9671 && TREE_CODE (arg0) != MULT_EXPR)
9673 tree tree10 = TREE_OPERAND (arg1, 0);
9674 tree tree11 = TREE_OPERAND (arg1, 1);
9675 if (TREE_CODE (tree11) == MULT_EXPR
9676 && TREE_CODE (tree10) == MULT_EXPR)
9679 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9680 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9683 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9684 We associate floats only if the user has specified
9685 -fassociative-math. */
9686 if (flag_associative_math
9687 && TREE_CODE (arg0) == PLUS_EXPR
9688 && TREE_CODE (arg1) != MULT_EXPR)
9690 tree tree00 = TREE_OPERAND (arg0, 0);
9691 tree tree01 = TREE_OPERAND (arg0, 1);
9692 if (TREE_CODE (tree01) == MULT_EXPR
9693 && TREE_CODE (tree00) == MULT_EXPR)
9696 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9697 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9703 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9704 is a rotate of A by C1 bits. */
9705 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9706 is a rotate of A by B bits. */
9708 enum tree_code code0, code1;
9710 code0 = TREE_CODE (arg0);
9711 code1 = TREE_CODE (arg1);
9712 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9713 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9714 && operand_equal_p (TREE_OPERAND (arg0, 0),
9715 TREE_OPERAND (arg1, 0), 0)
9716 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9717 TYPE_UNSIGNED (rtype))
9718 /* Only create rotates in complete modes. Other cases are not
9719 expanded properly. */
9720 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9722 tree tree01, tree11;
9723 enum tree_code code01, code11;
9725 tree01 = TREE_OPERAND (arg0, 1);
9726 tree11 = TREE_OPERAND (arg1, 1);
9727 STRIP_NOPS (tree01);
9728 STRIP_NOPS (tree11);
9729 code01 = TREE_CODE (tree01);
9730 code11 = TREE_CODE (tree11);
9731 if (code01 == INTEGER_CST
9732 && code11 == INTEGER_CST
9733 && TREE_INT_CST_HIGH (tree01) == 0
9734 && TREE_INT_CST_HIGH (tree11) == 0
9735 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9736 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9737 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9738 code0 == LSHIFT_EXPR ? tree01 : tree11);
9739 else if (code11 == MINUS_EXPR)
9741 tree tree110, tree111;
9742 tree110 = TREE_OPERAND (tree11, 0);
9743 tree111 = TREE_OPERAND (tree11, 1);
9744 STRIP_NOPS (tree110);
9745 STRIP_NOPS (tree111);
9746 if (TREE_CODE (tree110) == INTEGER_CST
9747 && 0 == compare_tree_int (tree110,
9749 (TREE_TYPE (TREE_OPERAND
9751 && operand_equal_p (tree01, tree111, 0))
9752 return build2 ((code0 == LSHIFT_EXPR
9755 type, TREE_OPERAND (arg0, 0), tree01);
9757 else if (code01 == MINUS_EXPR)
9759 tree tree010, tree011;
9760 tree010 = TREE_OPERAND (tree01, 0);
9761 tree011 = TREE_OPERAND (tree01, 1);
9762 STRIP_NOPS (tree010);
9763 STRIP_NOPS (tree011);
9764 if (TREE_CODE (tree010) == INTEGER_CST
9765 && 0 == compare_tree_int (tree010,
9767 (TREE_TYPE (TREE_OPERAND
9769 && operand_equal_p (tree11, tree011, 0))
9770 return build2 ((code0 != LSHIFT_EXPR
9773 type, TREE_OPERAND (arg0, 0), tree11);
9779 /* In most languages, can't associate operations on floats through
9780 parentheses. Rather than remember where the parentheses were, we
9781 don't associate floats at all, unless the user has specified
9783 And, we need to make sure type is not saturating. */
9785 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9786 && !TYPE_SATURATING (type))
9788 tree var0, con0, lit0, minus_lit0;
9789 tree var1, con1, lit1, minus_lit1;
9792 /* Split both trees into variables, constants, and literals. Then
9793 associate each group together, the constants with literals,
9794 then the result with variables. This increases the chances of
9795 literals being recombined later and of generating relocatable
9796 expressions for the sum of a constant and literal. */
9797 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9798 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9799 code == MINUS_EXPR);
9801 /* With undefined overflow we can only associate constants
9802 with one variable. */
9803 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9804 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9810 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9811 tmp0 = TREE_OPERAND (tmp0, 0);
9812 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9813 tmp1 = TREE_OPERAND (tmp1, 0);
9814 /* The only case we can still associate with two variables
9815 is if they are the same, modulo negation. */
9816 if (!operand_equal_p (tmp0, tmp1, 0))
9820 /* Only do something if we found more than two objects. Otherwise,
9821 nothing has changed and we risk infinite recursion. */
9823 && (2 < ((var0 != 0) + (var1 != 0)
9824 + (con0 != 0) + (con1 != 0)
9825 + (lit0 != 0) + (lit1 != 0)
9826 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9828 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9829 if (code == MINUS_EXPR)
9832 var0 = associate_trees (var0, var1, code, type);
9833 con0 = associate_trees (con0, con1, code, type);
9834 lit0 = associate_trees (lit0, lit1, code, type);
9835 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9837 /* Preserve the MINUS_EXPR if the negative part of the literal is
9838 greater than the positive part. Otherwise, the multiplicative
9839 folding code (i.e extract_muldiv) may be fooled in case
9840 unsigned constants are subtracted, like in the following
9841 example: ((X*2 + 4) - 8U)/2. */
9842 if (minus_lit0 && lit0)
9844 if (TREE_CODE (lit0) == INTEGER_CST
9845 && TREE_CODE (minus_lit0) == INTEGER_CST
9846 && tree_int_cst_lt (lit0, minus_lit0))
9848 minus_lit0 = associate_trees (minus_lit0, lit0,
9854 lit0 = associate_trees (lit0, minus_lit0,
9862 return fold_convert (type,
9863 associate_trees (var0, minus_lit0,
9867 con0 = associate_trees (con0, minus_lit0,
9869 return fold_convert (type,
9870 associate_trees (var0, con0,
9875 con0 = associate_trees (con0, lit0, code, type);
9876 return fold_convert (type, associate_trees (var0, con0,
9884 /* Pointer simplifications for subtraction, simple reassociations. */
9885 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9887 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9888 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9889 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9891 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9892 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9893 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9894 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9895 return fold_build2 (PLUS_EXPR, type,
9896 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9897 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9899 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9900 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9902 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9903 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9904 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9906 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9909 /* A - (-B) -> A + B */
9910 if (TREE_CODE (arg1) == NEGATE_EXPR)
9911 return fold_build2 (PLUS_EXPR, type, op0,
9912 fold_convert (type, TREE_OPERAND (arg1, 0)));
9913 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9914 if (TREE_CODE (arg0) == NEGATE_EXPR
9915 && (FLOAT_TYPE_P (type)
9916 || INTEGRAL_TYPE_P (type))
9917 && negate_expr_p (arg1)
9918 && reorder_operands_p (arg0, arg1))
9919 return fold_build2 (MINUS_EXPR, type,
9920 fold_convert (type, negate_expr (arg1)),
9921 fold_convert (type, TREE_OPERAND (arg0, 0)));
9922 /* Convert -A - 1 to ~A. */
9923 if (INTEGRAL_TYPE_P (type)
9924 && TREE_CODE (arg0) == NEGATE_EXPR
9925 && integer_onep (arg1)
9926 && !TYPE_OVERFLOW_TRAPS (type))
9927 return fold_build1 (BIT_NOT_EXPR, type,
9928 fold_convert (type, TREE_OPERAND (arg0, 0)));
9930 /* Convert -1 - A to ~A. */
9931 if (INTEGRAL_TYPE_P (type)
9932 && integer_all_onesp (arg0))
9933 return fold_build1 (BIT_NOT_EXPR, type, op1);
9936 /* X - (X / CST) * CST is X % CST. */
9937 if (INTEGRAL_TYPE_P (type)
9938 && TREE_CODE (arg1) == MULT_EXPR
9939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9940 && operand_equal_p (arg0,
9941 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9942 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9943 TREE_OPERAND (arg1, 1), 0))
9944 return fold_convert (type,
9945 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9946 arg0, TREE_OPERAND (arg1, 1)));
9948 if (! FLOAT_TYPE_P (type))
9950 if (integer_zerop (arg0))
9951 return negate_expr (fold_convert (type, arg1));
9952 if (integer_zerop (arg1))
9953 return non_lvalue (fold_convert (type, arg0));
9955 /* Fold A - (A & B) into ~B & A. */
9956 if (!TREE_SIDE_EFFECTS (arg0)
9957 && TREE_CODE (arg1) == BIT_AND_EXPR)
9959 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9961 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9962 return fold_build2 (BIT_AND_EXPR, type,
9963 fold_build1 (BIT_NOT_EXPR, type, arg10),
9964 fold_convert (type, arg0));
9966 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9968 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9969 return fold_build2 (BIT_AND_EXPR, type,
9970 fold_build1 (BIT_NOT_EXPR, type, arg11),
9971 fold_convert (type, arg0));
9975 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9976 any power of 2 minus 1. */
9977 if (TREE_CODE (arg0) == BIT_AND_EXPR
9978 && TREE_CODE (arg1) == BIT_AND_EXPR
9979 && operand_equal_p (TREE_OPERAND (arg0, 0),
9980 TREE_OPERAND (arg1, 0), 0))
9982 tree mask0 = TREE_OPERAND (arg0, 1);
9983 tree mask1 = TREE_OPERAND (arg1, 1);
9984 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9986 if (operand_equal_p (tem, mask1, 0))
9988 tem = fold_build2 (BIT_XOR_EXPR, type,
9989 TREE_OPERAND (arg0, 0), mask1);
9990 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9995 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9996 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9997 return non_lvalue (fold_convert (type, arg0));
9999 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10000 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10001 (-ARG1 + ARG0) reduces to -ARG1. */
10002 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10003 return negate_expr (fold_convert (type, arg1));
10005 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10006 __complex__ ( x, -y ). This is not the same for SNaNs or if
10007 signed zeros are involved. */
10008 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10009 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10010 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10012 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10013 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10014 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10015 bool arg0rz = false, arg0iz = false;
10016 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10017 || (arg0i && (arg0iz = real_zerop (arg0i))))
10019 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10020 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10021 if (arg0rz && arg1i && real_zerop (arg1i))
10023 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10025 : build1 (REALPART_EXPR, rtype, arg1));
10026 tree ip = arg0i ? arg0i
10027 : build1 (IMAGPART_EXPR, rtype, arg0);
10028 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10030 else if (arg0iz && arg1r && real_zerop (arg1r))
10032 tree rp = arg0r ? arg0r
10033 : build1 (REALPART_EXPR, rtype, arg0);
10034 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10036 : build1 (IMAGPART_EXPR, rtype, arg1));
10037 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10042 /* Fold &x - &x. This can happen from &x.foo - &x.
10043 This is unsafe for certain floats even in non-IEEE formats.
10044 In IEEE, it is unsafe because it does wrong for NaNs.
10045 Also note that operand_equal_p is always false if an operand
10048 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10049 && operand_equal_p (arg0, arg1, 0))
10050 return fold_convert (type, integer_zero_node);
10052 /* A - B -> A + (-B) if B is easily negatable. */
10053 if (negate_expr_p (arg1)
10054 && ((FLOAT_TYPE_P (type)
10055 /* Avoid this transformation if B is a positive REAL_CST. */
10056 && (TREE_CODE (arg1) != REAL_CST
10057 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10058 || INTEGRAL_TYPE_P (type)))
10059 return fold_build2 (PLUS_EXPR, type,
10060 fold_convert (type, arg0),
10061 fold_convert (type, negate_expr (arg1)));
10063 /* Try folding difference of addresses. */
10065 HOST_WIDE_INT diff;
10067 if ((TREE_CODE (arg0) == ADDR_EXPR
10068 || TREE_CODE (arg1) == ADDR_EXPR)
10069 && ptr_difference_const (arg0, arg1, &diff))
10070 return build_int_cst_type (type, diff);
10073 /* Fold &a[i] - &a[j] to i-j. */
10074 if (TREE_CODE (arg0) == ADDR_EXPR
10075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10076 && TREE_CODE (arg1) == ADDR_EXPR
10077 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10079 tree aref0 = TREE_OPERAND (arg0, 0);
10080 tree aref1 = TREE_OPERAND (arg1, 0);
10081 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10082 TREE_OPERAND (aref1, 0), 0))
10084 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10085 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10086 tree esz = array_ref_element_size (aref0);
10087 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10088 return fold_build2 (MULT_EXPR, type, diff,
10089 fold_convert (type, esz));
10094 if (flag_unsafe_math_optimizations
10095 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10096 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10097 && (tem = distribute_real_division (code, type, arg0, arg1)))
10100 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10101 same or one. Make sure type is not saturating.
10102 fold_plusminus_mult_expr will re-associate. */
10103 if ((TREE_CODE (arg0) == MULT_EXPR
10104 || TREE_CODE (arg1) == MULT_EXPR)
10105 && !TYPE_SATURATING (type)
10106 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10108 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10116 /* (-A) * (-B) -> A * B */
10117 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10118 return fold_build2 (MULT_EXPR, type,
10119 fold_convert (type, TREE_OPERAND (arg0, 0)),
10120 fold_convert (type, negate_expr (arg1)));
10121 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10122 return fold_build2 (MULT_EXPR, type,
10123 fold_convert (type, negate_expr (arg0)),
10124 fold_convert (type, TREE_OPERAND (arg1, 0)));
10126 if (! FLOAT_TYPE_P (type))
10128 if (integer_zerop (arg1))
10129 return omit_one_operand (type, arg1, arg0);
10130 if (integer_onep (arg1))
10131 return non_lvalue (fold_convert (type, arg0));
10132 /* Transform x * -1 into -x. Make sure to do the negation
10133 on the original operand with conversions not stripped
10134 because we can only strip non-sign-changing conversions. */
10135 if (integer_all_onesp (arg1))
10136 return fold_convert (type, negate_expr (op0));
10137 /* Transform x * -C into -x * C if x is easily negatable. */
10138 if (TREE_CODE (arg1) == INTEGER_CST
10139 && tree_int_cst_sgn (arg1) == -1
10140 && negate_expr_p (arg0)
10141 && (tem = negate_expr (arg1)) != arg1
10142 && !TREE_OVERFLOW (tem))
10143 return fold_build2 (MULT_EXPR, type,
10144 fold_convert (type, negate_expr (arg0)), tem);
10146 /* (a * (1 << b)) is (a << b) */
10147 if (TREE_CODE (arg1) == LSHIFT_EXPR
10148 && integer_onep (TREE_OPERAND (arg1, 0)))
10149 return fold_build2 (LSHIFT_EXPR, type, op0,
10150 TREE_OPERAND (arg1, 1));
10151 if (TREE_CODE (arg0) == LSHIFT_EXPR
10152 && integer_onep (TREE_OPERAND (arg0, 0)))
10153 return fold_build2 (LSHIFT_EXPR, type, op1,
10154 TREE_OPERAND (arg0, 1));
10156 /* (A + A) * C -> A * 2 * C */
10157 if (TREE_CODE (arg0) == PLUS_EXPR
10158 && TREE_CODE (arg1) == INTEGER_CST
10159 && operand_equal_p (TREE_OPERAND (arg0, 0),
10160 TREE_OPERAND (arg0, 1), 0))
10161 return fold_build2 (MULT_EXPR, type,
10162 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10163 TREE_OPERAND (arg0, 1)),
10164 fold_build2 (MULT_EXPR, type,
10165 build_int_cst (type, 2) , arg1));
10167 strict_overflow_p = false;
10168 if (TREE_CODE (arg1) == INTEGER_CST
10169 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10170 &strict_overflow_p)))
10172 if (strict_overflow_p)
10173 fold_overflow_warning (("assuming signed overflow does not "
10174 "occur when simplifying "
10176 WARN_STRICT_OVERFLOW_MISC);
10177 return fold_convert (type, tem);
10180 /* Optimize z * conj(z) for integer complex numbers. */
10181 if (TREE_CODE (arg0) == CONJ_EXPR
10182 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10183 return fold_mult_zconjz (type, arg1);
10184 if (TREE_CODE (arg1) == CONJ_EXPR
10185 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10186 return fold_mult_zconjz (type, arg0);
10190 /* Maybe fold x * 0 to 0. The expressions aren't the same
10191 when x is NaN, since x * 0 is also NaN. Nor are they the
10192 same in modes with signed zeros, since multiplying a
10193 negative value by 0 gives -0, not +0. */
10194 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10195 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10196 && real_zerop (arg1))
10197 return omit_one_operand (type, arg1, arg0);
10198 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10199 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10200 && real_onep (arg1))
10201 return non_lvalue (fold_convert (type, arg0));
10203 /* Transform x * -1.0 into -x. */
10204 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10205 && real_minus_onep (arg1))
10206 return fold_convert (type, negate_expr (arg0));
10208 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10209 the result for floating point types due to rounding so it is applied
10210 only if -fassociative-math was specify. */
10211 if (flag_associative_math
10212 && TREE_CODE (arg0) == RDIV_EXPR
10213 && TREE_CODE (arg1) == REAL_CST
10214 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10216 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10219 return fold_build2 (RDIV_EXPR, type, tem,
10220 TREE_OPERAND (arg0, 1));
10223 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10224 if (operand_equal_p (arg0, arg1, 0))
10226 tree tem = fold_strip_sign_ops (arg0);
10227 if (tem != NULL_TREE)
10229 tem = fold_convert (type, tem);
10230 return fold_build2 (MULT_EXPR, type, tem, tem);
10234 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10235 This is not the same for NaNs or if signed zeros are
10237 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10238 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10239 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10240 && TREE_CODE (arg1) == COMPLEX_CST
10241 && real_zerop (TREE_REALPART (arg1)))
10243 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10244 if (real_onep (TREE_IMAGPART (arg1)))
10245 return fold_build2 (COMPLEX_EXPR, type,
10246 negate_expr (fold_build1 (IMAGPART_EXPR,
10248 fold_build1 (REALPART_EXPR, rtype, arg0));
10249 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10250 return fold_build2 (COMPLEX_EXPR, type,
10251 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10252 negate_expr (fold_build1 (REALPART_EXPR,
10256 /* Optimize z * conj(z) for floating point complex numbers.
10257 Guarded by flag_unsafe_math_optimizations as non-finite
10258 imaginary components don't produce scalar results. */
10259 if (flag_unsafe_math_optimizations
10260 && TREE_CODE (arg0) == CONJ_EXPR
10261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10262 return fold_mult_zconjz (type, arg1);
10263 if (flag_unsafe_math_optimizations
10264 && TREE_CODE (arg1) == CONJ_EXPR
10265 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10266 return fold_mult_zconjz (type, arg0);
10268 if (flag_unsafe_math_optimizations)
10270 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10271 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10273 /* Optimizations of root(...)*root(...). */
10274 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10277 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10278 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10280 /* Optimize sqrt(x)*sqrt(x) as x. */
10281 if (BUILTIN_SQRT_P (fcode0)
10282 && operand_equal_p (arg00, arg10, 0)
10283 && ! HONOR_SNANS (TYPE_MODE (type)))
10286 /* Optimize root(x)*root(y) as root(x*y). */
10287 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10288 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10289 return build_call_expr (rootfn, 1, arg);
10292 /* Optimize expN(x)*expN(y) as expN(x+y). */
10293 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10295 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10296 tree arg = fold_build2 (PLUS_EXPR, type,
10297 CALL_EXPR_ARG (arg0, 0),
10298 CALL_EXPR_ARG (arg1, 0));
10299 return build_call_expr (expfn, 1, arg);
10302 /* Optimizations of pow(...)*pow(...). */
10303 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10304 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10305 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10307 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10308 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10309 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10310 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10312 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10313 if (operand_equal_p (arg01, arg11, 0))
10315 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10316 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10317 return build_call_expr (powfn, 2, arg, arg01);
10320 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10321 if (operand_equal_p (arg00, arg10, 0))
10323 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10324 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10325 return build_call_expr (powfn, 2, arg00, arg);
10329 /* Optimize tan(x)*cos(x) as sin(x). */
10330 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10331 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10332 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10333 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10334 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10335 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10336 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10337 CALL_EXPR_ARG (arg1, 0), 0))
10339 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10341 if (sinfn != NULL_TREE)
10342 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10345 /* Optimize x*pow(x,c) as pow(x,c+1). */
10346 if (fcode1 == BUILT_IN_POW
10347 || fcode1 == BUILT_IN_POWF
10348 || fcode1 == BUILT_IN_POWL)
10350 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10351 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10352 if (TREE_CODE (arg11) == REAL_CST
10353 && !TREE_OVERFLOW (arg11)
10354 && operand_equal_p (arg0, arg10, 0))
10356 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10360 c = TREE_REAL_CST (arg11);
10361 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10362 arg = build_real (type, c);
10363 return build_call_expr (powfn, 2, arg0, arg);
10367 /* Optimize pow(x,c)*x as pow(x,c+1). */
10368 if (fcode0 == BUILT_IN_POW
10369 || fcode0 == BUILT_IN_POWF
10370 || fcode0 == BUILT_IN_POWL)
10372 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10373 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10374 if (TREE_CODE (arg01) == REAL_CST
10375 && !TREE_OVERFLOW (arg01)
10376 && operand_equal_p (arg1, arg00, 0))
10378 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10382 c = TREE_REAL_CST (arg01);
10383 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10384 arg = build_real (type, c);
10385 return build_call_expr (powfn, 2, arg1, arg);
10389 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10390 if (! optimize_size
10391 && operand_equal_p (arg0, arg1, 0))
10393 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10397 tree arg = build_real (type, dconst2);
10398 return build_call_expr (powfn, 2, arg0, arg);
10407 if (integer_all_onesp (arg1))
10408 return omit_one_operand (type, arg1, arg0);
10409 if (integer_zerop (arg1))
10410 return non_lvalue (fold_convert (type, arg0));
10411 if (operand_equal_p (arg0, arg1, 0))
10412 return non_lvalue (fold_convert (type, arg0));
10414 /* ~X | X is -1. */
10415 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10416 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10418 t1 = fold_convert (type, integer_zero_node);
10419 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10420 return omit_one_operand (type, t1, arg1);
10423 /* X | ~X is -1. */
10424 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10425 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10427 t1 = fold_convert (type, integer_zero_node);
10428 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10429 return omit_one_operand (type, t1, arg0);
10432 /* Canonicalize (X & C1) | C2. */
10433 if (TREE_CODE (arg0) == BIT_AND_EXPR
10434 && TREE_CODE (arg1) == INTEGER_CST
10435 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10437 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10438 int width = TYPE_PRECISION (type), w;
10439 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10440 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10441 hi2 = TREE_INT_CST_HIGH (arg1);
10442 lo2 = TREE_INT_CST_LOW (arg1);
10444 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10445 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10446 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10448 if (width > HOST_BITS_PER_WIDE_INT)
10450 mhi = (unsigned HOST_WIDE_INT) -1
10451 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10457 mlo = (unsigned HOST_WIDE_INT) -1
10458 >> (HOST_BITS_PER_WIDE_INT - width);
10461 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10462 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10463 return fold_build2 (BIT_IOR_EXPR, type,
10464 TREE_OPERAND (arg0, 0), arg1);
10466 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10467 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10468 mode which allows further optimizations. */
10475 for (w = BITS_PER_UNIT;
10476 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10479 unsigned HOST_WIDE_INT mask
10480 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10481 if (((lo1 | lo2) & mask) == mask
10482 && (lo1 & ~mask) == 0 && hi1 == 0)
10489 if (hi3 != hi1 || lo3 != lo1)
10490 return fold_build2 (BIT_IOR_EXPR, type,
10491 fold_build2 (BIT_AND_EXPR, type,
10492 TREE_OPERAND (arg0, 0),
10493 build_int_cst_wide (type,
10498 /* (X & Y) | Y is (X, Y). */
10499 if (TREE_CODE (arg0) == BIT_AND_EXPR
10500 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10501 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10502 /* (X & Y) | X is (Y, X). */
10503 if (TREE_CODE (arg0) == BIT_AND_EXPR
10504 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10505 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10506 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10507 /* X | (X & Y) is (Y, X). */
10508 if (TREE_CODE (arg1) == BIT_AND_EXPR
10509 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10510 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10511 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10512 /* X | (Y & X) is (Y, X). */
10513 if (TREE_CODE (arg1) == BIT_AND_EXPR
10514 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10515 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10516 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10518 t1 = distribute_bit_expr (code, type, arg0, arg1);
10519 if (t1 != NULL_TREE)
10522 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10524 This results in more efficient code for machines without a NAND
10525 instruction. Combine will canonicalize to the first form
10526 which will allow use of NAND instructions provided by the
10527 backend if they exist. */
10528 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10529 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10531 return fold_build1 (BIT_NOT_EXPR, type,
10532 build2 (BIT_AND_EXPR, type,
10533 fold_convert (type,
10534 TREE_OPERAND (arg0, 0)),
10535 fold_convert (type,
10536 TREE_OPERAND (arg1, 0))));
10539 /* See if this can be simplified into a rotate first. If that
10540 is unsuccessful continue in the association code. */
10544 if (integer_zerop (arg1))
10545 return non_lvalue (fold_convert (type, arg0));
10546 if (integer_all_onesp (arg1))
10547 return fold_build1 (BIT_NOT_EXPR, type, op0);
10548 if (operand_equal_p (arg0, arg1, 0))
10549 return omit_one_operand (type, integer_zero_node, arg0);
10551 /* ~X ^ X is -1. */
10552 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10553 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10555 t1 = fold_convert (type, integer_zero_node);
10556 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10557 return omit_one_operand (type, t1, arg1);
10560 /* X ^ ~X is -1. */
10561 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10564 t1 = fold_convert (type, integer_zero_node);
10565 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10566 return omit_one_operand (type, t1, arg0);
10569 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10570 with a constant, and the two constants have no bits in common,
10571 we should treat this as a BIT_IOR_EXPR since this may produce more
10572 simplifications. */
10573 if (TREE_CODE (arg0) == BIT_AND_EXPR
10574 && TREE_CODE (arg1) == BIT_AND_EXPR
10575 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10576 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10577 && integer_zerop (const_binop (BIT_AND_EXPR,
10578 TREE_OPERAND (arg0, 1),
10579 TREE_OPERAND (arg1, 1), 0)))
10581 code = BIT_IOR_EXPR;
10585 /* (X | Y) ^ X -> Y & ~ X*/
10586 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10589 tree t2 = TREE_OPERAND (arg0, 1);
10590 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10592 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10593 fold_convert (type, t1));
10597 /* (Y | X) ^ X -> Y & ~ X*/
10598 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10599 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10601 tree t2 = TREE_OPERAND (arg0, 0);
10602 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10604 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10605 fold_convert (type, t1));
10609 /* X ^ (X | Y) -> Y & ~ X*/
10610 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10611 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10613 tree t2 = TREE_OPERAND (arg1, 1);
10614 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10616 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10617 fold_convert (type, t1));
10621 /* X ^ (Y | X) -> Y & ~ X*/
10622 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10623 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10625 tree t2 = TREE_OPERAND (arg1, 0);
10626 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10628 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10629 fold_convert (type, t1));
10633 /* Convert ~X ^ ~Y to X ^ Y. */
10634 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10635 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10636 return fold_build2 (code, type,
10637 fold_convert (type, TREE_OPERAND (arg0, 0)),
10638 fold_convert (type, TREE_OPERAND (arg1, 0)));
10640 /* Convert ~X ^ C to X ^ ~C. */
10641 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10642 && TREE_CODE (arg1) == INTEGER_CST)
10643 return fold_build2 (code, type,
10644 fold_convert (type, TREE_OPERAND (arg0, 0)),
10645 fold_build1 (BIT_NOT_EXPR, type, arg1));
10647 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10648 if (TREE_CODE (arg0) == BIT_AND_EXPR
10649 && integer_onep (TREE_OPERAND (arg0, 1))
10650 && integer_onep (arg1))
10651 return fold_build2 (EQ_EXPR, type, arg0,
10652 build_int_cst (TREE_TYPE (arg0), 0));
10654 /* Fold (X & Y) ^ Y as ~X & Y. */
10655 if (TREE_CODE (arg0) == BIT_AND_EXPR
10656 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10658 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10659 return fold_build2 (BIT_AND_EXPR, type,
10660 fold_build1 (BIT_NOT_EXPR, type, tem),
10661 fold_convert (type, arg1));
10663 /* Fold (X & Y) ^ X as ~Y & X. */
10664 if (TREE_CODE (arg0) == BIT_AND_EXPR
10665 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10666 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10668 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10669 return fold_build2 (BIT_AND_EXPR, type,
10670 fold_build1 (BIT_NOT_EXPR, type, tem),
10671 fold_convert (type, arg1));
10673 /* Fold X ^ (X & Y) as X & ~Y. */
10674 if (TREE_CODE (arg1) == BIT_AND_EXPR
10675 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10677 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10678 return fold_build2 (BIT_AND_EXPR, type,
10679 fold_convert (type, arg0),
10680 fold_build1 (BIT_NOT_EXPR, type, tem));
10682 /* Fold X ^ (Y & X) as ~Y & X. */
10683 if (TREE_CODE (arg1) == BIT_AND_EXPR
10684 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10685 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10687 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10688 return fold_build2 (BIT_AND_EXPR, type,
10689 fold_build1 (BIT_NOT_EXPR, type, tem),
10690 fold_convert (type, arg0));
10693 /* See if this can be simplified into a rotate first. If that
10694 is unsuccessful continue in the association code. */
10698 if (integer_all_onesp (arg1))
10699 return non_lvalue (fold_convert (type, arg0));
10700 if (integer_zerop (arg1))
10701 return omit_one_operand (type, arg1, arg0);
10702 if (operand_equal_p (arg0, arg1, 0))
10703 return non_lvalue (fold_convert (type, arg0));
10705 /* ~X & X is always zero. */
10706 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10707 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10708 return omit_one_operand (type, integer_zero_node, arg1);
10710 /* X & ~X is always zero. */
10711 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10712 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10713 return omit_one_operand (type, integer_zero_node, arg0);
10715 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10716 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10717 && TREE_CODE (arg1) == INTEGER_CST
10718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10720 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10721 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10722 TREE_OPERAND (arg0, 0), tmp1);
10723 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10724 TREE_OPERAND (arg0, 1), tmp1);
10725 return fold_convert (type,
10726 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10730 /* (X | Y) & Y is (X, Y). */
10731 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10732 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10733 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10734 /* (X | Y) & X is (Y, X). */
10735 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10736 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10737 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10738 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10739 /* X & (X | Y) is (Y, X). */
10740 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10741 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10742 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10743 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10744 /* X & (Y | X) is (Y, X). */
10745 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10747 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10748 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10750 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10751 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10752 && integer_onep (TREE_OPERAND (arg0, 1))
10753 && integer_onep (arg1))
10755 tem = TREE_OPERAND (arg0, 0);
10756 return fold_build2 (EQ_EXPR, type,
10757 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10758 build_int_cst (TREE_TYPE (tem), 1)),
10759 build_int_cst (TREE_TYPE (tem), 0));
10761 /* Fold ~X & 1 as (X & 1) == 0. */
10762 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10763 && integer_onep (arg1))
10765 tem = TREE_OPERAND (arg0, 0);
10766 return fold_build2 (EQ_EXPR, type,
10767 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10768 build_int_cst (TREE_TYPE (tem), 1)),
10769 build_int_cst (TREE_TYPE (tem), 0));
10772 /* Fold (X ^ Y) & Y as ~X & Y. */
10773 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10774 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10776 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10777 return fold_build2 (BIT_AND_EXPR, type,
10778 fold_build1 (BIT_NOT_EXPR, type, tem),
10779 fold_convert (type, arg1));
10781 /* Fold (X ^ Y) & X as ~Y & X. */
10782 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10783 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10784 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10786 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10787 return fold_build2 (BIT_AND_EXPR, type,
10788 fold_build1 (BIT_NOT_EXPR, type, tem),
10789 fold_convert (type, arg1));
10791 /* Fold X & (X ^ Y) as X & ~Y. */
10792 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10793 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10795 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10796 return fold_build2 (BIT_AND_EXPR, type,
10797 fold_convert (type, arg0),
10798 fold_build1 (BIT_NOT_EXPR, type, tem));
10800 /* Fold X & (Y ^ X) as ~Y & X. */
10801 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10802 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10803 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10805 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10806 return fold_build2 (BIT_AND_EXPR, type,
10807 fold_build1 (BIT_NOT_EXPR, type, tem),
10808 fold_convert (type, arg0));
10811 t1 = distribute_bit_expr (code, type, arg0, arg1);
10812 if (t1 != NULL_TREE)
10814 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10815 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10816 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10819 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10821 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10822 && (~TREE_INT_CST_LOW (arg1)
10823 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10824 return fold_convert (type, TREE_OPERAND (arg0, 0));
10827 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10829 This results in more efficient code for machines without a NOR
10830 instruction. Combine will canonicalize to the first form
10831 which will allow use of NOR instructions provided by the
10832 backend if they exist. */
10833 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10834 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10836 return fold_build1 (BIT_NOT_EXPR, type,
10837 build2 (BIT_IOR_EXPR, type,
10838 fold_convert (type,
10839 TREE_OPERAND (arg0, 0)),
10840 fold_convert (type,
10841 TREE_OPERAND (arg1, 0))));
10844 /* If arg0 is derived from the address of an object or function, we may
10845 be able to fold this expression using the object or function's
10847 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10849 unsigned HOST_WIDE_INT modulus, residue;
10850 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10852 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10854 /* This works because modulus is a power of 2. If this weren't the
10855 case, we'd have to replace it by its greatest power-of-2
10856 divisor: modulus & -modulus. */
10858 return build_int_cst (type, residue & low);
10861 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10862 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10863 if the new mask might be further optimized. */
10864 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10865 || TREE_CODE (arg0) == RSHIFT_EXPR)
10866 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10867 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10868 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10869 < TYPE_PRECISION (TREE_TYPE (arg0))
10870 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10871 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10873 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10874 unsigned HOST_WIDE_INT mask
10875 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10876 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10877 tree shift_type = TREE_TYPE (arg0);
10879 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10880 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10881 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10882 && TYPE_PRECISION (TREE_TYPE (arg0))
10883 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10885 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10886 tree arg00 = TREE_OPERAND (arg0, 0);
10887 /* See if more bits can be proven as zero because of
10889 if (TREE_CODE (arg00) == NOP_EXPR
10890 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10892 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10893 if (TYPE_PRECISION (inner_type)
10894 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10895 && TYPE_PRECISION (inner_type) < prec)
10897 prec = TYPE_PRECISION (inner_type);
10898 /* See if we can shorten the right shift. */
10900 shift_type = inner_type;
10903 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10904 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10905 zerobits <<= prec - shiftc;
10906 /* For arithmetic shift if sign bit could be set, zerobits
10907 can contain actually sign bits, so no transformation is
10908 possible, unless MASK masks them all away. In that
10909 case the shift needs to be converted into logical shift. */
10910 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10911 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10913 if ((mask & zerobits) == 0)
10914 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10920 /* ((X << 16) & 0xff00) is (X, 0). */
10921 if ((mask & zerobits) == mask)
10922 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10924 newmask = mask | zerobits;
10925 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10929 /* Only do the transformation if NEWMASK is some integer
10931 for (prec = BITS_PER_UNIT;
10932 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10933 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10935 if (prec < HOST_BITS_PER_WIDE_INT
10936 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10938 if (shift_type != TREE_TYPE (arg0))
10940 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10941 fold_convert (shift_type,
10942 TREE_OPERAND (arg0, 0)),
10943 TREE_OPERAND (arg0, 1));
10944 tem = fold_convert (type, tem);
10948 return fold_build2 (BIT_AND_EXPR, type, tem,
10949 build_int_cst_type (TREE_TYPE (op1),
10958 /* Don't touch a floating-point divide by zero unless the mode
10959 of the constant can represent infinity. */
10960 if (TREE_CODE (arg1) == REAL_CST
10961 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10962 && real_zerop (arg1))
10965 /* Optimize A / A to 1.0 if we don't care about
10966 NaNs or Infinities. Skip the transformation
10967 for non-real operands. */
10968 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10969 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10970 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10971 && operand_equal_p (arg0, arg1, 0))
10973 tree r = build_real (TREE_TYPE (arg0), dconst1);
10975 return omit_two_operands (type, r, arg0, arg1);
10978 /* The complex version of the above A / A optimization. */
10979 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10980 && operand_equal_p (arg0, arg1, 0))
10982 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10983 if (! HONOR_NANS (TYPE_MODE (elem_type))
10984 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10986 tree r = build_real (elem_type, dconst1);
10987 /* omit_two_operands will call fold_convert for us. */
10988 return omit_two_operands (type, r, arg0, arg1);
10992 /* (-A) / (-B) -> A / B */
10993 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10994 return fold_build2 (RDIV_EXPR, type,
10995 TREE_OPERAND (arg0, 0),
10996 negate_expr (arg1));
10997 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10998 return fold_build2 (RDIV_EXPR, type,
10999 negate_expr (arg0),
11000 TREE_OPERAND (arg1, 0));
11002 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11003 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11004 && real_onep (arg1))
11005 return non_lvalue (fold_convert (type, arg0));
11007 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11008 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11009 && real_minus_onep (arg1))
11010 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11012 /* If ARG1 is a constant, we can convert this to a multiply by the
11013 reciprocal. This does not have the same rounding properties,
11014 so only do this if -freciprocal-math. We can actually
11015 always safely do it if ARG1 is a power of two, but it's hard to
11016 tell if it is or not in a portable manner. */
11017 if (TREE_CODE (arg1) == REAL_CST)
11019 if (flag_reciprocal_math
11020 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11022 return fold_build2 (MULT_EXPR, type, arg0, tem);
11023 /* Find the reciprocal if optimizing and the result is exact. */
11027 r = TREE_REAL_CST (arg1);
11028 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11030 tem = build_real (type, r);
11031 return fold_build2 (MULT_EXPR, type,
11032 fold_convert (type, arg0), tem);
11036 /* Convert A/B/C to A/(B*C). */
11037 if (flag_reciprocal_math
11038 && TREE_CODE (arg0) == RDIV_EXPR)
11039 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11040 fold_build2 (MULT_EXPR, type,
11041 TREE_OPERAND (arg0, 1), arg1));
11043 /* Convert A/(B/C) to (A/B)*C. */
11044 if (flag_reciprocal_math
11045 && TREE_CODE (arg1) == RDIV_EXPR)
11046 return fold_build2 (MULT_EXPR, type,
11047 fold_build2 (RDIV_EXPR, type, arg0,
11048 TREE_OPERAND (arg1, 0)),
11049 TREE_OPERAND (arg1, 1));
11051 /* Convert C1/(X*C2) into (C1/C2)/X. */
11052 if (flag_reciprocal_math
11053 && TREE_CODE (arg1) == MULT_EXPR
11054 && TREE_CODE (arg0) == REAL_CST
11055 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11057 tree tem = const_binop (RDIV_EXPR, arg0,
11058 TREE_OPERAND (arg1, 1), 0);
11060 return fold_build2 (RDIV_EXPR, type, tem,
11061 TREE_OPERAND (arg1, 0));
11064 if (flag_unsafe_math_optimizations)
11066 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11067 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11069 /* Optimize sin(x)/cos(x) as tan(x). */
11070 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11071 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11072 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11073 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11074 CALL_EXPR_ARG (arg1, 0), 0))
11076 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11078 if (tanfn != NULL_TREE)
11079 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11082 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11083 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11084 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11085 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11086 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11087 CALL_EXPR_ARG (arg1, 0), 0))
11089 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11091 if (tanfn != NULL_TREE)
11093 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11094 return fold_build2 (RDIV_EXPR, type,
11095 build_real (type, dconst1), tmp);
11099 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11100 NaNs or Infinities. */
11101 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11102 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11103 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11105 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11106 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11108 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11109 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11110 && operand_equal_p (arg00, arg01, 0))
11112 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11114 if (cosfn != NULL_TREE)
11115 return build_call_expr (cosfn, 1, arg00);
11119 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11120 NaNs or Infinities. */
11121 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11122 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11123 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11125 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11126 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11128 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11129 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11130 && operand_equal_p (arg00, arg01, 0))
11132 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11134 if (cosfn != NULL_TREE)
11136 tree tmp = build_call_expr (cosfn, 1, arg00);
11137 return fold_build2 (RDIV_EXPR, type,
11138 build_real (type, dconst1),
11144 /* Optimize pow(x,c)/x as pow(x,c-1). */
11145 if (fcode0 == BUILT_IN_POW
11146 || fcode0 == BUILT_IN_POWF
11147 || fcode0 == BUILT_IN_POWL)
11149 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11150 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11151 if (TREE_CODE (arg01) == REAL_CST
11152 && !TREE_OVERFLOW (arg01)
11153 && operand_equal_p (arg1, arg00, 0))
11155 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11159 c = TREE_REAL_CST (arg01);
11160 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11161 arg = build_real (type, c);
11162 return build_call_expr (powfn, 2, arg1, arg);
11166 /* Optimize a/root(b/c) into a*root(c/b). */
11167 if (BUILTIN_ROOT_P (fcode1))
11169 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11171 if (TREE_CODE (rootarg) == RDIV_EXPR)
11173 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11174 tree b = TREE_OPERAND (rootarg, 0);
11175 tree c = TREE_OPERAND (rootarg, 1);
11177 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11179 tmp = build_call_expr (rootfn, 1, tmp);
11180 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11184 /* Optimize x/expN(y) into x*expN(-y). */
11185 if (BUILTIN_EXPONENT_P (fcode1))
11187 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11188 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11189 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11190 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11193 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11194 if (fcode1 == BUILT_IN_POW
11195 || fcode1 == BUILT_IN_POWF
11196 || fcode1 == BUILT_IN_POWL)
11198 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11199 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11200 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11201 tree neg11 = fold_convert (type, negate_expr (arg11));
11202 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11203 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11208 case TRUNC_DIV_EXPR:
11209 case FLOOR_DIV_EXPR:
11210 /* Simplify A / (B << N) where A and B are positive and B is
11211 a power of 2, to A >> (N + log2(B)). */
11212 strict_overflow_p = false;
11213 if (TREE_CODE (arg1) == LSHIFT_EXPR
11214 && (TYPE_UNSIGNED (type)
11215 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11217 tree sval = TREE_OPERAND (arg1, 0);
11218 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11220 tree sh_cnt = TREE_OPERAND (arg1, 1);
11221 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11223 if (strict_overflow_p)
11224 fold_overflow_warning (("assuming signed overflow does not "
11225 "occur when simplifying A / (B << N)"),
11226 WARN_STRICT_OVERFLOW_MISC);
11228 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11229 sh_cnt, build_int_cst (NULL_TREE, pow2));
11230 return fold_build2 (RSHIFT_EXPR, type,
11231 fold_convert (type, arg0), sh_cnt);
11235 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11236 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11237 if (INTEGRAL_TYPE_P (type)
11238 && TYPE_UNSIGNED (type)
11239 && code == FLOOR_DIV_EXPR)
11240 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11244 case ROUND_DIV_EXPR:
11245 case CEIL_DIV_EXPR:
11246 case EXACT_DIV_EXPR:
11247 if (integer_onep (arg1))
11248 return non_lvalue (fold_convert (type, arg0));
11249 if (integer_zerop (arg1))
11251 /* X / -1 is -X. */
11252 if (!TYPE_UNSIGNED (type)
11253 && TREE_CODE (arg1) == INTEGER_CST
11254 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11255 && TREE_INT_CST_HIGH (arg1) == -1)
11256 return fold_convert (type, negate_expr (arg0));
11258 /* Convert -A / -B to A / B when the type is signed and overflow is
11260 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11261 && TREE_CODE (arg0) == NEGATE_EXPR
11262 && negate_expr_p (arg1))
11264 if (INTEGRAL_TYPE_P (type))
11265 fold_overflow_warning (("assuming signed overflow does not occur "
11266 "when distributing negation across "
11268 WARN_STRICT_OVERFLOW_MISC);
11269 return fold_build2 (code, type,
11270 fold_convert (type, TREE_OPERAND (arg0, 0)),
11271 negate_expr (arg1));
11273 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11274 && TREE_CODE (arg1) == NEGATE_EXPR
11275 && negate_expr_p (arg0))
11277 if (INTEGRAL_TYPE_P (type))
11278 fold_overflow_warning (("assuming signed overflow does not occur "
11279 "when distributing negation across "
11281 WARN_STRICT_OVERFLOW_MISC);
11282 return fold_build2 (code, type, negate_expr (arg0),
11283 TREE_OPERAND (arg1, 0));
11286 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11287 operation, EXACT_DIV_EXPR.
11289 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11290 At one time others generated faster code, it's not clear if they do
11291 after the last round to changes to the DIV code in expmed.c. */
11292 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11293 && multiple_of_p (type, arg0, arg1))
11294 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11296 strict_overflow_p = false;
11297 if (TREE_CODE (arg1) == INTEGER_CST
11298 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11299 &strict_overflow_p)))
11301 if (strict_overflow_p)
11302 fold_overflow_warning (("assuming signed overflow does not occur "
11303 "when simplifying division"),
11304 WARN_STRICT_OVERFLOW_MISC);
11305 return fold_convert (type, tem);
11310 case CEIL_MOD_EXPR:
11311 case FLOOR_MOD_EXPR:
11312 case ROUND_MOD_EXPR:
11313 case TRUNC_MOD_EXPR:
11314 /* X % 1 is always zero, but be sure to preserve any side
11316 if (integer_onep (arg1))
11317 return omit_one_operand (type, integer_zero_node, arg0);
11319 /* X % 0, return X % 0 unchanged so that we can get the
11320 proper warnings and errors. */
11321 if (integer_zerop (arg1))
11324 /* 0 % X is always zero, but be sure to preserve any side
11325 effects in X. Place this after checking for X == 0. */
11326 if (integer_zerop (arg0))
11327 return omit_one_operand (type, integer_zero_node, arg1);
11329 /* X % -1 is zero. */
11330 if (!TYPE_UNSIGNED (type)
11331 && TREE_CODE (arg1) == INTEGER_CST
11332 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11333 && TREE_INT_CST_HIGH (arg1) == -1)
11334 return omit_one_operand (type, integer_zero_node, arg0);
11336 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11337 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11338 strict_overflow_p = false;
11339 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11340 && (TYPE_UNSIGNED (type)
11341 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11344 /* Also optimize A % (C << N) where C is a power of 2,
11345 to A & ((C << N) - 1). */
11346 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11347 c = TREE_OPERAND (arg1, 0);
11349 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11351 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11352 build_int_cst (TREE_TYPE (arg1), 1));
11353 if (strict_overflow_p)
11354 fold_overflow_warning (("assuming signed overflow does not "
11355 "occur when simplifying "
11356 "X % (power of two)"),
11357 WARN_STRICT_OVERFLOW_MISC);
11358 return fold_build2 (BIT_AND_EXPR, type,
11359 fold_convert (type, arg0),
11360 fold_convert (type, mask));
11364 /* X % -C is the same as X % C. */
11365 if (code == TRUNC_MOD_EXPR
11366 && !TYPE_UNSIGNED (type)
11367 && TREE_CODE (arg1) == INTEGER_CST
11368 && !TREE_OVERFLOW (arg1)
11369 && TREE_INT_CST_HIGH (arg1) < 0
11370 && !TYPE_OVERFLOW_TRAPS (type)
11371 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11372 && !sign_bit_p (arg1, arg1))
11373 return fold_build2 (code, type, fold_convert (type, arg0),
11374 fold_convert (type, negate_expr (arg1)));
11376 /* X % -Y is the same as X % Y. */
11377 if (code == TRUNC_MOD_EXPR
11378 && !TYPE_UNSIGNED (type)
11379 && TREE_CODE (arg1) == NEGATE_EXPR
11380 && !TYPE_OVERFLOW_TRAPS (type))
11381 return fold_build2 (code, type, fold_convert (type, arg0),
11382 fold_convert (type, TREE_OPERAND (arg1, 0)));
11384 if (TREE_CODE (arg1) == INTEGER_CST
11385 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11386 &strict_overflow_p)))
11388 if (strict_overflow_p)
11389 fold_overflow_warning (("assuming signed overflow does not occur "
11390 "when simplifying modulus"),
11391 WARN_STRICT_OVERFLOW_MISC);
11392 return fold_convert (type, tem);
11399 if (integer_all_onesp (arg0))
11400 return omit_one_operand (type, arg0, arg1);
11404 /* Optimize -1 >> x for arithmetic right shifts. */
11405 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11406 return omit_one_operand (type, arg0, arg1);
11407 /* ... fall through ... */
11411 if (integer_zerop (arg1))
11412 return non_lvalue (fold_convert (type, arg0));
11413 if (integer_zerop (arg0))
11414 return omit_one_operand (type, arg0, arg1);
11416 /* Since negative shift count is not well-defined,
11417 don't try to compute it in the compiler. */
11418 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11421 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11422 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11423 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11424 && host_integerp (TREE_OPERAND (arg0, 1), false)
11425 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11427 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11428 + TREE_INT_CST_LOW (arg1));
11430 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11431 being well defined. */
11432 if (low >= TYPE_PRECISION (type))
11434 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11435 low = low % TYPE_PRECISION (type);
11436 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11437 return build_int_cst (type, 0);
11439 low = TYPE_PRECISION (type) - 1;
11442 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11443 build_int_cst (type, low));
11446 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11447 into x & ((unsigned)-1 >> c) for unsigned types. */
11448 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11449 || (TYPE_UNSIGNED (type)
11450 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11451 && host_integerp (arg1, false)
11452 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11453 && host_integerp (TREE_OPERAND (arg0, 1), false)
11454 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11456 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11457 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11463 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11465 lshift = build_int_cst (type, -1);
11466 lshift = int_const_binop (code, lshift, arg1, 0);
11468 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11472 /* Rewrite an LROTATE_EXPR by a constant into an
11473 RROTATE_EXPR by a new constant. */
11474 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11476 tree tem = build_int_cst (TREE_TYPE (arg1),
11477 TYPE_PRECISION (type));
11478 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11479 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11482 /* If we have a rotate of a bit operation with the rotate count and
11483 the second operand of the bit operation both constant,
11484 permute the two operations. */
11485 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11486 && (TREE_CODE (arg0) == BIT_AND_EXPR
11487 || TREE_CODE (arg0) == BIT_IOR_EXPR
11488 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11489 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11490 return fold_build2 (TREE_CODE (arg0), type,
11491 fold_build2 (code, type,
11492 TREE_OPERAND (arg0, 0), arg1),
11493 fold_build2 (code, type,
11494 TREE_OPERAND (arg0, 1), arg1));
11496 /* Two consecutive rotates adding up to the precision of the
11497 type can be ignored. */
11498 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11499 && TREE_CODE (arg0) == RROTATE_EXPR
11500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11501 && TREE_INT_CST_HIGH (arg1) == 0
11502 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11503 && ((TREE_INT_CST_LOW (arg1)
11504 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11505 == (unsigned int) TYPE_PRECISION (type)))
11506 return TREE_OPERAND (arg0, 0);
11508 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11509 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11510 if the latter can be further optimized. */
11511 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11512 && TREE_CODE (arg0) == BIT_AND_EXPR
11513 && TREE_CODE (arg1) == INTEGER_CST
11514 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11516 tree mask = fold_build2 (code, type,
11517 fold_convert (type, TREE_OPERAND (arg0, 1)),
11519 tree shift = fold_build2 (code, type,
11520 fold_convert (type, TREE_OPERAND (arg0, 0)),
11522 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11530 if (operand_equal_p (arg0, arg1, 0))
11531 return omit_one_operand (type, arg0, arg1);
11532 if (INTEGRAL_TYPE_P (type)
11533 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11534 return omit_one_operand (type, arg1, arg0);
11535 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11541 if (operand_equal_p (arg0, arg1, 0))
11542 return omit_one_operand (type, arg0, arg1);
11543 if (INTEGRAL_TYPE_P (type)
11544 && TYPE_MAX_VALUE (type)
11545 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11546 return omit_one_operand (type, arg1, arg0);
11547 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11552 case TRUTH_ANDIF_EXPR:
11553 /* Note that the operands of this must be ints
11554 and their values must be 0 or 1.
11555 ("true" is a fixed value perhaps depending on the language.) */
11556 /* If first arg is constant zero, return it. */
11557 if (integer_zerop (arg0))
11558 return fold_convert (type, arg0);
11559 case TRUTH_AND_EXPR:
11560 /* If either arg is constant true, drop it. */
11561 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11562 return non_lvalue (fold_convert (type, arg1));
11563 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11564 /* Preserve sequence points. */
11565 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11566 return non_lvalue (fold_convert (type, arg0));
11567 /* If second arg is constant zero, result is zero, but first arg
11568 must be evaluated. */
11569 if (integer_zerop (arg1))
11570 return omit_one_operand (type, arg1, arg0);
11571 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11572 case will be handled here. */
11573 if (integer_zerop (arg0))
11574 return omit_one_operand (type, arg0, arg1);
11576 /* !X && X is always false. */
11577 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11578 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11579 return omit_one_operand (type, integer_zero_node, arg1);
11580 /* X && !X is always false. */
11581 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11582 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11583 return omit_one_operand (type, integer_zero_node, arg0);
11585 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11586 means A >= Y && A != MAX, but in this case we know that
11589 if (!TREE_SIDE_EFFECTS (arg0)
11590 && !TREE_SIDE_EFFECTS (arg1))
11592 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11593 if (tem && !operand_equal_p (tem, arg0, 0))
11594 return fold_build2 (code, type, tem, arg1);
11596 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11597 if (tem && !operand_equal_p (tem, arg1, 0))
11598 return fold_build2 (code, type, arg0, tem);
11602 /* We only do these simplifications if we are optimizing. */
11606 /* Check for things like (A || B) && (A || C). We can convert this
11607 to A || (B && C). Note that either operator can be any of the four
11608 truth and/or operations and the transformation will still be
11609 valid. Also note that we only care about order for the
11610 ANDIF and ORIF operators. If B contains side effects, this
11611 might change the truth-value of A. */
11612 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11613 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11614 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11615 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11616 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11617 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11619 tree a00 = TREE_OPERAND (arg0, 0);
11620 tree a01 = TREE_OPERAND (arg0, 1);
11621 tree a10 = TREE_OPERAND (arg1, 0);
11622 tree a11 = TREE_OPERAND (arg1, 1);
11623 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11624 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11625 && (code == TRUTH_AND_EXPR
11626 || code == TRUTH_OR_EXPR));
11628 if (operand_equal_p (a00, a10, 0))
11629 return fold_build2 (TREE_CODE (arg0), type, a00,
11630 fold_build2 (code, type, a01, a11));
11631 else if (commutative && operand_equal_p (a00, a11, 0))
11632 return fold_build2 (TREE_CODE (arg0), type, a00,
11633 fold_build2 (code, type, a01, a10));
11634 else if (commutative && operand_equal_p (a01, a10, 0))
11635 return fold_build2 (TREE_CODE (arg0), type, a01,
11636 fold_build2 (code, type, a00, a11));
11638 /* This case if tricky because we must either have commutative
11639 operators or else A10 must not have side-effects. */
11641 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11642 && operand_equal_p (a01, a11, 0))
11643 return fold_build2 (TREE_CODE (arg0), type,
11644 fold_build2 (code, type, a00, a10),
11648 /* See if we can build a range comparison. */
11649 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11652 /* Check for the possibility of merging component references. If our
11653 lhs is another similar operation, try to merge its rhs with our
11654 rhs. Then try to merge our lhs and rhs. */
11655 if (TREE_CODE (arg0) == code
11656 && 0 != (tem = fold_truthop (code, type,
11657 TREE_OPERAND (arg0, 1), arg1)))
11658 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11660 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11665 case TRUTH_ORIF_EXPR:
11666 /* Note that the operands of this must be ints
11667 and their values must be 0 or true.
11668 ("true" is a fixed value perhaps depending on the language.) */
11669 /* If first arg is constant true, return it. */
11670 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11671 return fold_convert (type, arg0);
11672 case TRUTH_OR_EXPR:
11673 /* If either arg is constant zero, drop it. */
11674 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11675 return non_lvalue (fold_convert (type, arg1));
11676 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11677 /* Preserve sequence points. */
11678 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11679 return non_lvalue (fold_convert (type, arg0));
11680 /* If second arg is constant true, result is true, but we must
11681 evaluate first arg. */
11682 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11683 return omit_one_operand (type, arg1, arg0);
11684 /* Likewise for first arg, but note this only occurs here for
11686 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11687 return omit_one_operand (type, arg0, arg1);
11689 /* !X || X is always true. */
11690 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11691 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11692 return omit_one_operand (type, integer_one_node, arg1);
11693 /* X || !X is always true. */
11694 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11695 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11696 return omit_one_operand (type, integer_one_node, arg0);
11700 case TRUTH_XOR_EXPR:
11701 /* If the second arg is constant zero, drop it. */
11702 if (integer_zerop (arg1))
11703 return non_lvalue (fold_convert (type, arg0));
11704 /* If the second arg is constant true, this is a logical inversion. */
11705 if (integer_onep (arg1))
11707 /* Only call invert_truthvalue if operand is a truth value. */
11708 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11709 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11711 tem = invert_truthvalue (arg0);
11712 return non_lvalue (fold_convert (type, tem));
11714 /* Identical arguments cancel to zero. */
11715 if (operand_equal_p (arg0, arg1, 0))
11716 return omit_one_operand (type, integer_zero_node, arg0);
11718 /* !X ^ X is always true. */
11719 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11720 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11721 return omit_one_operand (type, integer_one_node, arg1);
11723 /* X ^ !X is always true. */
11724 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11725 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11726 return omit_one_operand (type, integer_one_node, arg0);
11732 tem = fold_comparison (code, type, op0, op1);
11733 if (tem != NULL_TREE)
11736 /* bool_var != 0 becomes bool_var. */
11737 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11738 && code == NE_EXPR)
11739 return non_lvalue (fold_convert (type, arg0));
11741 /* bool_var == 1 becomes bool_var. */
11742 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11743 && code == EQ_EXPR)
11744 return non_lvalue (fold_convert (type, arg0));
11746 /* bool_var != 1 becomes !bool_var. */
11747 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11748 && code == NE_EXPR)
11749 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11751 /* bool_var == 0 becomes !bool_var. */
11752 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11753 && code == EQ_EXPR)
11754 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11756 /* If this is an equality comparison of the address of two non-weak,
11757 unaliased symbols neither of which are extern (since we do not
11758 have access to attributes for externs), then we know the result. */
11759 if (TREE_CODE (arg0) == ADDR_EXPR
11760 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11761 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11762 && ! lookup_attribute ("alias",
11763 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11764 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11765 && TREE_CODE (arg1) == ADDR_EXPR
11766 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11767 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11768 && ! lookup_attribute ("alias",
11769 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11770 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11772 /* We know that we're looking at the address of two
11773 non-weak, unaliased, static _DECL nodes.
11775 It is both wasteful and incorrect to call operand_equal_p
11776 to compare the two ADDR_EXPR nodes. It is wasteful in that
11777 all we need to do is test pointer equality for the arguments
11778 to the two ADDR_EXPR nodes. It is incorrect to use
11779 operand_equal_p as that function is NOT equivalent to a
11780 C equality test. It can in fact return false for two
11781 objects which would test as equal using the C equality
11783 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11784 return constant_boolean_node (equal
11785 ? code == EQ_EXPR : code != EQ_EXPR,
11789 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11790 a MINUS_EXPR of a constant, we can convert it into a comparison with
11791 a revised constant as long as no overflow occurs. */
11792 if (TREE_CODE (arg1) == INTEGER_CST
11793 && (TREE_CODE (arg0) == PLUS_EXPR
11794 || TREE_CODE (arg0) == MINUS_EXPR)
11795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11796 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11797 ? MINUS_EXPR : PLUS_EXPR,
11798 fold_convert (TREE_TYPE (arg0), arg1),
11799 TREE_OPERAND (arg0, 1), 0))
11800 && !TREE_OVERFLOW (tem))
11801 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11803 /* Similarly for a NEGATE_EXPR. */
11804 if (TREE_CODE (arg0) == NEGATE_EXPR
11805 && TREE_CODE (arg1) == INTEGER_CST
11806 && 0 != (tem = negate_expr (arg1))
11807 && TREE_CODE (tem) == INTEGER_CST
11808 && !TREE_OVERFLOW (tem))
11809 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11811 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11812 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11813 && TREE_CODE (arg1) == INTEGER_CST
11814 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11815 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11816 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11817 fold_convert (TREE_TYPE (arg0), arg1),
11818 TREE_OPERAND (arg0, 1)));
11820 /* Transform comparisons of the form X +- C CMP X. */
11821 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11822 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11823 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11824 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11825 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11827 tree cst = TREE_OPERAND (arg0, 1);
11829 if (code == EQ_EXPR
11830 && !integer_zerop (cst))
11831 return omit_two_operands (type, boolean_false_node,
11832 TREE_OPERAND (arg0, 0), arg1);
11834 return omit_two_operands (type, boolean_true_node,
11835 TREE_OPERAND (arg0, 0), arg1);
11838 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11839 for !=. Don't do this for ordered comparisons due to overflow. */
11840 if (TREE_CODE (arg0) == MINUS_EXPR
11841 && integer_zerop (arg1))
11842 return fold_build2 (code, type,
11843 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11845 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11846 if (TREE_CODE (arg0) == ABS_EXPR
11847 && (integer_zerop (arg1) || real_zerop (arg1)))
11848 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11850 /* If this is an EQ or NE comparison with zero and ARG0 is
11851 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11852 two operations, but the latter can be done in one less insn
11853 on machines that have only two-operand insns or on which a
11854 constant cannot be the first operand. */
11855 if (TREE_CODE (arg0) == BIT_AND_EXPR
11856 && integer_zerop (arg1))
11858 tree arg00 = TREE_OPERAND (arg0, 0);
11859 tree arg01 = TREE_OPERAND (arg0, 1);
11860 if (TREE_CODE (arg00) == LSHIFT_EXPR
11861 && integer_onep (TREE_OPERAND (arg00, 0)))
11863 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11864 arg01, TREE_OPERAND (arg00, 1));
11865 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11866 build_int_cst (TREE_TYPE (arg0), 1));
11867 return fold_build2 (code, type,
11868 fold_convert (TREE_TYPE (arg1), tem), arg1);
11870 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11871 && integer_onep (TREE_OPERAND (arg01, 0)))
11873 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11874 arg00, TREE_OPERAND (arg01, 1));
11875 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11876 build_int_cst (TREE_TYPE (arg0), 1));
11877 return fold_build2 (code, type,
11878 fold_convert (TREE_TYPE (arg1), tem), arg1);
11882 /* If this is an NE or EQ comparison of zero against the result of a
11883 signed MOD operation whose second operand is a power of 2, make
11884 the MOD operation unsigned since it is simpler and equivalent. */
11885 if (integer_zerop (arg1)
11886 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11887 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11888 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11889 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11890 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11891 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11893 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11894 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11895 fold_convert (newtype,
11896 TREE_OPERAND (arg0, 0)),
11897 fold_convert (newtype,
11898 TREE_OPERAND (arg0, 1)));
11900 return fold_build2 (code, type, newmod,
11901 fold_convert (newtype, arg1));
11904 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11905 C1 is a valid shift constant, and C2 is a power of two, i.e.
11907 if (TREE_CODE (arg0) == BIT_AND_EXPR
11908 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11909 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11911 && integer_pow2p (TREE_OPERAND (arg0, 1))
11912 && integer_zerop (arg1))
11914 tree itype = TREE_TYPE (arg0);
11915 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11916 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11918 /* Check for a valid shift count. */
11919 if (TREE_INT_CST_HIGH (arg001) == 0
11920 && TREE_INT_CST_LOW (arg001) < prec)
11922 tree arg01 = TREE_OPERAND (arg0, 1);
11923 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11924 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11925 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11926 can be rewritten as (X & (C2 << C1)) != 0. */
11927 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11929 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11930 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11931 return fold_build2 (code, type, tem, arg1);
11933 /* Otherwise, for signed (arithmetic) shifts,
11934 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11935 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11936 else if (!TYPE_UNSIGNED (itype))
11937 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11938 arg000, build_int_cst (itype, 0));
11939 /* Otherwise, of unsigned (logical) shifts,
11940 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11941 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11943 return omit_one_operand (type,
11944 code == EQ_EXPR ? integer_one_node
11945 : integer_zero_node,
11950 /* If this is an NE comparison of zero with an AND of one, remove the
11951 comparison since the AND will give the correct value. */
11952 if (code == NE_EXPR
11953 && integer_zerop (arg1)
11954 && TREE_CODE (arg0) == BIT_AND_EXPR
11955 && integer_onep (TREE_OPERAND (arg0, 1)))
11956 return fold_convert (type, arg0);
11958 /* If we have (A & C) == C where C is a power of 2, convert this into
11959 (A & C) != 0. Similarly for NE_EXPR. */
11960 if (TREE_CODE (arg0) == BIT_AND_EXPR
11961 && integer_pow2p (TREE_OPERAND (arg0, 1))
11962 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11963 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11964 arg0, fold_convert (TREE_TYPE (arg0),
11965 integer_zero_node));
11967 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11968 bit, then fold the expression into A < 0 or A >= 0. */
11969 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11973 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11974 Similarly for NE_EXPR. */
11975 if (TREE_CODE (arg0) == BIT_AND_EXPR
11976 && TREE_CODE (arg1) == INTEGER_CST
11977 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11979 tree notc = fold_build1 (BIT_NOT_EXPR,
11980 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11981 TREE_OPERAND (arg0, 1));
11982 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11984 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11985 if (integer_nonzerop (dandnotc))
11986 return omit_one_operand (type, rslt, arg0);
11989 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11990 Similarly for NE_EXPR. */
11991 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11992 && TREE_CODE (arg1) == INTEGER_CST
11993 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11995 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11996 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11997 TREE_OPERAND (arg0, 1), notd);
11998 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11999 if (integer_nonzerop (candnotd))
12000 return omit_one_operand (type, rslt, arg0);
12003 /* Optimize comparisons of strlen vs zero to a compare of the
12004 first character of the string vs zero. To wit,
12005 strlen(ptr) == 0 => *ptr == 0
12006 strlen(ptr) != 0 => *ptr != 0
12007 Other cases should reduce to one of these two (or a constant)
12008 due to the return value of strlen being unsigned. */
12009 if (TREE_CODE (arg0) == CALL_EXPR
12010 && integer_zerop (arg1))
12012 tree fndecl = get_callee_fndecl (arg0);
12015 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12016 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12017 && call_expr_nargs (arg0) == 1
12018 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12020 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12021 return fold_build2 (code, type, iref,
12022 build_int_cst (TREE_TYPE (iref), 0));
12026 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12027 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12028 if (TREE_CODE (arg0) == RSHIFT_EXPR
12029 && integer_zerop (arg1)
12030 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12032 tree arg00 = TREE_OPERAND (arg0, 0);
12033 tree arg01 = TREE_OPERAND (arg0, 1);
12034 tree itype = TREE_TYPE (arg00);
12035 if (TREE_INT_CST_HIGH (arg01) == 0
12036 && TREE_INT_CST_LOW (arg01)
12037 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12039 if (TYPE_UNSIGNED (itype))
12041 itype = signed_type_for (itype);
12042 arg00 = fold_convert (itype, arg00);
12044 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12045 type, arg00, build_int_cst (itype, 0));
12049 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12050 if (integer_zerop (arg1)
12051 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12052 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12053 TREE_OPERAND (arg0, 1));
12055 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12056 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12057 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12058 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12059 build_int_cst (TREE_TYPE (arg1), 0));
12060 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12061 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12063 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12064 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12065 build_int_cst (TREE_TYPE (arg1), 0));
12067 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12068 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12069 && TREE_CODE (arg1) == INTEGER_CST
12070 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12071 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12072 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12073 TREE_OPERAND (arg0, 1), arg1));
12075 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12076 (X & C) == 0 when C is a single bit. */
12077 if (TREE_CODE (arg0) == BIT_AND_EXPR
12078 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12079 && integer_zerop (arg1)
12080 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12082 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12083 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12084 TREE_OPERAND (arg0, 1));
12085 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12089 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12090 constant C is a power of two, i.e. a single bit. */
12091 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12092 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12093 && integer_zerop (arg1)
12094 && integer_pow2p (TREE_OPERAND (arg0, 1))
12095 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12096 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12098 tree arg00 = TREE_OPERAND (arg0, 0);
12099 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12100 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12103 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12104 when is C is a power of two, i.e. a single bit. */
12105 if (TREE_CODE (arg0) == BIT_AND_EXPR
12106 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12107 && integer_zerop (arg1)
12108 && integer_pow2p (TREE_OPERAND (arg0, 1))
12109 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12110 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12112 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12113 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12114 arg000, TREE_OPERAND (arg0, 1));
12115 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12116 tem, build_int_cst (TREE_TYPE (tem), 0));
12119 if (integer_zerop (arg1)
12120 && tree_expr_nonzero_p (arg0))
12122 tree res = constant_boolean_node (code==NE_EXPR, type);
12123 return omit_one_operand (type, res, arg0);
12126 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12127 if (TREE_CODE (arg0) == NEGATE_EXPR
12128 && TREE_CODE (arg1) == NEGATE_EXPR)
12129 return fold_build2 (code, type,
12130 TREE_OPERAND (arg0, 0),
12131 TREE_OPERAND (arg1, 0));
12133 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12134 if (TREE_CODE (arg0) == BIT_AND_EXPR
12135 && TREE_CODE (arg1) == BIT_AND_EXPR)
12137 tree arg00 = TREE_OPERAND (arg0, 0);
12138 tree arg01 = TREE_OPERAND (arg0, 1);
12139 tree arg10 = TREE_OPERAND (arg1, 0);
12140 tree arg11 = TREE_OPERAND (arg1, 1);
12141 tree itype = TREE_TYPE (arg0);
12143 if (operand_equal_p (arg01, arg11, 0))
12144 return fold_build2 (code, type,
12145 fold_build2 (BIT_AND_EXPR, itype,
12146 fold_build2 (BIT_XOR_EXPR, itype,
12149 build_int_cst (itype, 0));
12151 if (operand_equal_p (arg01, arg10, 0))
12152 return fold_build2 (code, type,
12153 fold_build2 (BIT_AND_EXPR, itype,
12154 fold_build2 (BIT_XOR_EXPR, itype,
12157 build_int_cst (itype, 0));
12159 if (operand_equal_p (arg00, arg11, 0))
12160 return fold_build2 (code, type,
12161 fold_build2 (BIT_AND_EXPR, itype,
12162 fold_build2 (BIT_XOR_EXPR, itype,
12165 build_int_cst (itype, 0));
12167 if (operand_equal_p (arg00, arg10, 0))
12168 return fold_build2 (code, type,
12169 fold_build2 (BIT_AND_EXPR, itype,
12170 fold_build2 (BIT_XOR_EXPR, itype,
12173 build_int_cst (itype, 0));
12176 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12177 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12179 tree arg00 = TREE_OPERAND (arg0, 0);
12180 tree arg01 = TREE_OPERAND (arg0, 1);
12181 tree arg10 = TREE_OPERAND (arg1, 0);
12182 tree arg11 = TREE_OPERAND (arg1, 1);
12183 tree itype = TREE_TYPE (arg0);
12185 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12186 operand_equal_p guarantees no side-effects so we don't need
12187 to use omit_one_operand on Z. */
12188 if (operand_equal_p (arg01, arg11, 0))
12189 return fold_build2 (code, type, arg00, arg10);
12190 if (operand_equal_p (arg01, arg10, 0))
12191 return fold_build2 (code, type, arg00, arg11);
12192 if (operand_equal_p (arg00, arg11, 0))
12193 return fold_build2 (code, type, arg01, arg10);
12194 if (operand_equal_p (arg00, arg10, 0))
12195 return fold_build2 (code, type, arg01, arg11);
12197 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12198 if (TREE_CODE (arg01) == INTEGER_CST
12199 && TREE_CODE (arg11) == INTEGER_CST)
12200 return fold_build2 (code, type,
12201 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12202 fold_build2 (BIT_XOR_EXPR, itype,
12207 /* Attempt to simplify equality/inequality comparisons of complex
12208 values. Only lower the comparison if the result is known or
12209 can be simplified to a single scalar comparison. */
12210 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12211 || TREE_CODE (arg0) == COMPLEX_CST)
12212 && (TREE_CODE (arg1) == COMPLEX_EXPR
12213 || TREE_CODE (arg1) == COMPLEX_CST))
12215 tree real0, imag0, real1, imag1;
12218 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12220 real0 = TREE_OPERAND (arg0, 0);
12221 imag0 = TREE_OPERAND (arg0, 1);
12225 real0 = TREE_REALPART (arg0);
12226 imag0 = TREE_IMAGPART (arg0);
12229 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12231 real1 = TREE_OPERAND (arg1, 0);
12232 imag1 = TREE_OPERAND (arg1, 1);
12236 real1 = TREE_REALPART (arg1);
12237 imag1 = TREE_IMAGPART (arg1);
12240 rcond = fold_binary (code, type, real0, real1);
12241 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12243 if (integer_zerop (rcond))
12245 if (code == EQ_EXPR)
12246 return omit_two_operands (type, boolean_false_node,
12248 return fold_build2 (NE_EXPR, type, imag0, imag1);
12252 if (code == NE_EXPR)
12253 return omit_two_operands (type, boolean_true_node,
12255 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12259 icond = fold_binary (code, type, imag0, imag1);
12260 if (icond && TREE_CODE (icond) == INTEGER_CST)
12262 if (integer_zerop (icond))
12264 if (code == EQ_EXPR)
12265 return omit_two_operands (type, boolean_false_node,
12267 return fold_build2 (NE_EXPR, type, real0, real1);
12271 if (code == NE_EXPR)
12272 return omit_two_operands (type, boolean_true_node,
12274 return fold_build2 (EQ_EXPR, type, real0, real1);
12285 tem = fold_comparison (code, type, op0, op1);
12286 if (tem != NULL_TREE)
12289 /* Transform comparisons of the form X +- C CMP X. */
12290 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12291 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12292 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12293 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12294 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12295 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12297 tree arg01 = TREE_OPERAND (arg0, 1);
12298 enum tree_code code0 = TREE_CODE (arg0);
12301 if (TREE_CODE (arg01) == REAL_CST)
12302 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12304 is_positive = tree_int_cst_sgn (arg01);
12306 /* (X - c) > X becomes false. */
12307 if (code == GT_EXPR
12308 && ((code0 == MINUS_EXPR && is_positive >= 0)
12309 || (code0 == PLUS_EXPR && is_positive <= 0)))
12311 if (TREE_CODE (arg01) == INTEGER_CST
12312 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12313 fold_overflow_warning (("assuming signed overflow does not "
12314 "occur when assuming that (X - c) > X "
12315 "is always false"),
12316 WARN_STRICT_OVERFLOW_ALL);
12317 return constant_boolean_node (0, type);
12320 /* Likewise (X + c) < X becomes false. */
12321 if (code == LT_EXPR
12322 && ((code0 == PLUS_EXPR && is_positive >= 0)
12323 || (code0 == MINUS_EXPR && is_positive <= 0)))
12325 if (TREE_CODE (arg01) == INTEGER_CST
12326 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12327 fold_overflow_warning (("assuming signed overflow does not "
12328 "occur when assuming that "
12329 "(X + c) < X is always false"),
12330 WARN_STRICT_OVERFLOW_ALL);
12331 return constant_boolean_node (0, type);
12334 /* Convert (X - c) <= X to true. */
12335 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12337 && ((code0 == MINUS_EXPR && is_positive >= 0)
12338 || (code0 == PLUS_EXPR && is_positive <= 0)))
12340 if (TREE_CODE (arg01) == INTEGER_CST
12341 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12342 fold_overflow_warning (("assuming signed overflow does not "
12343 "occur when assuming that "
12344 "(X - c) <= X is always true"),
12345 WARN_STRICT_OVERFLOW_ALL);
12346 return constant_boolean_node (1, type);
12349 /* Convert (X + c) >= X to true. */
12350 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12352 && ((code0 == PLUS_EXPR && is_positive >= 0)
12353 || (code0 == MINUS_EXPR && is_positive <= 0)))
12355 if (TREE_CODE (arg01) == INTEGER_CST
12356 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12357 fold_overflow_warning (("assuming signed overflow does not "
12358 "occur when assuming that "
12359 "(X + c) >= X is always true"),
12360 WARN_STRICT_OVERFLOW_ALL);
12361 return constant_boolean_node (1, type);
12364 if (TREE_CODE (arg01) == INTEGER_CST)
12366 /* Convert X + c > X and X - c < X to true for integers. */
12367 if (code == GT_EXPR
12368 && ((code0 == PLUS_EXPR && is_positive > 0)
12369 || (code0 == MINUS_EXPR && is_positive < 0)))
12371 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12372 fold_overflow_warning (("assuming signed overflow does "
12373 "not occur when assuming that "
12374 "(X + c) > X is always true"),
12375 WARN_STRICT_OVERFLOW_ALL);
12376 return constant_boolean_node (1, type);
12379 if (code == LT_EXPR
12380 && ((code0 == MINUS_EXPR && is_positive > 0)
12381 || (code0 == PLUS_EXPR && is_positive < 0)))
12383 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12384 fold_overflow_warning (("assuming signed overflow does "
12385 "not occur when assuming that "
12386 "(X - c) < X is always true"),
12387 WARN_STRICT_OVERFLOW_ALL);
12388 return constant_boolean_node (1, type);
12391 /* Convert X + c <= X and X - c >= X to false for integers. */
12392 if (code == LE_EXPR
12393 && ((code0 == PLUS_EXPR && is_positive > 0)
12394 || (code0 == MINUS_EXPR && is_positive < 0)))
12396 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12397 fold_overflow_warning (("assuming signed overflow does "
12398 "not occur when assuming that "
12399 "(X + c) <= X is always false"),
12400 WARN_STRICT_OVERFLOW_ALL);
12401 return constant_boolean_node (0, type);
12404 if (code == GE_EXPR
12405 && ((code0 == MINUS_EXPR && is_positive > 0)
12406 || (code0 == PLUS_EXPR && is_positive < 0)))
12408 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12409 fold_overflow_warning (("assuming signed overflow does "
12410 "not occur when assuming that "
12411 "(X - c) >= X is always false"),
12412 WARN_STRICT_OVERFLOW_ALL);
12413 return constant_boolean_node (0, type);
12418 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12419 This transformation affects the cases which are handled in later
12420 optimizations involving comparisons with non-negative constants. */
12421 if (TREE_CODE (arg1) == INTEGER_CST
12422 && TREE_CODE (arg0) != INTEGER_CST
12423 && tree_int_cst_sgn (arg1) > 0)
12425 if (code == GE_EXPR)
12427 arg1 = const_binop (MINUS_EXPR, arg1,
12428 build_int_cst (TREE_TYPE (arg1), 1), 0);
12429 return fold_build2 (GT_EXPR, type, arg0,
12430 fold_convert (TREE_TYPE (arg0), arg1));
12432 if (code == LT_EXPR)
12434 arg1 = const_binop (MINUS_EXPR, arg1,
12435 build_int_cst (TREE_TYPE (arg1), 1), 0);
12436 return fold_build2 (LE_EXPR, type, arg0,
12437 fold_convert (TREE_TYPE (arg0), arg1));
12441 /* Comparisons with the highest or lowest possible integer of
12442 the specified precision will have known values. */
12444 tree arg1_type = TREE_TYPE (arg1);
12445 unsigned int width = TYPE_PRECISION (arg1_type);
12447 if (TREE_CODE (arg1) == INTEGER_CST
12448 && !TREE_OVERFLOW (arg1)
12449 && width <= 2 * HOST_BITS_PER_WIDE_INT
12450 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12452 HOST_WIDE_INT signed_max_hi;
12453 unsigned HOST_WIDE_INT signed_max_lo;
12454 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12456 if (width <= HOST_BITS_PER_WIDE_INT)
12458 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12463 if (TYPE_UNSIGNED (arg1_type))
12465 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12471 max_lo = signed_max_lo;
12472 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12478 width -= HOST_BITS_PER_WIDE_INT;
12479 signed_max_lo = -1;
12480 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12485 if (TYPE_UNSIGNED (arg1_type))
12487 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12492 max_hi = signed_max_hi;
12493 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12497 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12498 && TREE_INT_CST_LOW (arg1) == max_lo)
12502 return omit_one_operand (type, integer_zero_node, arg0);
12505 return fold_build2 (EQ_EXPR, type, op0, op1);
12508 return omit_one_operand (type, integer_one_node, arg0);
12511 return fold_build2 (NE_EXPR, type, op0, op1);
12513 /* The GE_EXPR and LT_EXPR cases above are not normally
12514 reached because of previous transformations. */
12519 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12521 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12525 arg1 = const_binop (PLUS_EXPR, arg1,
12526 build_int_cst (TREE_TYPE (arg1), 1), 0);
12527 return fold_build2 (EQ_EXPR, type,
12528 fold_convert (TREE_TYPE (arg1), arg0),
12531 arg1 = const_binop (PLUS_EXPR, arg1,
12532 build_int_cst (TREE_TYPE (arg1), 1), 0);
12533 return fold_build2 (NE_EXPR, type,
12534 fold_convert (TREE_TYPE (arg1), arg0),
12539 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12541 && TREE_INT_CST_LOW (arg1) == min_lo)
12545 return omit_one_operand (type, integer_zero_node, arg0);
12548 return fold_build2 (EQ_EXPR, type, op0, op1);
12551 return omit_one_operand (type, integer_one_node, arg0);
12554 return fold_build2 (NE_EXPR, type, op0, op1);
12559 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12561 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12565 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12566 return fold_build2 (NE_EXPR, type,
12567 fold_convert (TREE_TYPE (arg1), arg0),
12570 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12571 return fold_build2 (EQ_EXPR, type,
12572 fold_convert (TREE_TYPE (arg1), arg0),
12578 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12579 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12580 && TYPE_UNSIGNED (arg1_type)
12581 /* We will flip the signedness of the comparison operator
12582 associated with the mode of arg1, so the sign bit is
12583 specified by this mode. Check that arg1 is the signed
12584 max associated with this sign bit. */
12585 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12586 /* signed_type does not work on pointer types. */
12587 && INTEGRAL_TYPE_P (arg1_type))
12589 /* The following case also applies to X < signed_max+1
12590 and X >= signed_max+1 because previous transformations. */
12591 if (code == LE_EXPR || code == GT_EXPR)
12594 st = signed_type_for (TREE_TYPE (arg1));
12595 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12596 type, fold_convert (st, arg0),
12597 build_int_cst (st, 0));
12603 /* If we are comparing an ABS_EXPR with a constant, we can
12604 convert all the cases into explicit comparisons, but they may
12605 well not be faster than doing the ABS and one comparison.
12606 But ABS (X) <= C is a range comparison, which becomes a subtraction
12607 and a comparison, and is probably faster. */
12608 if (code == LE_EXPR
12609 && TREE_CODE (arg1) == INTEGER_CST
12610 && TREE_CODE (arg0) == ABS_EXPR
12611 && ! TREE_SIDE_EFFECTS (arg0)
12612 && (0 != (tem = negate_expr (arg1)))
12613 && TREE_CODE (tem) == INTEGER_CST
12614 && !TREE_OVERFLOW (tem))
12615 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12616 build2 (GE_EXPR, type,
12617 TREE_OPERAND (arg0, 0), tem),
12618 build2 (LE_EXPR, type,
12619 TREE_OPERAND (arg0, 0), arg1));
12621 /* Convert ABS_EXPR<x> >= 0 to true. */
12622 strict_overflow_p = false;
12623 if (code == GE_EXPR
12624 && (integer_zerop (arg1)
12625 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12626 && real_zerop (arg1)))
12627 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12629 if (strict_overflow_p)
12630 fold_overflow_warning (("assuming signed overflow does not occur "
12631 "when simplifying comparison of "
12632 "absolute value and zero"),
12633 WARN_STRICT_OVERFLOW_CONDITIONAL);
12634 return omit_one_operand (type, integer_one_node, arg0);
12637 /* Convert ABS_EXPR<x> < 0 to false. */
12638 strict_overflow_p = false;
12639 if (code == LT_EXPR
12640 && (integer_zerop (arg1) || real_zerop (arg1))
12641 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12643 if (strict_overflow_p)
12644 fold_overflow_warning (("assuming signed overflow does not occur "
12645 "when simplifying comparison of "
12646 "absolute value and zero"),
12647 WARN_STRICT_OVERFLOW_CONDITIONAL);
12648 return omit_one_operand (type, integer_zero_node, arg0);
12651 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12652 and similarly for >= into !=. */
12653 if ((code == LT_EXPR || code == GE_EXPR)
12654 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12655 && TREE_CODE (arg1) == LSHIFT_EXPR
12656 && integer_onep (TREE_OPERAND (arg1, 0)))
12657 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12658 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12659 TREE_OPERAND (arg1, 1)),
12660 build_int_cst (TREE_TYPE (arg0), 0));
12662 if ((code == LT_EXPR || code == GE_EXPR)
12663 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12664 && CONVERT_EXPR_P (arg1)
12665 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12666 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12668 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12669 fold_convert (TREE_TYPE (arg0),
12670 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12671 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12673 build_int_cst (TREE_TYPE (arg0), 0));
12677 case UNORDERED_EXPR:
12685 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12687 t1 = fold_relational_const (code, type, arg0, arg1);
12688 if (t1 != NULL_TREE)
12692 /* If the first operand is NaN, the result is constant. */
12693 if (TREE_CODE (arg0) == REAL_CST
12694 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12695 && (code != LTGT_EXPR || ! flag_trapping_math))
12697 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12698 ? integer_zero_node
12699 : integer_one_node;
12700 return omit_one_operand (type, t1, arg1);
12703 /* If the second operand is NaN, the result is constant. */
12704 if (TREE_CODE (arg1) == REAL_CST
12705 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12706 && (code != LTGT_EXPR || ! flag_trapping_math))
12708 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12709 ? integer_zero_node
12710 : integer_one_node;
12711 return omit_one_operand (type, t1, arg0);
12714 /* Simplify unordered comparison of something with itself. */
12715 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12716 && operand_equal_p (arg0, arg1, 0))
12717 return constant_boolean_node (1, type);
12719 if (code == LTGT_EXPR
12720 && !flag_trapping_math
12721 && operand_equal_p (arg0, arg1, 0))
12722 return constant_boolean_node (0, type);
12724 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12726 tree targ0 = strip_float_extensions (arg0);
12727 tree targ1 = strip_float_extensions (arg1);
12728 tree newtype = TREE_TYPE (targ0);
12730 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12731 newtype = TREE_TYPE (targ1);
12733 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12734 return fold_build2 (code, type, fold_convert (newtype, targ0),
12735 fold_convert (newtype, targ1));
12740 case COMPOUND_EXPR:
12741 /* When pedantic, a compound expression can be neither an lvalue
12742 nor an integer constant expression. */
12743 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12745 /* Don't let (0, 0) be null pointer constant. */
12746 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12747 : fold_convert (type, arg1);
12748 return pedantic_non_lvalue (tem);
12751 if ((TREE_CODE (arg0) == REAL_CST
12752 && TREE_CODE (arg1) == REAL_CST)
12753 || (TREE_CODE (arg0) == INTEGER_CST
12754 && TREE_CODE (arg1) == INTEGER_CST))
12755 return build_complex (type, arg0, arg1);
12759 /* An ASSERT_EXPR should never be passed to fold_binary. */
12760 gcc_unreachable ();
12764 } /* switch (code) */
12767 /* Callback for walk_tree, looking for LABEL_EXPR.
12768 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12769 Do not check the sub-tree of GOTO_EXPR. */
12772 contains_label_1 (tree *tp,
12773 int *walk_subtrees,
12774 void *data ATTRIBUTE_UNUSED)
12776 switch (TREE_CODE (*tp))
12781 *walk_subtrees = 0;
12788 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12789 accessible from outside the sub-tree. Returns NULL_TREE if no
12790 addressable label is found. */
12793 contains_label_p (tree st)
12795 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12798 /* Fold a ternary expression of code CODE and type TYPE with operands
12799 OP0, OP1, and OP2. Return the folded expression if folding is
12800 successful. Otherwise, return NULL_TREE. */
12803 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12806 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12807 enum tree_code_class kind = TREE_CODE_CLASS (code);
12809 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12810 && TREE_CODE_LENGTH (code) == 3);
12812 /* Strip any conversions that don't change the mode. This is safe
12813 for every expression, except for a comparison expression because
12814 its signedness is derived from its operands. So, in the latter
12815 case, only strip conversions that don't change the signedness.
12817 Note that this is done as an internal manipulation within the
12818 constant folder, in order to find the simplest representation of
12819 the arguments so that their form can be studied. In any cases,
12820 the appropriate type conversions should be put back in the tree
12821 that will get out of the constant folder. */
12836 case COMPONENT_REF:
12837 if (TREE_CODE (arg0) == CONSTRUCTOR
12838 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12840 unsigned HOST_WIDE_INT idx;
12842 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12849 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12850 so all simple results must be passed through pedantic_non_lvalue. */
12851 if (TREE_CODE (arg0) == INTEGER_CST)
12853 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12854 tem = integer_zerop (arg0) ? op2 : op1;
12855 /* Only optimize constant conditions when the selected branch
12856 has the same type as the COND_EXPR. This avoids optimizing
12857 away "c ? x : throw", where the throw has a void type.
12858 Avoid throwing away that operand which contains label. */
12859 if ((!TREE_SIDE_EFFECTS (unused_op)
12860 || !contains_label_p (unused_op))
12861 && (! VOID_TYPE_P (TREE_TYPE (tem))
12862 || VOID_TYPE_P (type)))
12863 return pedantic_non_lvalue (tem);
12866 if (operand_equal_p (arg1, op2, 0))
12867 return pedantic_omit_one_operand (type, arg1, arg0);
12869 /* If we have A op B ? A : C, we may be able to convert this to a
12870 simpler expression, depending on the operation and the values
12871 of B and C. Signed zeros prevent all of these transformations,
12872 for reasons given above each one.
12874 Also try swapping the arguments and inverting the conditional. */
12875 if (COMPARISON_CLASS_P (arg0)
12876 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12877 arg1, TREE_OPERAND (arg0, 1))
12878 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12880 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12885 if (COMPARISON_CLASS_P (arg0)
12886 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12888 TREE_OPERAND (arg0, 1))
12889 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12891 tem = fold_truth_not_expr (arg0);
12892 if (tem && COMPARISON_CLASS_P (tem))
12894 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12900 /* If the second operand is simpler than the third, swap them
12901 since that produces better jump optimization results. */
12902 if (truth_value_p (TREE_CODE (arg0))
12903 && tree_swap_operands_p (op1, op2, false))
12905 /* See if this can be inverted. If it can't, possibly because
12906 it was a floating-point inequality comparison, don't do
12908 tem = fold_truth_not_expr (arg0);
12910 return fold_build3 (code, type, tem, op2, op1);
12913 /* Convert A ? 1 : 0 to simply A. */
12914 if (integer_onep (op1)
12915 && integer_zerop (op2)
12916 /* If we try to convert OP0 to our type, the
12917 call to fold will try to move the conversion inside
12918 a COND, which will recurse. In that case, the COND_EXPR
12919 is probably the best choice, so leave it alone. */
12920 && type == TREE_TYPE (arg0))
12921 return pedantic_non_lvalue (arg0);
12923 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12924 over COND_EXPR in cases such as floating point comparisons. */
12925 if (integer_zerop (op1)
12926 && integer_onep (op2)
12927 && truth_value_p (TREE_CODE (arg0)))
12928 return pedantic_non_lvalue (fold_convert (type,
12929 invert_truthvalue (arg0)));
12931 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12932 if (TREE_CODE (arg0) == LT_EXPR
12933 && integer_zerop (TREE_OPERAND (arg0, 1))
12934 && integer_zerop (op2)
12935 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12937 /* sign_bit_p only checks ARG1 bits within A's precision.
12938 If <sign bit of A> has wider type than A, bits outside
12939 of A's precision in <sign bit of A> need to be checked.
12940 If they are all 0, this optimization needs to be done
12941 in unsigned A's type, if they are all 1 in signed A's type,
12942 otherwise this can't be done. */
12943 if (TYPE_PRECISION (TREE_TYPE (tem))
12944 < TYPE_PRECISION (TREE_TYPE (arg1))
12945 && TYPE_PRECISION (TREE_TYPE (tem))
12946 < TYPE_PRECISION (type))
12948 unsigned HOST_WIDE_INT mask_lo;
12949 HOST_WIDE_INT mask_hi;
12950 int inner_width, outer_width;
12953 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12954 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12955 if (outer_width > TYPE_PRECISION (type))
12956 outer_width = TYPE_PRECISION (type);
12958 if (outer_width > HOST_BITS_PER_WIDE_INT)
12960 mask_hi = ((unsigned HOST_WIDE_INT) -1
12961 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12967 mask_lo = ((unsigned HOST_WIDE_INT) -1
12968 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12970 if (inner_width > HOST_BITS_PER_WIDE_INT)
12972 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12973 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12977 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12978 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12980 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12981 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12983 tem_type = signed_type_for (TREE_TYPE (tem));
12984 tem = fold_convert (tem_type, tem);
12986 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12987 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12989 tem_type = unsigned_type_for (TREE_TYPE (tem));
12990 tem = fold_convert (tem_type, tem);
12997 return fold_convert (type,
12998 fold_build2 (BIT_AND_EXPR,
12999 TREE_TYPE (tem), tem,
13000 fold_convert (TREE_TYPE (tem),
13004 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13005 already handled above. */
13006 if (TREE_CODE (arg0) == BIT_AND_EXPR
13007 && integer_onep (TREE_OPERAND (arg0, 1))
13008 && integer_zerop (op2)
13009 && integer_pow2p (arg1))
13011 tree tem = TREE_OPERAND (arg0, 0);
13013 if (TREE_CODE (tem) == RSHIFT_EXPR
13014 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13015 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13016 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13017 return fold_build2 (BIT_AND_EXPR, type,
13018 TREE_OPERAND (tem, 0), arg1);
13021 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13022 is probably obsolete because the first operand should be a
13023 truth value (that's why we have the two cases above), but let's
13024 leave it in until we can confirm this for all front-ends. */
13025 if (integer_zerop (op2)
13026 && TREE_CODE (arg0) == NE_EXPR
13027 && integer_zerop (TREE_OPERAND (arg0, 1))
13028 && integer_pow2p (arg1)
13029 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13030 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13031 arg1, OEP_ONLY_CONST))
13032 return pedantic_non_lvalue (fold_convert (type,
13033 TREE_OPERAND (arg0, 0)));
13035 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13036 if (integer_zerop (op2)
13037 && truth_value_p (TREE_CODE (arg0))
13038 && truth_value_p (TREE_CODE (arg1)))
13039 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13040 fold_convert (type, arg0),
13043 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13044 if (integer_onep (op2)
13045 && truth_value_p (TREE_CODE (arg0))
13046 && truth_value_p (TREE_CODE (arg1)))
13048 /* Only perform transformation if ARG0 is easily inverted. */
13049 tem = fold_truth_not_expr (arg0);
13051 return fold_build2 (TRUTH_ORIF_EXPR, type,
13052 fold_convert (type, tem),
13056 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13057 if (integer_zerop (arg1)
13058 && truth_value_p (TREE_CODE (arg0))
13059 && truth_value_p (TREE_CODE (op2)))
13061 /* Only perform transformation if ARG0 is easily inverted. */
13062 tem = fold_truth_not_expr (arg0);
13064 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13065 fold_convert (type, tem),
13069 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13070 if (integer_onep (arg1)
13071 && truth_value_p (TREE_CODE (arg0))
13072 && truth_value_p (TREE_CODE (op2)))
13073 return fold_build2 (TRUTH_ORIF_EXPR, type,
13074 fold_convert (type, arg0),
13080 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13081 of fold_ternary on them. */
13082 gcc_unreachable ();
13084 case BIT_FIELD_REF:
13085 if ((TREE_CODE (arg0) == VECTOR_CST
13086 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13087 && type == TREE_TYPE (TREE_TYPE (arg0)))
13089 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13090 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13093 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13094 && (idx % width) == 0
13095 && (idx = idx / width)
13096 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13098 tree elements = NULL_TREE;
13100 if (TREE_CODE (arg0) == VECTOR_CST)
13101 elements = TREE_VECTOR_CST_ELTS (arg0);
13104 unsigned HOST_WIDE_INT idx;
13107 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13108 elements = tree_cons (NULL_TREE, value, elements);
13110 while (idx-- > 0 && elements)
13111 elements = TREE_CHAIN (elements);
13113 return TREE_VALUE (elements);
13115 return fold_convert (type, integer_zero_node);
13119 /* A bit-field-ref that referenced the full argument can be stripped. */
13120 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13121 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13122 && integer_zerop (op2))
13123 return fold_convert (type, arg0);
13129 } /* switch (code) */
13132 /* Perform constant folding and related simplification of EXPR.
13133 The related simplifications include x*1 => x, x*0 => 0, etc.,
13134 and application of the associative law.
13135 NOP_EXPR conversions may be removed freely (as long as we
13136 are careful not to change the type of the overall expression).
13137 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13138 but we can constant-fold them if they have constant operands. */
13140 #ifdef ENABLE_FOLD_CHECKING
13141 # define fold(x) fold_1 (x)
13142 static tree fold_1 (tree);
13148 const tree t = expr;
13149 enum tree_code code = TREE_CODE (t);
13150 enum tree_code_class kind = TREE_CODE_CLASS (code);
13153 /* Return right away if a constant. */
13154 if (kind == tcc_constant)
13157 /* CALL_EXPR-like objects with variable numbers of operands are
13158 treated specially. */
13159 if (kind == tcc_vl_exp)
13161 if (code == CALL_EXPR)
13163 tem = fold_call_expr (expr, false);
13164 return tem ? tem : expr;
13169 if (IS_EXPR_CODE_CLASS (kind))
13171 tree type = TREE_TYPE (t);
13172 tree op0, op1, op2;
13174 switch (TREE_CODE_LENGTH (code))
13177 op0 = TREE_OPERAND (t, 0);
13178 tem = fold_unary (code, type, op0);
13179 return tem ? tem : expr;
13181 op0 = TREE_OPERAND (t, 0);
13182 op1 = TREE_OPERAND (t, 1);
13183 tem = fold_binary (code, type, op0, op1);
13184 return tem ? tem : expr;
13186 op0 = TREE_OPERAND (t, 0);
13187 op1 = TREE_OPERAND (t, 1);
13188 op2 = TREE_OPERAND (t, 2);
13189 tem = fold_ternary (code, type, op0, op1, op2);
13190 return tem ? tem : expr;
13200 tree op0 = TREE_OPERAND (t, 0);
13201 tree op1 = TREE_OPERAND (t, 1);
13203 if (TREE_CODE (op1) == INTEGER_CST
13204 && TREE_CODE (op0) == CONSTRUCTOR
13205 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13207 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13208 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13209 unsigned HOST_WIDE_INT begin = 0;
13211 /* Find a matching index by means of a binary search. */
13212 while (begin != end)
13214 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13215 tree index = VEC_index (constructor_elt, elts, middle)->index;
13217 if (TREE_CODE (index) == INTEGER_CST
13218 && tree_int_cst_lt (index, op1))
13219 begin = middle + 1;
13220 else if (TREE_CODE (index) == INTEGER_CST
13221 && tree_int_cst_lt (op1, index))
13223 else if (TREE_CODE (index) == RANGE_EXPR
13224 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13225 begin = middle + 1;
13226 else if (TREE_CODE (index) == RANGE_EXPR
13227 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13230 return VEC_index (constructor_elt, elts, middle)->value;
13238 return fold (DECL_INITIAL (t));
13242 } /* switch (code) */
13245 #ifdef ENABLE_FOLD_CHECKING
13248 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13249 static void fold_check_failed (const_tree, const_tree);
13250 void print_fold_checksum (const_tree);
13252 /* When --enable-checking=fold, compute a digest of expr before
13253 and after actual fold call to see if fold did not accidentally
13254 change original expr. */
13260 struct md5_ctx ctx;
13261 unsigned char checksum_before[16], checksum_after[16];
13264 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13265 md5_init_ctx (&ctx);
13266 fold_checksum_tree (expr, &ctx, ht);
13267 md5_finish_ctx (&ctx, checksum_before);
13270 ret = fold_1 (expr);
13272 md5_init_ctx (&ctx);
13273 fold_checksum_tree (expr, &ctx, ht);
13274 md5_finish_ctx (&ctx, checksum_after);
13277 if (memcmp (checksum_before, checksum_after, 16))
13278 fold_check_failed (expr, ret);
13284 print_fold_checksum (const_tree expr)
13286 struct md5_ctx ctx;
13287 unsigned char checksum[16], cnt;
13290 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13291 md5_init_ctx (&ctx);
13292 fold_checksum_tree (expr, &ctx, ht);
13293 md5_finish_ctx (&ctx, checksum);
13295 for (cnt = 0; cnt < 16; ++cnt)
13296 fprintf (stderr, "%02x", checksum[cnt]);
13297 putc ('\n', stderr);
13301 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13303 internal_error ("fold check: original tree changed by fold");
13307 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13310 enum tree_code code;
13311 struct tree_function_decl buf;
13316 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13317 <= sizeof (struct tree_function_decl))
13318 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13321 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13325 code = TREE_CODE (expr);
13326 if (TREE_CODE_CLASS (code) == tcc_declaration
13327 && DECL_ASSEMBLER_NAME_SET_P (expr))
13329 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13330 memcpy ((char *) &buf, expr, tree_size (expr));
13331 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13332 expr = (tree) &buf;
13334 else if (TREE_CODE_CLASS (code) == tcc_type
13335 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13336 || TYPE_CACHED_VALUES_P (expr)
13337 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13339 /* Allow these fields to be modified. */
13341 memcpy ((char *) &buf, expr, tree_size (expr));
13342 expr = tmp = (tree) &buf;
13343 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13344 TYPE_POINTER_TO (tmp) = NULL;
13345 TYPE_REFERENCE_TO (tmp) = NULL;
13346 if (TYPE_CACHED_VALUES_P (tmp))
13348 TYPE_CACHED_VALUES_P (tmp) = 0;
13349 TYPE_CACHED_VALUES (tmp) = NULL;
13352 md5_process_bytes (expr, tree_size (expr), ctx);
13353 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13354 if (TREE_CODE_CLASS (code) != tcc_type
13355 && TREE_CODE_CLASS (code) != tcc_declaration
13356 && code != TREE_LIST
13357 && code != SSA_NAME)
13358 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13359 switch (TREE_CODE_CLASS (code))
13365 md5_process_bytes (TREE_STRING_POINTER (expr),
13366 TREE_STRING_LENGTH (expr), ctx);
13369 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13370 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13373 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13379 case tcc_exceptional:
13383 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13384 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13385 expr = TREE_CHAIN (expr);
13386 goto recursive_label;
13389 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13390 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13396 case tcc_expression:
13397 case tcc_reference:
13398 case tcc_comparison:
13401 case tcc_statement:
13403 len = TREE_OPERAND_LENGTH (expr);
13404 for (i = 0; i < len; ++i)
13405 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13407 case tcc_declaration:
13408 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13409 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13410 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13412 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13413 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13414 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13415 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13416 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13418 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13419 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13421 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13423 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13424 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13425 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13429 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13430 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13431 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13432 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13433 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13434 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13435 if (INTEGRAL_TYPE_P (expr)
13436 || SCALAR_FLOAT_TYPE_P (expr))
13438 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13439 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13441 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13442 if (TREE_CODE (expr) == RECORD_TYPE
13443 || TREE_CODE (expr) == UNION_TYPE
13444 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13445 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13446 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13453 /* Helper function for outputting the checksum of a tree T. When
13454 debugging with gdb, you can "define mynext" to be "next" followed
13455 by "call debug_fold_checksum (op0)", then just trace down till the
13459 debug_fold_checksum (const_tree t)
13462 unsigned char checksum[16];
13463 struct md5_ctx ctx;
13464 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13466 md5_init_ctx (&ctx);
13467 fold_checksum_tree (t, &ctx, ht);
13468 md5_finish_ctx (&ctx, checksum);
13471 for (i = 0; i < 16; i++)
13472 fprintf (stderr, "%d ", checksum[i]);
13474 fprintf (stderr, "\n");
13479 /* Fold a unary tree expression with code CODE of type TYPE with an
13480 operand OP0. Return a folded expression if successful. Otherwise,
13481 return a tree expression with code CODE of type TYPE with an
13485 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13488 #ifdef ENABLE_FOLD_CHECKING
13489 unsigned char checksum_before[16], checksum_after[16];
13490 struct md5_ctx ctx;
13493 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13494 md5_init_ctx (&ctx);
13495 fold_checksum_tree (op0, &ctx, ht);
13496 md5_finish_ctx (&ctx, checksum_before);
13500 tem = fold_unary (code, type, op0);
13502 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13504 #ifdef ENABLE_FOLD_CHECKING
13505 md5_init_ctx (&ctx);
13506 fold_checksum_tree (op0, &ctx, ht);
13507 md5_finish_ctx (&ctx, checksum_after);
13510 if (memcmp (checksum_before, checksum_after, 16))
13511 fold_check_failed (op0, tem);
13516 /* Fold a binary tree expression with code CODE of type TYPE with
13517 operands OP0 and OP1. Return a folded expression if successful.
13518 Otherwise, return a tree expression with code CODE of type TYPE
13519 with operands OP0 and OP1. */
13522 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13526 #ifdef ENABLE_FOLD_CHECKING
13527 unsigned char checksum_before_op0[16],
13528 checksum_before_op1[16],
13529 checksum_after_op0[16],
13530 checksum_after_op1[16];
13531 struct md5_ctx ctx;
13534 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13535 md5_init_ctx (&ctx);
13536 fold_checksum_tree (op0, &ctx, ht);
13537 md5_finish_ctx (&ctx, checksum_before_op0);
13540 md5_init_ctx (&ctx);
13541 fold_checksum_tree (op1, &ctx, ht);
13542 md5_finish_ctx (&ctx, checksum_before_op1);
13546 tem = fold_binary (code, type, op0, op1);
13548 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13550 #ifdef ENABLE_FOLD_CHECKING
13551 md5_init_ctx (&ctx);
13552 fold_checksum_tree (op0, &ctx, ht);
13553 md5_finish_ctx (&ctx, checksum_after_op0);
13556 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13557 fold_check_failed (op0, tem);
13559 md5_init_ctx (&ctx);
13560 fold_checksum_tree (op1, &ctx, ht);
13561 md5_finish_ctx (&ctx, checksum_after_op1);
13564 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13565 fold_check_failed (op1, tem);
13570 /* Fold a ternary tree expression with code CODE of type TYPE with
13571 operands OP0, OP1, and OP2. Return a folded expression if
13572 successful. Otherwise, return a tree expression with code CODE of
13573 type TYPE with operands OP0, OP1, and OP2. */
13576 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13580 #ifdef ENABLE_FOLD_CHECKING
13581 unsigned char checksum_before_op0[16],
13582 checksum_before_op1[16],
13583 checksum_before_op2[16],
13584 checksum_after_op0[16],
13585 checksum_after_op1[16],
13586 checksum_after_op2[16];
13587 struct md5_ctx ctx;
13590 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13591 md5_init_ctx (&ctx);
13592 fold_checksum_tree (op0, &ctx, ht);
13593 md5_finish_ctx (&ctx, checksum_before_op0);
13596 md5_init_ctx (&ctx);
13597 fold_checksum_tree (op1, &ctx, ht);
13598 md5_finish_ctx (&ctx, checksum_before_op1);
13601 md5_init_ctx (&ctx);
13602 fold_checksum_tree (op2, &ctx, ht);
13603 md5_finish_ctx (&ctx, checksum_before_op2);
13607 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13608 tem = fold_ternary (code, type, op0, op1, op2);
13610 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13612 #ifdef ENABLE_FOLD_CHECKING
13613 md5_init_ctx (&ctx);
13614 fold_checksum_tree (op0, &ctx, ht);
13615 md5_finish_ctx (&ctx, checksum_after_op0);
13618 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13619 fold_check_failed (op0, tem);
13621 md5_init_ctx (&ctx);
13622 fold_checksum_tree (op1, &ctx, ht);
13623 md5_finish_ctx (&ctx, checksum_after_op1);
13626 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13627 fold_check_failed (op1, tem);
13629 md5_init_ctx (&ctx);
13630 fold_checksum_tree (op2, &ctx, ht);
13631 md5_finish_ctx (&ctx, checksum_after_op2);
13634 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13635 fold_check_failed (op2, tem);
13640 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13641 arguments in ARGARRAY, and a null static chain.
13642 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13643 of type TYPE from the given operands as constructed by build_call_array. */
13646 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13649 #ifdef ENABLE_FOLD_CHECKING
13650 unsigned char checksum_before_fn[16],
13651 checksum_before_arglist[16],
13652 checksum_after_fn[16],
13653 checksum_after_arglist[16];
13654 struct md5_ctx ctx;
13658 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13659 md5_init_ctx (&ctx);
13660 fold_checksum_tree (fn, &ctx, ht);
13661 md5_finish_ctx (&ctx, checksum_before_fn);
13664 md5_init_ctx (&ctx);
13665 for (i = 0; i < nargs; i++)
13666 fold_checksum_tree (argarray[i], &ctx, ht);
13667 md5_finish_ctx (&ctx, checksum_before_arglist);
13671 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13673 #ifdef ENABLE_FOLD_CHECKING
13674 md5_init_ctx (&ctx);
13675 fold_checksum_tree (fn, &ctx, ht);
13676 md5_finish_ctx (&ctx, checksum_after_fn);
13679 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13680 fold_check_failed (fn, tem);
13682 md5_init_ctx (&ctx);
13683 for (i = 0; i < nargs; i++)
13684 fold_checksum_tree (argarray[i], &ctx, ht);
13685 md5_finish_ctx (&ctx, checksum_after_arglist);
13688 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13689 fold_check_failed (NULL_TREE, tem);
13694 /* Perform constant folding and related simplification of initializer
13695 expression EXPR. These behave identically to "fold_buildN" but ignore
13696 potential run-time traps and exceptions that fold must preserve. */
13698 #define START_FOLD_INIT \
13699 int saved_signaling_nans = flag_signaling_nans;\
13700 int saved_trapping_math = flag_trapping_math;\
13701 int saved_rounding_math = flag_rounding_math;\
13702 int saved_trapv = flag_trapv;\
13703 int saved_folding_initializer = folding_initializer;\
13704 flag_signaling_nans = 0;\
13705 flag_trapping_math = 0;\
13706 flag_rounding_math = 0;\
13708 folding_initializer = 1;
13710 #define END_FOLD_INIT \
13711 flag_signaling_nans = saved_signaling_nans;\
13712 flag_trapping_math = saved_trapping_math;\
13713 flag_rounding_math = saved_rounding_math;\
13714 flag_trapv = saved_trapv;\
13715 folding_initializer = saved_folding_initializer;
13718 fold_build1_initializer (enum tree_code code, tree type, tree op)
13723 result = fold_build1 (code, type, op);
13730 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13735 result = fold_build2 (code, type, op0, op1);
13742 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13748 result = fold_build3 (code, type, op0, op1, op2);
13755 fold_build_call_array_initializer (tree type, tree fn,
13756 int nargs, tree *argarray)
13761 result = fold_build_call_array (type, fn, nargs, argarray);
13767 #undef START_FOLD_INIT
13768 #undef END_FOLD_INIT
13770 /* Determine if first argument is a multiple of second argument. Return 0 if
13771 it is not, or we cannot easily determined it to be.
13773 An example of the sort of thing we care about (at this point; this routine
13774 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13775 fold cases do now) is discovering that
13777 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13783 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13785 This code also handles discovering that
13787 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13789 is a multiple of 8 so we don't have to worry about dealing with a
13790 possible remainder.
13792 Note that we *look* inside a SAVE_EXPR only to determine how it was
13793 calculated; it is not safe for fold to do much of anything else with the
13794 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13795 at run time. For example, the latter example above *cannot* be implemented
13796 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13797 evaluation time of the original SAVE_EXPR is not necessarily the same at
13798 the time the new expression is evaluated. The only optimization of this
13799 sort that would be valid is changing
13801 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13805 SAVE_EXPR (I) * SAVE_EXPR (J)
13807 (where the same SAVE_EXPR (J) is used in the original and the
13808 transformed version). */
13811 multiple_of_p (tree type, const_tree top, const_tree bottom)
13813 if (operand_equal_p (top, bottom, 0))
13816 if (TREE_CODE (type) != INTEGER_TYPE)
13819 switch (TREE_CODE (top))
13822 /* Bitwise and provides a power of two multiple. If the mask is
13823 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13824 if (!integer_pow2p (bottom))
13829 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13830 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13834 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13835 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13838 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13842 op1 = TREE_OPERAND (top, 1);
13843 /* const_binop may not detect overflow correctly,
13844 so check for it explicitly here. */
13845 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13846 > TREE_INT_CST_LOW (op1)
13847 && TREE_INT_CST_HIGH (op1) == 0
13848 && 0 != (t1 = fold_convert (type,
13849 const_binop (LSHIFT_EXPR,
13852 && !TREE_OVERFLOW (t1))
13853 return multiple_of_p (type, t1, bottom);
13858 /* Can't handle conversions from non-integral or wider integral type. */
13859 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13860 || (TYPE_PRECISION (type)
13861 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13864 /* .. fall through ... */
13867 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13870 if (TREE_CODE (bottom) != INTEGER_CST
13871 || integer_zerop (bottom)
13872 || (TYPE_UNSIGNED (type)
13873 && (tree_int_cst_sgn (top) < 0
13874 || tree_int_cst_sgn (bottom) < 0)))
13876 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13884 /* Return true if CODE or TYPE is known to be non-negative. */
13887 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13889 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13890 && truth_value_p (code))
13891 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13892 have a signed:1 type (where the value is -1 and 0). */
13897 /* Return true if (CODE OP0) is known to be non-negative. If the return
13898 value is based on the assumption that signed overflow is undefined,
13899 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13900 *STRICT_OVERFLOW_P. */
13903 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13904 bool *strict_overflow_p)
13906 if (TYPE_UNSIGNED (type))
13912 /* We can't return 1 if flag_wrapv is set because
13913 ABS_EXPR<INT_MIN> = INT_MIN. */
13914 if (!INTEGRAL_TYPE_P (type))
13916 if (TYPE_OVERFLOW_UNDEFINED (type))
13918 *strict_overflow_p = true;
13923 case NON_LVALUE_EXPR:
13925 case FIX_TRUNC_EXPR:
13926 return tree_expr_nonnegative_warnv_p (op0,
13927 strict_overflow_p);
13931 tree inner_type = TREE_TYPE (op0);
13932 tree outer_type = type;
13934 if (TREE_CODE (outer_type) == REAL_TYPE)
13936 if (TREE_CODE (inner_type) == REAL_TYPE)
13937 return tree_expr_nonnegative_warnv_p (op0,
13938 strict_overflow_p);
13939 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13941 if (TYPE_UNSIGNED (inner_type))
13943 return tree_expr_nonnegative_warnv_p (op0,
13944 strict_overflow_p);
13947 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13949 if (TREE_CODE (inner_type) == REAL_TYPE)
13950 return tree_expr_nonnegative_warnv_p (op0,
13951 strict_overflow_p);
13952 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13953 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13954 && TYPE_UNSIGNED (inner_type);
13960 return tree_simple_nonnegative_warnv_p (code, type);
13963 /* We don't know sign of `t', so be conservative and return false. */
13967 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13968 value is based on the assumption that signed overflow is undefined,
13969 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13970 *STRICT_OVERFLOW_P. */
13973 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13974 tree op1, bool *strict_overflow_p)
13976 if (TYPE_UNSIGNED (type))
13981 case POINTER_PLUS_EXPR:
13983 if (FLOAT_TYPE_P (type))
13984 return (tree_expr_nonnegative_warnv_p (op0,
13986 && tree_expr_nonnegative_warnv_p (op1,
13987 strict_overflow_p));
13989 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13990 both unsigned and at least 2 bits shorter than the result. */
13991 if (TREE_CODE (type) == INTEGER_TYPE
13992 && TREE_CODE (op0) == NOP_EXPR
13993 && TREE_CODE (op1) == NOP_EXPR)
13995 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13996 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13997 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13998 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14000 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14001 TYPE_PRECISION (inner2)) + 1;
14002 return prec < TYPE_PRECISION (type);
14008 if (FLOAT_TYPE_P (type))
14010 /* x * x for floating point x is always non-negative. */
14011 if (operand_equal_p (op0, op1, 0))
14013 return (tree_expr_nonnegative_warnv_p (op0,
14015 && tree_expr_nonnegative_warnv_p (op1,
14016 strict_overflow_p));
14019 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14020 both unsigned and their total bits is shorter than the result. */
14021 if (TREE_CODE (type) == INTEGER_TYPE
14022 && TREE_CODE (op0) == NOP_EXPR
14023 && TREE_CODE (op1) == NOP_EXPR)
14025 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14026 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14027 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14028 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14029 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14030 < TYPE_PRECISION (type);
14036 return (tree_expr_nonnegative_warnv_p (op0,
14038 || tree_expr_nonnegative_warnv_p (op1,
14039 strict_overflow_p));
14045 case TRUNC_DIV_EXPR:
14046 case CEIL_DIV_EXPR:
14047 case FLOOR_DIV_EXPR:
14048 case ROUND_DIV_EXPR:
14049 return (tree_expr_nonnegative_warnv_p (op0,
14051 && tree_expr_nonnegative_warnv_p (op1,
14052 strict_overflow_p));
14054 case TRUNC_MOD_EXPR:
14055 case CEIL_MOD_EXPR:
14056 case FLOOR_MOD_EXPR:
14057 case ROUND_MOD_EXPR:
14058 return tree_expr_nonnegative_warnv_p (op0,
14059 strict_overflow_p);
14061 return tree_simple_nonnegative_warnv_p (code, type);
14064 /* We don't know sign of `t', so be conservative and return false. */
14068 /* Return true if T is known to be non-negative. If the return
14069 value is based on the assumption that signed overflow is undefined,
14070 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14071 *STRICT_OVERFLOW_P. */
14074 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14076 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14079 switch (TREE_CODE (t))
14082 return tree_int_cst_sgn (t) >= 0;
14085 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14088 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14091 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14093 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14094 strict_overflow_p));
14096 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14099 /* We don't know sign of `t', so be conservative and return false. */
14103 /* Return true if T is known to be non-negative. If the return
14104 value is based on the assumption that signed overflow is undefined,
14105 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14106 *STRICT_OVERFLOW_P. */
14109 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14110 tree arg0, tree arg1, bool *strict_overflow_p)
14112 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14113 switch (DECL_FUNCTION_CODE (fndecl))
14115 CASE_FLT_FN (BUILT_IN_ACOS):
14116 CASE_FLT_FN (BUILT_IN_ACOSH):
14117 CASE_FLT_FN (BUILT_IN_CABS):
14118 CASE_FLT_FN (BUILT_IN_COSH):
14119 CASE_FLT_FN (BUILT_IN_ERFC):
14120 CASE_FLT_FN (BUILT_IN_EXP):
14121 CASE_FLT_FN (BUILT_IN_EXP10):
14122 CASE_FLT_FN (BUILT_IN_EXP2):
14123 CASE_FLT_FN (BUILT_IN_FABS):
14124 CASE_FLT_FN (BUILT_IN_FDIM):
14125 CASE_FLT_FN (BUILT_IN_HYPOT):
14126 CASE_FLT_FN (BUILT_IN_POW10):
14127 CASE_INT_FN (BUILT_IN_FFS):
14128 CASE_INT_FN (BUILT_IN_PARITY):
14129 CASE_INT_FN (BUILT_IN_POPCOUNT):
14130 case BUILT_IN_BSWAP32:
14131 case BUILT_IN_BSWAP64:
14135 CASE_FLT_FN (BUILT_IN_SQRT):
14136 /* sqrt(-0.0) is -0.0. */
14137 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14139 return tree_expr_nonnegative_warnv_p (arg0,
14140 strict_overflow_p);
14142 CASE_FLT_FN (BUILT_IN_ASINH):
14143 CASE_FLT_FN (BUILT_IN_ATAN):
14144 CASE_FLT_FN (BUILT_IN_ATANH):
14145 CASE_FLT_FN (BUILT_IN_CBRT):
14146 CASE_FLT_FN (BUILT_IN_CEIL):
14147 CASE_FLT_FN (BUILT_IN_ERF):
14148 CASE_FLT_FN (BUILT_IN_EXPM1):
14149 CASE_FLT_FN (BUILT_IN_FLOOR):
14150 CASE_FLT_FN (BUILT_IN_FMOD):
14151 CASE_FLT_FN (BUILT_IN_FREXP):
14152 CASE_FLT_FN (BUILT_IN_LCEIL):
14153 CASE_FLT_FN (BUILT_IN_LDEXP):
14154 CASE_FLT_FN (BUILT_IN_LFLOOR):
14155 CASE_FLT_FN (BUILT_IN_LLCEIL):
14156 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14157 CASE_FLT_FN (BUILT_IN_LLRINT):
14158 CASE_FLT_FN (BUILT_IN_LLROUND):
14159 CASE_FLT_FN (BUILT_IN_LRINT):
14160 CASE_FLT_FN (BUILT_IN_LROUND):
14161 CASE_FLT_FN (BUILT_IN_MODF):
14162 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14163 CASE_FLT_FN (BUILT_IN_RINT):
14164 CASE_FLT_FN (BUILT_IN_ROUND):
14165 CASE_FLT_FN (BUILT_IN_SCALB):
14166 CASE_FLT_FN (BUILT_IN_SCALBLN):
14167 CASE_FLT_FN (BUILT_IN_SCALBN):
14168 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14169 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14170 CASE_FLT_FN (BUILT_IN_SINH):
14171 CASE_FLT_FN (BUILT_IN_TANH):
14172 CASE_FLT_FN (BUILT_IN_TRUNC):
14173 /* True if the 1st argument is nonnegative. */
14174 return tree_expr_nonnegative_warnv_p (arg0,
14175 strict_overflow_p);
14177 CASE_FLT_FN (BUILT_IN_FMAX):
14178 /* True if the 1st OR 2nd arguments are nonnegative. */
14179 return (tree_expr_nonnegative_warnv_p (arg0,
14181 || (tree_expr_nonnegative_warnv_p (arg1,
14182 strict_overflow_p)));
14184 CASE_FLT_FN (BUILT_IN_FMIN):
14185 /* True if the 1st AND 2nd arguments are nonnegative. */
14186 return (tree_expr_nonnegative_warnv_p (arg0,
14188 && (tree_expr_nonnegative_warnv_p (arg1,
14189 strict_overflow_p)));
14191 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14192 /* True if the 2nd argument is nonnegative. */
14193 return tree_expr_nonnegative_warnv_p (arg1,
14194 strict_overflow_p);
14196 CASE_FLT_FN (BUILT_IN_POWI):
14197 /* True if the 1st argument is nonnegative or the second
14198 argument is an even integer. */
14199 if (TREE_CODE (arg1) == INTEGER_CST
14200 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14202 return tree_expr_nonnegative_warnv_p (arg0,
14203 strict_overflow_p);
14205 CASE_FLT_FN (BUILT_IN_POW):
14206 /* True if the 1st argument is nonnegative or the second
14207 argument is an even integer valued real. */
14208 if (TREE_CODE (arg1) == REAL_CST)
14213 c = TREE_REAL_CST (arg1);
14214 n = real_to_integer (&c);
14217 REAL_VALUE_TYPE cint;
14218 real_from_integer (&cint, VOIDmode, n,
14219 n < 0 ? -1 : 0, 0);
14220 if (real_identical (&c, &cint))
14224 return tree_expr_nonnegative_warnv_p (arg0,
14225 strict_overflow_p);
14230 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14234 /* Return true if T is known to be non-negative. If the return
14235 value is based on the assumption that signed overflow is undefined,
14236 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14237 *STRICT_OVERFLOW_P. */
14240 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14242 enum tree_code code = TREE_CODE (t);
14243 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14250 tree temp = TARGET_EXPR_SLOT (t);
14251 t = TARGET_EXPR_INITIAL (t);
14253 /* If the initializer is non-void, then it's a normal expression
14254 that will be assigned to the slot. */
14255 if (!VOID_TYPE_P (t))
14256 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14258 /* Otherwise, the initializer sets the slot in some way. One common
14259 way is an assignment statement at the end of the initializer. */
14262 if (TREE_CODE (t) == BIND_EXPR)
14263 t = expr_last (BIND_EXPR_BODY (t));
14264 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14265 || TREE_CODE (t) == TRY_CATCH_EXPR)
14266 t = expr_last (TREE_OPERAND (t, 0));
14267 else if (TREE_CODE (t) == STATEMENT_LIST)
14272 if (TREE_CODE (t) == MODIFY_EXPR
14273 && TREE_OPERAND (t, 0) == temp)
14274 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14275 strict_overflow_p);
14282 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14283 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14285 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14286 get_callee_fndecl (t),
14289 strict_overflow_p);
14291 case COMPOUND_EXPR:
14293 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14294 strict_overflow_p);
14296 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14297 strict_overflow_p);
14299 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14300 strict_overflow_p);
14303 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14307 /* We don't know sign of `t', so be conservative and return false. */
14311 /* Return true if T is known to be non-negative. If the return
14312 value is based on the assumption that signed overflow is undefined,
14313 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14314 *STRICT_OVERFLOW_P. */
14317 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14319 enum tree_code code;
14320 if (t == error_mark_node)
14323 code = TREE_CODE (t);
14324 switch (TREE_CODE_CLASS (code))
14327 case tcc_comparison:
14328 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14330 TREE_OPERAND (t, 0),
14331 TREE_OPERAND (t, 1),
14332 strict_overflow_p);
14335 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14337 TREE_OPERAND (t, 0),
14338 strict_overflow_p);
14341 case tcc_declaration:
14342 case tcc_reference:
14343 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14351 case TRUTH_AND_EXPR:
14352 case TRUTH_OR_EXPR:
14353 case TRUTH_XOR_EXPR:
14354 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14356 TREE_OPERAND (t, 0),
14357 TREE_OPERAND (t, 1),
14358 strict_overflow_p);
14359 case TRUTH_NOT_EXPR:
14360 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14362 TREE_OPERAND (t, 0),
14363 strict_overflow_p);
14370 case WITH_SIZE_EXPR:
14374 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14377 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14381 /* Return true if `t' is known to be non-negative. Handle warnings
14382 about undefined signed overflow. */
14385 tree_expr_nonnegative_p (tree t)
14387 bool ret, strict_overflow_p;
14389 strict_overflow_p = false;
14390 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14391 if (strict_overflow_p)
14392 fold_overflow_warning (("assuming signed overflow does not occur when "
14393 "determining that expression is always "
14395 WARN_STRICT_OVERFLOW_MISC);
14400 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14401 For floating point we further ensure that T is not denormal.
14402 Similar logic is present in nonzero_address in rtlanal.h.
14404 If the return value is based on the assumption that signed overflow
14405 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14406 change *STRICT_OVERFLOW_P. */
14409 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14410 bool *strict_overflow_p)
14415 return tree_expr_nonzero_warnv_p (op0,
14416 strict_overflow_p);
14420 tree inner_type = TREE_TYPE (op0);
14421 tree outer_type = type;
14423 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14424 && tree_expr_nonzero_warnv_p (op0,
14425 strict_overflow_p));
14429 case NON_LVALUE_EXPR:
14430 return tree_expr_nonzero_warnv_p (op0,
14431 strict_overflow_p);
14440 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14441 For floating point we further ensure that T is not denormal.
14442 Similar logic is present in nonzero_address in rtlanal.h.
14444 If the return value is based on the assumption that signed overflow
14445 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14446 change *STRICT_OVERFLOW_P. */
14449 tree_binary_nonzero_warnv_p (enum tree_code code,
14452 tree op1, bool *strict_overflow_p)
14454 bool sub_strict_overflow_p;
14457 case POINTER_PLUS_EXPR:
14459 if (TYPE_OVERFLOW_UNDEFINED (type))
14461 /* With the presence of negative values it is hard
14462 to say something. */
14463 sub_strict_overflow_p = false;
14464 if (!tree_expr_nonnegative_warnv_p (op0,
14465 &sub_strict_overflow_p)
14466 || !tree_expr_nonnegative_warnv_p (op1,
14467 &sub_strict_overflow_p))
14469 /* One of operands must be positive and the other non-negative. */
14470 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14471 overflows, on a twos-complement machine the sum of two
14472 nonnegative numbers can never be zero. */
14473 return (tree_expr_nonzero_warnv_p (op0,
14475 || tree_expr_nonzero_warnv_p (op1,
14476 strict_overflow_p));
14481 if (TYPE_OVERFLOW_UNDEFINED (type))
14483 if (tree_expr_nonzero_warnv_p (op0,
14485 && tree_expr_nonzero_warnv_p (op1,
14486 strict_overflow_p))
14488 *strict_overflow_p = true;
14495 sub_strict_overflow_p = false;
14496 if (tree_expr_nonzero_warnv_p (op0,
14497 &sub_strict_overflow_p)
14498 && tree_expr_nonzero_warnv_p (op1,
14499 &sub_strict_overflow_p))
14501 if (sub_strict_overflow_p)
14502 *strict_overflow_p = true;
14507 sub_strict_overflow_p = false;
14508 if (tree_expr_nonzero_warnv_p (op0,
14509 &sub_strict_overflow_p))
14511 if (sub_strict_overflow_p)
14512 *strict_overflow_p = true;
14514 /* When both operands are nonzero, then MAX must be too. */
14515 if (tree_expr_nonzero_warnv_p (op1,
14516 strict_overflow_p))
14519 /* MAX where operand 0 is positive is positive. */
14520 return tree_expr_nonnegative_warnv_p (op0,
14521 strict_overflow_p);
14523 /* MAX where operand 1 is positive is positive. */
14524 else if (tree_expr_nonzero_warnv_p (op1,
14525 &sub_strict_overflow_p)
14526 && tree_expr_nonnegative_warnv_p (op1,
14527 &sub_strict_overflow_p))
14529 if (sub_strict_overflow_p)
14530 *strict_overflow_p = true;
14536 return (tree_expr_nonzero_warnv_p (op1,
14538 || tree_expr_nonzero_warnv_p (op0,
14539 strict_overflow_p));
14548 /* Return true when T is an address and is known to be nonzero.
14549 For floating point we further ensure that T is not denormal.
14550 Similar logic is present in nonzero_address in rtlanal.h.
14552 If the return value is based on the assumption that signed overflow
14553 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14554 change *STRICT_OVERFLOW_P. */
14557 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14559 bool sub_strict_overflow_p;
14560 switch (TREE_CODE (t))
14563 return !integer_zerop (t);
14567 tree base = get_base_address (TREE_OPERAND (t, 0));
14572 /* Weak declarations may link to NULL. */
14573 if (VAR_OR_FUNCTION_DECL_P (base))
14574 return !DECL_WEAK (base);
14576 /* Constants are never weak. */
14577 if (CONSTANT_CLASS_P (base))
14584 sub_strict_overflow_p = false;
14585 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14586 &sub_strict_overflow_p)
14587 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14588 &sub_strict_overflow_p))
14590 if (sub_strict_overflow_p)
14591 *strict_overflow_p = true;
14602 /* Return true when T is an address and is known to be nonzero.
14603 For floating point we further ensure that T is not denormal.
14604 Similar logic is present in nonzero_address in rtlanal.h.
14606 If the return value is based on the assumption that signed overflow
14607 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14608 change *STRICT_OVERFLOW_P. */
14611 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14613 tree type = TREE_TYPE (t);
14614 enum tree_code code;
14616 /* Doing something useful for floating point would need more work. */
14617 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14620 code = TREE_CODE (t);
14621 switch (TREE_CODE_CLASS (code))
14624 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14625 strict_overflow_p);
14627 case tcc_comparison:
14628 return tree_binary_nonzero_warnv_p (code, type,
14629 TREE_OPERAND (t, 0),
14630 TREE_OPERAND (t, 1),
14631 strict_overflow_p);
14633 case tcc_declaration:
14634 case tcc_reference:
14635 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14643 case TRUTH_NOT_EXPR:
14644 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14645 strict_overflow_p);
14647 case TRUTH_AND_EXPR:
14648 case TRUTH_OR_EXPR:
14649 case TRUTH_XOR_EXPR:
14650 return tree_binary_nonzero_warnv_p (code, type,
14651 TREE_OPERAND (t, 0),
14652 TREE_OPERAND (t, 1),
14653 strict_overflow_p);
14660 case WITH_SIZE_EXPR:
14664 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14666 case COMPOUND_EXPR:
14669 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14670 strict_overflow_p);
14673 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14674 strict_overflow_p);
14677 return alloca_call_p (t);
14685 /* Return true when T is an address and is known to be nonzero.
14686 Handle warnings about undefined signed overflow. */
14689 tree_expr_nonzero_p (tree t)
14691 bool ret, strict_overflow_p;
14693 strict_overflow_p = false;
14694 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14695 if (strict_overflow_p)
14696 fold_overflow_warning (("assuming signed overflow does not occur when "
14697 "determining that expression is always "
14699 WARN_STRICT_OVERFLOW_MISC);
14703 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14704 attempt to fold the expression to a constant without modifying TYPE,
14707 If the expression could be simplified to a constant, then return
14708 the constant. If the expression would not be simplified to a
14709 constant, then return NULL_TREE. */
14712 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14714 tree tem = fold_binary (code, type, op0, op1);
14715 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14718 /* Given the components of a unary expression CODE, TYPE and OP0,
14719 attempt to fold the expression to a constant without modifying
14722 If the expression could be simplified to a constant, then return
14723 the constant. If the expression would not be simplified to a
14724 constant, then return NULL_TREE. */
14727 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14729 tree tem = fold_unary (code, type, op0);
14730 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14733 /* If EXP represents referencing an element in a constant string
14734 (either via pointer arithmetic or array indexing), return the
14735 tree representing the value accessed, otherwise return NULL. */
14738 fold_read_from_constant_string (tree exp)
14740 if ((TREE_CODE (exp) == INDIRECT_REF
14741 || TREE_CODE (exp) == ARRAY_REF)
14742 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14744 tree exp1 = TREE_OPERAND (exp, 0);
14748 if (TREE_CODE (exp) == INDIRECT_REF)
14749 string = string_constant (exp1, &index);
14752 tree low_bound = array_ref_low_bound (exp);
14753 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14755 /* Optimize the special-case of a zero lower bound.
14757 We convert the low_bound to sizetype to avoid some problems
14758 with constant folding. (E.g. suppose the lower bound is 1,
14759 and its mode is QI. Without the conversion,l (ARRAY
14760 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14761 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14762 if (! integer_zerop (low_bound))
14763 index = size_diffop (index, fold_convert (sizetype, low_bound));
14769 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14770 && TREE_CODE (string) == STRING_CST
14771 && TREE_CODE (index) == INTEGER_CST
14772 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14773 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14775 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14776 return build_int_cst_type (TREE_TYPE (exp),
14777 (TREE_STRING_POINTER (string)
14778 [TREE_INT_CST_LOW (index)]));
14783 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14784 an integer constant, real, or fixed-point constant.
14786 TYPE is the type of the result. */
14789 fold_negate_const (tree arg0, tree type)
14791 tree t = NULL_TREE;
14793 switch (TREE_CODE (arg0))
14797 unsigned HOST_WIDE_INT low;
14798 HOST_WIDE_INT high;
14799 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14800 TREE_INT_CST_HIGH (arg0),
14802 t = force_fit_type_double (type, low, high, 1,
14803 (overflow | TREE_OVERFLOW (arg0))
14804 && !TYPE_UNSIGNED (type));
14809 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14814 FIXED_VALUE_TYPE f;
14815 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14816 &(TREE_FIXED_CST (arg0)), NULL,
14817 TYPE_SATURATING (type));
14818 t = build_fixed (type, f);
14819 /* Propagate overflow flags. */
14820 if (overflow_p | TREE_OVERFLOW (arg0))
14822 TREE_OVERFLOW (t) = 1;
14823 TREE_CONSTANT_OVERFLOW (t) = 1;
14825 else if (TREE_CONSTANT_OVERFLOW (arg0))
14826 TREE_CONSTANT_OVERFLOW (t) = 1;
14831 gcc_unreachable ();
14837 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14838 an integer constant or real constant.
14840 TYPE is the type of the result. */
14843 fold_abs_const (tree arg0, tree type)
14845 tree t = NULL_TREE;
14847 switch (TREE_CODE (arg0))
14850 /* If the value is unsigned, then the absolute value is
14851 the same as the ordinary value. */
14852 if (TYPE_UNSIGNED (type))
14854 /* Similarly, if the value is non-negative. */
14855 else if (INT_CST_LT (integer_minus_one_node, arg0))
14857 /* If the value is negative, then the absolute value is
14861 unsigned HOST_WIDE_INT low;
14862 HOST_WIDE_INT high;
14863 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14864 TREE_INT_CST_HIGH (arg0),
14866 t = force_fit_type_double (type, low, high, -1,
14867 overflow | TREE_OVERFLOW (arg0));
14872 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14873 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14879 gcc_unreachable ();
14885 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14886 constant. TYPE is the type of the result. */
14889 fold_not_const (tree arg0, tree type)
14891 tree t = NULL_TREE;
14893 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14895 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14896 ~TREE_INT_CST_HIGH (arg0), 0,
14897 TREE_OVERFLOW (arg0));
14902 /* Given CODE, a relational operator, the target type, TYPE and two
14903 constant operands OP0 and OP1, return the result of the
14904 relational operation. If the result is not a compile time
14905 constant, then return NULL_TREE. */
14908 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14910 int result, invert;
14912 /* From here on, the only cases we handle are when the result is
14913 known to be a constant. */
14915 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14917 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14918 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14920 /* Handle the cases where either operand is a NaN. */
14921 if (real_isnan (c0) || real_isnan (c1))
14931 case UNORDERED_EXPR:
14945 if (flag_trapping_math)
14951 gcc_unreachable ();
14954 return constant_boolean_node (result, type);
14957 return constant_boolean_node (real_compare (code, c0, c1), type);
14960 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14962 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14963 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14964 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14967 /* Handle equality/inequality of complex constants. */
14968 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14970 tree rcond = fold_relational_const (code, type,
14971 TREE_REALPART (op0),
14972 TREE_REALPART (op1));
14973 tree icond = fold_relational_const (code, type,
14974 TREE_IMAGPART (op0),
14975 TREE_IMAGPART (op1));
14976 if (code == EQ_EXPR)
14977 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14978 else if (code == NE_EXPR)
14979 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14984 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14986 To compute GT, swap the arguments and do LT.
14987 To compute GE, do LT and invert the result.
14988 To compute LE, swap the arguments, do LT and invert the result.
14989 To compute NE, do EQ and invert the result.
14991 Therefore, the code below must handle only EQ and LT. */
14993 if (code == LE_EXPR || code == GT_EXPR)
14998 code = swap_tree_comparison (code);
15001 /* Note that it is safe to invert for real values here because we
15002 have already handled the one case that it matters. */
15005 if (code == NE_EXPR || code == GE_EXPR)
15008 code = invert_tree_comparison (code, false);
15011 /* Compute a result for LT or EQ if args permit;
15012 Otherwise return T. */
15013 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15015 if (code == EQ_EXPR)
15016 result = tree_int_cst_equal (op0, op1);
15017 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15018 result = INT_CST_LT_UNSIGNED (op0, op1);
15020 result = INT_CST_LT (op0, op1);
15027 return constant_boolean_node (result, type);
15030 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15031 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15035 fold_build_cleanup_point_expr (tree type, tree expr)
15037 /* If the expression does not have side effects then we don't have to wrap
15038 it with a cleanup point expression. */
15039 if (!TREE_SIDE_EFFECTS (expr))
15042 /* If the expression is a return, check to see if the expression inside the
15043 return has no side effects or the right hand side of the modify expression
15044 inside the return. If either don't have side effects set we don't need to
15045 wrap the expression in a cleanup point expression. Note we don't check the
15046 left hand side of the modify because it should always be a return decl. */
15047 if (TREE_CODE (expr) == RETURN_EXPR)
15049 tree op = TREE_OPERAND (expr, 0);
15050 if (!op || !TREE_SIDE_EFFECTS (op))
15052 op = TREE_OPERAND (op, 1);
15053 if (!TREE_SIDE_EFFECTS (op))
15057 return build1 (CLEANUP_POINT_EXPR, type, expr);
15060 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15061 of an indirection through OP0, or NULL_TREE if no simplification is
15065 fold_indirect_ref_1 (tree type, tree op0)
15071 subtype = TREE_TYPE (sub);
15072 if (!POINTER_TYPE_P (subtype))
15075 if (TREE_CODE (sub) == ADDR_EXPR)
15077 tree op = TREE_OPERAND (sub, 0);
15078 tree optype = TREE_TYPE (op);
15079 /* *&CONST_DECL -> to the value of the const decl. */
15080 if (TREE_CODE (op) == CONST_DECL)
15081 return DECL_INITIAL (op);
15082 /* *&p => p; make sure to handle *&"str"[cst] here. */
15083 if (type == optype)
15085 tree fop = fold_read_from_constant_string (op);
15091 /* *(foo *)&fooarray => fooarray[0] */
15092 else if (TREE_CODE (optype) == ARRAY_TYPE
15093 && type == TREE_TYPE (optype))
15095 tree type_domain = TYPE_DOMAIN (optype);
15096 tree min_val = size_zero_node;
15097 if (type_domain && TYPE_MIN_VALUE (type_domain))
15098 min_val = TYPE_MIN_VALUE (type_domain);
15099 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15101 /* *(foo *)&complexfoo => __real__ complexfoo */
15102 else if (TREE_CODE (optype) == COMPLEX_TYPE
15103 && type == TREE_TYPE (optype))
15104 return fold_build1 (REALPART_EXPR, type, op);
15105 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15106 else if (TREE_CODE (optype) == VECTOR_TYPE
15107 && type == TREE_TYPE (optype))
15109 tree part_width = TYPE_SIZE (type);
15110 tree index = bitsize_int (0);
15111 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15115 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15116 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15117 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15119 tree op00 = TREE_OPERAND (sub, 0);
15120 tree op01 = TREE_OPERAND (sub, 1);
15124 op00type = TREE_TYPE (op00);
15125 if (TREE_CODE (op00) == ADDR_EXPR
15126 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15127 && type == TREE_TYPE (TREE_TYPE (op00type)))
15129 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15130 tree part_width = TYPE_SIZE (type);
15131 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15132 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15133 tree index = bitsize_int (indexi);
15135 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15136 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15137 part_width, index);
15143 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15144 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15145 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15147 tree op00 = TREE_OPERAND (sub, 0);
15148 tree op01 = TREE_OPERAND (sub, 1);
15152 op00type = TREE_TYPE (op00);
15153 if (TREE_CODE (op00) == ADDR_EXPR
15154 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15155 && type == TREE_TYPE (TREE_TYPE (op00type)))
15157 tree size = TYPE_SIZE_UNIT (type);
15158 if (tree_int_cst_equal (size, op01))
15159 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15163 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15164 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15165 && type == TREE_TYPE (TREE_TYPE (subtype)))
15168 tree min_val = size_zero_node;
15169 sub = build_fold_indirect_ref (sub);
15170 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15171 if (type_domain && TYPE_MIN_VALUE (type_domain))
15172 min_val = TYPE_MIN_VALUE (type_domain);
15173 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15179 /* Builds an expression for an indirection through T, simplifying some
15183 build_fold_indirect_ref (tree t)
15185 tree type = TREE_TYPE (TREE_TYPE (t));
15186 tree sub = fold_indirect_ref_1 (type, t);
15191 return build1 (INDIRECT_REF, type, t);
15194 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15197 fold_indirect_ref (tree t)
15199 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15207 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15208 whose result is ignored. The type of the returned tree need not be
15209 the same as the original expression. */
15212 fold_ignored_result (tree t)
15214 if (!TREE_SIDE_EFFECTS (t))
15215 return integer_zero_node;
15218 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15221 t = TREE_OPERAND (t, 0);
15225 case tcc_comparison:
15226 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15227 t = TREE_OPERAND (t, 0);
15228 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15229 t = TREE_OPERAND (t, 1);
15234 case tcc_expression:
15235 switch (TREE_CODE (t))
15237 case COMPOUND_EXPR:
15238 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15240 t = TREE_OPERAND (t, 0);
15244 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15245 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15247 t = TREE_OPERAND (t, 0);
15260 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15261 This can only be applied to objects of a sizetype. */
15264 round_up (tree value, int divisor)
15266 tree div = NULL_TREE;
15268 gcc_assert (divisor > 0);
15272 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15273 have to do anything. Only do this when we are not given a const,
15274 because in that case, this check is more expensive than just
15276 if (TREE_CODE (value) != INTEGER_CST)
15278 div = build_int_cst (TREE_TYPE (value), divisor);
15280 if (multiple_of_p (TREE_TYPE (value), value, div))
15284 /* If divisor is a power of two, simplify this to bit manipulation. */
15285 if (divisor == (divisor & -divisor))
15287 if (TREE_CODE (value) == INTEGER_CST)
15289 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15290 unsigned HOST_WIDE_INT high;
15293 if ((low & (divisor - 1)) == 0)
15296 overflow_p = TREE_OVERFLOW (value);
15297 high = TREE_INT_CST_HIGH (value);
15298 low &= ~(divisor - 1);
15307 return force_fit_type_double (TREE_TYPE (value), low, high,
15314 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15315 value = size_binop (PLUS_EXPR, value, t);
15316 t = build_int_cst (TREE_TYPE (value), -divisor);
15317 value = size_binop (BIT_AND_EXPR, value, t);
15323 div = build_int_cst (TREE_TYPE (value), divisor);
15324 value = size_binop (CEIL_DIV_EXPR, value, div);
15325 value = size_binop (MULT_EXPR, value, div);
15331 /* Likewise, but round down. */
15334 round_down (tree value, int divisor)
15336 tree div = NULL_TREE;
15338 gcc_assert (divisor > 0);
15342 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15343 have to do anything. Only do this when we are not given a const,
15344 because in that case, this check is more expensive than just
15346 if (TREE_CODE (value) != INTEGER_CST)
15348 div = build_int_cst (TREE_TYPE (value), divisor);
15350 if (multiple_of_p (TREE_TYPE (value), value, div))
15354 /* If divisor is a power of two, simplify this to bit manipulation. */
15355 if (divisor == (divisor & -divisor))
15359 t = build_int_cst (TREE_TYPE (value), -divisor);
15360 value = size_binop (BIT_AND_EXPR, value, t);
15365 div = build_int_cst (TREE_TYPE (value), divisor);
15366 value = size_binop (FLOOR_DIV_EXPR, value, div);
15367 value = size_binop (MULT_EXPR, value, div);
15373 /* Returns the pointer to the base of the object addressed by EXP and
15374 extracts the information about the offset of the access, storing it
15375 to PBITPOS and POFFSET. */
15378 split_address_to_core_and_offset (tree exp,
15379 HOST_WIDE_INT *pbitpos, tree *poffset)
15382 enum machine_mode mode;
15383 int unsignedp, volatilep;
15384 HOST_WIDE_INT bitsize;
15386 if (TREE_CODE (exp) == ADDR_EXPR)
15388 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15389 poffset, &mode, &unsignedp, &volatilep,
15391 core = fold_addr_expr (core);
15397 *poffset = NULL_TREE;
15403 /* Returns true if addresses of E1 and E2 differ by a constant, false
15404 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15407 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15410 HOST_WIDE_INT bitpos1, bitpos2;
15411 tree toffset1, toffset2, tdiff, type;
15413 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15414 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15416 if (bitpos1 % BITS_PER_UNIT != 0
15417 || bitpos2 % BITS_PER_UNIT != 0
15418 || !operand_equal_p (core1, core2, 0))
15421 if (toffset1 && toffset2)
15423 type = TREE_TYPE (toffset1);
15424 if (type != TREE_TYPE (toffset2))
15425 toffset2 = fold_convert (type, toffset2);
15427 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15428 if (!cst_and_fits_in_hwi (tdiff))
15431 *diff = int_cst_value (tdiff);
15433 else if (toffset1 || toffset2)
15435 /* If only one of the offsets is non-constant, the difference cannot
15442 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15446 /* Simplify the floating point expression EXP when the sign of the
15447 result is not significant. Return NULL_TREE if no simplification
15451 fold_strip_sign_ops (tree exp)
15455 switch (TREE_CODE (exp))
15459 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15460 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15464 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15466 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15467 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15468 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15469 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15470 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15471 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15474 case COMPOUND_EXPR:
15475 arg0 = TREE_OPERAND (exp, 0);
15476 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15478 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15482 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15483 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15485 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15486 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15487 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15492 const enum built_in_function fcode = builtin_mathfn_code (exp);
15495 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15496 /* Strip copysign function call, return the 1st argument. */
15497 arg0 = CALL_EXPR_ARG (exp, 0);
15498 arg1 = CALL_EXPR_ARG (exp, 1);
15499 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15502 /* Strip sign ops from the argument of "odd" math functions. */
15503 if (negate_mathfn_p (fcode))
15505 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15507 return build_call_expr (get_callee_fndecl (exp), 1, arg0);