1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static int all_ones_mask_p (const_tree, int);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
147 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
148 and SUM1. Then this yields nonzero if overflow occurred during the
151 Overflow occurs if A and B have the same sign, but A and SUM differ in
152 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
154 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
156 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
157 We do that by representing the two-word integer in 4 words, with only
158 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
159 number. The value of the word is LOWPART + HIGHPART * BASE. */
162 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
163 #define HIGHPART(x) \
164 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
165 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
167 /* Unpack a two-word integer into 4 words.
168 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
169 WORDS points to the array of HOST_WIDE_INTs. */
172 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174 words[0] = LOWPART (low);
175 words[1] = HIGHPART (low);
176 words[2] = LOWPART (hi);
177 words[3] = HIGHPART (hi);
180 /* Pack an array of 4 words into a two-word integer.
181 WORDS points to the array of words.
182 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
185 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
188 *low = words[0] + words[1] * BASE;
189 *hi = words[2] + words[3] * BASE;
192 /* Force the double-word integer L1, H1 to be within the range of the
193 integer type TYPE. Stores the properly truncated and sign-extended
194 double-word integer in *LV, *HV. Returns true if the operation
195 overflows, that is, argument and result are different. */
198 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
199 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
201 unsigned HOST_WIDE_INT low0 = l1;
202 HOST_WIDE_INT high0 = h1;
204 int sign_extended_type;
206 if (POINTER_TYPE_P (type)
207 || TREE_CODE (type) == OFFSET_TYPE)
210 prec = TYPE_PRECISION (type);
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
241 else if (prec == HOST_BITS_PER_WIDE_INT)
243 if ((HOST_WIDE_INT)l1 < 0)
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
263 /* We force the double-int HIGH:LOW to the range of the type TYPE by
264 sign or zero extending it.
265 OVERFLOWABLE indicates if we are interested
266 in overflow of the value, when >0 we are only interested in signed
267 overflow, for <0 we are interested in any overflow. OVERFLOWED
268 indicates whether overflow has already occurred. CONST_OVERFLOWED
269 indicates whether constant overflow has already occurred. We force
270 T's value to be within range of T's type (by setting to 0 or 1 all
271 the bits outside the type's range). We set TREE_OVERFLOWED if,
272 OVERFLOWED is nonzero,
273 or OVERFLOWABLE is >0 and signed overflow occurs
274 or OVERFLOWABLE is <0 and any overflow occurs
275 We return a new tree node for the extended double-int. The node
276 is shared if no overflow flags are set. */
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
283 int sign_extended_type;
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
291 overflow = fit_double_type (low, high, &low, &high, type);
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
298 || (overflowable > 0 && sign_extended_type))
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 unsigned HOST_WIDE_INT l;
329 h = h1 + h2 + (l < l1);
335 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
337 return OVERFLOW_SUM_SIGN (h1, h2, h);
340 /* Negate a doubleword integer with doubleword result.
341 Return nonzero if the operation overflows, assuming it's signed.
342 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
343 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
346 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
347 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
353 return (*hv & h1) < 0;
363 /* Multiply two doubleword integers with doubleword result.
364 Return nonzero if the operation overflows according to UNSIGNED_P.
365 Each argument is given as two `HOST_WIDE_INT' pieces.
366 One argument is L1 and H1; the other, L2 and H2.
367 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
370 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
371 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
372 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
375 HOST_WIDE_INT arg1[4];
376 HOST_WIDE_INT arg2[4];
377 HOST_WIDE_INT prod[4 * 2];
378 unsigned HOST_WIDE_INT carry;
380 unsigned HOST_WIDE_INT toplow, neglow;
381 HOST_WIDE_INT tophigh, neghigh;
383 encode (arg1, l1, h1);
384 encode (arg2, l2, h2);
386 memset (prod, 0, sizeof prod);
388 for (i = 0; i < 4; i++)
391 for (j = 0; j < 4; j++)
394 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
395 carry += arg1[i] * arg2[j];
396 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
398 prod[k] = LOWPART (carry);
399 carry = HIGHPART (carry);
404 decode (prod, lv, hv);
405 decode (prod + 4, &toplow, &tophigh);
407 /* Unsigned overflow is immediate. */
409 return (toplow | tophigh) != 0;
411 /* Check for signed overflow by calculating the signed representation of the
412 top half of the result; it should agree with the low half's sign bit. */
415 neg_double (l2, h2, &neglow, &neghigh);
416 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
420 neg_double (l1, h1, &neglow, &neghigh);
421 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
423 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
426 /* Shift the doubleword integer in L1, H1 left by COUNT places
427 keeping only PREC bits of result.
428 Shift right if COUNT is negative.
429 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
430 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
433 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
434 HOST_WIDE_INT count, unsigned int prec,
435 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
437 unsigned HOST_WIDE_INT signmask;
441 rshift_double (l1, h1, -count, prec, lv, hv, arith);
445 if (SHIFT_COUNT_TRUNCATED)
448 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
450 /* Shifting by the host word size is undefined according to the
451 ANSI standard, so we must handle this as a special case. */
455 else if (count >= HOST_BITS_PER_WIDE_INT)
457 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
462 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
463 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
467 /* Sign extend all bits that are beyond the precision. */
469 signmask = -((prec > HOST_BITS_PER_WIDE_INT
470 ? ((unsigned HOST_WIDE_INT) *hv
471 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
472 : (*lv >> (prec - 1))) & 1);
474 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
476 else if (prec >= HOST_BITS_PER_WIDE_INT)
478 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
479 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
484 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
485 *lv |= signmask << prec;
489 /* Shift the doubleword integer in L1, H1 right by COUNT places
490 keeping only PREC bits of result. COUNT must be positive.
491 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
492 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
495 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
496 HOST_WIDE_INT count, unsigned int prec,
497 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
500 unsigned HOST_WIDE_INT signmask;
503 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
506 if (SHIFT_COUNT_TRUNCATED)
509 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
511 /* Shifting by the host word size is undefined according to the
512 ANSI standard, so we must handle this as a special case. */
516 else if (count >= HOST_BITS_PER_WIDE_INT)
519 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
523 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
525 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
528 /* Zero / sign extend all bits that are beyond the precision. */
530 if (count >= (HOST_WIDE_INT)prec)
535 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
537 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
539 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
540 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
545 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
546 *lv |= signmask << (prec - count);
550 /* Rotate the doubleword integer in L1, H1 left by COUNT places
551 keeping only PREC bits of result.
552 Rotate right if COUNT is negative.
553 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
556 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
557 HOST_WIDE_INT count, unsigned int prec,
558 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
560 unsigned HOST_WIDE_INT s1l, s2l;
561 HOST_WIDE_INT s1h, s2h;
567 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
568 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 /* Rotate the doubleword integer in L1, H1 left by COUNT places
574 keeping only PREC bits of result. COUNT must be positive.
575 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
578 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
579 HOST_WIDE_INT count, unsigned int prec,
580 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
582 unsigned HOST_WIDE_INT s1l, s2l;
583 HOST_WIDE_INT s1h, s2h;
589 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
590 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
596 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
597 CODE is a tree code for a kind of division, one of
598 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
600 It controls how the quotient is rounded to an integer.
601 Return nonzero if the operation overflows.
602 UNS nonzero says do unsigned division. */
605 div_and_round_double (enum tree_code code, int uns,
606 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
607 HOST_WIDE_INT hnum_orig,
608 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
609 HOST_WIDE_INT hden_orig,
610 unsigned HOST_WIDE_INT *lquo,
611 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
615 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
616 HOST_WIDE_INT den[4], quo[4];
618 unsigned HOST_WIDE_INT work;
619 unsigned HOST_WIDE_INT carry = 0;
620 unsigned HOST_WIDE_INT lnum = lnum_orig;
621 HOST_WIDE_INT hnum = hnum_orig;
622 unsigned HOST_WIDE_INT lden = lden_orig;
623 HOST_WIDE_INT hden = hden_orig;
626 if (hden == 0 && lden == 0)
627 overflow = 1, lden = 1;
629 /* Calculate quotient sign and convert operands to unsigned. */
635 /* (minimum integer) / (-1) is the only overflow case. */
636 if (neg_double (lnum, hnum, &lnum, &hnum)
637 && ((HOST_WIDE_INT) lden & hden) == -1)
643 neg_double (lden, hden, &lden, &hden);
647 if (hnum == 0 && hden == 0)
648 { /* single precision */
650 /* This unsigned division rounds toward zero. */
656 { /* trivial case: dividend < divisor */
657 /* hden != 0 already checked. */
664 memset (quo, 0, sizeof quo);
666 memset (num, 0, sizeof num); /* to zero 9th element */
667 memset (den, 0, sizeof den);
669 encode (num, lnum, hnum);
670 encode (den, lden, hden);
672 /* Special code for when the divisor < BASE. */
673 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
675 /* hnum != 0 already checked. */
676 for (i = 4 - 1; i >= 0; i--)
678 work = num[i] + carry * BASE;
679 quo[i] = work / lden;
685 /* Full double precision division,
686 with thanks to Don Knuth's "Seminumerical Algorithms". */
687 int num_hi_sig, den_hi_sig;
688 unsigned HOST_WIDE_INT quo_est, scale;
690 /* Find the highest nonzero divisor digit. */
691 for (i = 4 - 1;; i--)
698 /* Insure that the first digit of the divisor is at least BASE/2.
699 This is required by the quotient digit estimation algorithm. */
701 scale = BASE / (den[den_hi_sig] + 1);
703 { /* scale divisor and dividend */
705 for (i = 0; i <= 4 - 1; i++)
707 work = (num[i] * scale) + carry;
708 num[i] = LOWPART (work);
709 carry = HIGHPART (work);
714 for (i = 0; i <= 4 - 1; i++)
716 work = (den[i] * scale) + carry;
717 den[i] = LOWPART (work);
718 carry = HIGHPART (work);
719 if (den[i] != 0) den_hi_sig = i;
726 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
728 /* Guess the next quotient digit, quo_est, by dividing the first
729 two remaining dividend digits by the high order quotient digit.
730 quo_est is never low and is at most 2 high. */
731 unsigned HOST_WIDE_INT tmp;
733 num_hi_sig = i + den_hi_sig + 1;
734 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
735 if (num[num_hi_sig] != den[den_hi_sig])
736 quo_est = work / den[den_hi_sig];
740 /* Refine quo_est so it's usually correct, and at most one high. */
741 tmp = work - quo_est * den[den_hi_sig];
743 && (den[den_hi_sig - 1] * quo_est
744 > (tmp * BASE + num[num_hi_sig - 2])))
747 /* Try QUO_EST as the quotient digit, by multiplying the
748 divisor by QUO_EST and subtracting from the remaining dividend.
749 Keep in mind that QUO_EST is the I - 1st digit. */
752 for (j = 0; j <= den_hi_sig; j++)
754 work = quo_est * den[j] + carry;
755 carry = HIGHPART (work);
756 work = num[i + j] - LOWPART (work);
757 num[i + j] = LOWPART (work);
758 carry += HIGHPART (work) != 0;
761 /* If quo_est was high by one, then num[i] went negative and
762 we need to correct things. */
763 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
766 carry = 0; /* add divisor back in */
767 for (j = 0; j <= den_hi_sig; j++)
769 work = num[i + j] + den[j] + carry;
770 carry = HIGHPART (work);
771 num[i + j] = LOWPART (work);
774 num [num_hi_sig] += carry;
777 /* Store the quotient digit. */
782 decode (quo, lquo, hquo);
785 /* If result is negative, make it so. */
787 neg_double (*lquo, *hquo, lquo, hquo);
789 /* Compute trial remainder: rem = num - (quo * den) */
790 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
791 neg_double (*lrem, *hrem, lrem, hrem);
792 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
797 case TRUNC_MOD_EXPR: /* round toward zero */
798 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
802 case FLOOR_MOD_EXPR: /* round toward negative infinity */
803 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
806 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
814 case CEIL_MOD_EXPR: /* round toward positive infinity */
815 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
817 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
825 case ROUND_MOD_EXPR: /* round to closest integer */
827 unsigned HOST_WIDE_INT labs_rem = *lrem;
828 HOST_WIDE_INT habs_rem = *hrem;
829 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
830 HOST_WIDE_INT habs_den = hden, htwice;
832 /* Get absolute values. */
834 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
836 neg_double (lden, hden, &labs_den, &habs_den);
838 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
839 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
840 labs_rem, habs_rem, <wice, &htwice);
842 if (((unsigned HOST_WIDE_INT) habs_den
843 < (unsigned HOST_WIDE_INT) htwice)
844 || (((unsigned HOST_WIDE_INT) habs_den
845 == (unsigned HOST_WIDE_INT) htwice)
846 && (labs_den <= ltwice)))
850 add_double (*lquo, *hquo,
851 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
854 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
866 /* Compute true remainder: rem = num - (quo * den) */
867 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
868 neg_double (*lrem, *hrem, lrem, hrem);
869 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
873 /* If ARG2 divides ARG1 with zero remainder, carries out the division
874 of type CODE and returns the quotient.
875 Otherwise returns NULL_TREE. */
878 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
880 unsigned HOST_WIDE_INT int1l, int2l;
881 HOST_WIDE_INT int1h, int2h;
882 unsigned HOST_WIDE_INT quol, reml;
883 HOST_WIDE_INT quoh, remh;
884 tree type = TREE_TYPE (arg1);
885 int uns = TYPE_UNSIGNED (type);
887 int1l = TREE_INT_CST_LOW (arg1);
888 int1h = TREE_INT_CST_HIGH (arg1);
889 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
890 &obj[some_exotic_number]. */
891 if (POINTER_TYPE_P (type))
894 type = signed_type_for (type);
895 fit_double_type (int1l, int1h, &int1l, &int1h,
899 fit_double_type (int1l, int1h, &int1l, &int1h, type);
900 int2l = TREE_INT_CST_LOW (arg2);
901 int2h = TREE_INT_CST_HIGH (arg2);
903 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904 &quol, &quoh, &reml, &remh);
905 if (remh != 0 || reml != 0)
908 return build_int_cst_wide (type, quol, quoh);
911 /* This is nonzero if we should defer warnings about undefined
912 overflow. This facility exists because these warnings are a
913 special case. The code to estimate loop iterations does not want
914 to issue any warnings, since it works with expressions which do not
915 occur in user code. Various bits of cleanup code call fold(), but
916 only use the result if it has certain characteristics (e.g., is a
917 constant); that code only wants to issue a warning if the result is
920 static int fold_deferring_overflow_warnings;
922 /* If a warning about undefined overflow is deferred, this is the
923 warning. Note that this may cause us to turn two warnings into
924 one, but that is fine since it is sufficient to only give one
925 warning per expression. */
927 static const char* fold_deferred_overflow_warning;
929 /* If a warning about undefined overflow is deferred, this is the
930 level at which the warning should be emitted. */
932 static enum warn_strict_overflow_code fold_deferred_overflow_code;
934 /* Start deferring overflow warnings. We could use a stack here to
935 permit nested calls, but at present it is not necessary. */
938 fold_defer_overflow_warnings (void)
940 ++fold_deferring_overflow_warnings;
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
962 if (fold_deferred_overflow_warning != NULL
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = code;
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
972 if (!issue || warnmsg == NULL)
975 if (gimple_no_warning_p (stmt))
978 /* Use the smallest code level when deciding to issue the
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
983 if (!issue_strict_overflow_warning (code))
987 locus = input_location;
989 locus = gimple_location (stmt);
990 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
993 /* Stop deferring overflow warnings, ignoring any deferred
997 fold_undefer_and_ignore_overflow_warnings (void)
999 fold_undefer_overflow_warnings (false, NULL, 0);
1002 /* Whether we are deferring overflow warnings. */
1005 fold_deferring_overflow_warnings_p (void)
1007 return fold_deferring_overflow_warnings > 0;
1010 /* This is called when we fold something based on the fact that signed
1011 overflow is undefined. */
1014 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1137 return negate_expr_p (TREE_REALPART (t))
1138 && negate_expr_p (TREE_IMAGPART (t));
1141 return negate_expr_p (TREE_OPERAND (t, 0))
1142 && negate_expr_p (TREE_OPERAND (t, 1));
1145 return negate_expr_p (TREE_OPERAND (t, 0));
1148 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1156 /* -(A + B) -> (-A) - B. */
1157 return negate_expr_p (TREE_OPERAND (t, 0));
1160 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1161 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1163 && reorder_operands_p (TREE_OPERAND (t, 0),
1164 TREE_OPERAND (t, 1));
1167 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1174 return negate_expr_p (TREE_OPERAND (t, 1))
1175 || negate_expr_p (TREE_OPERAND (t, 0));
1178 case TRUNC_DIV_EXPR:
1179 case ROUND_DIV_EXPR:
1180 case FLOOR_DIV_EXPR:
1182 case EXACT_DIV_EXPR:
1183 /* In general we can't negate A / B, because if A is INT_MIN and
1184 B is 1, we may turn this into INT_MIN / -1 which is undefined
1185 and actually traps on some architectures. But if overflow is
1186 undefined, we can negate, because - (INT_MIN / 1) is an
1188 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1189 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1191 return negate_expr_p (TREE_OPERAND (t, 1))
1192 || negate_expr_p (TREE_OPERAND (t, 0));
1195 /* Negate -((double)float) as (double)(-float). */
1196 if (TREE_CODE (type) == REAL_TYPE)
1198 tree tem = strip_float_extensions (t);
1200 return negate_expr_p (tem);
1205 /* Negate -f(x) as f(-x). */
1206 if (negate_mathfn_p (builtin_mathfn_code (t)))
1207 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1211 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1212 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1214 tree op1 = TREE_OPERAND (t, 1);
1215 if (TREE_INT_CST_HIGH (op1) == 0
1216 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1217 == TREE_INT_CST_LOW (op1))
1228 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1229 simplification is possible.
1230 If negate_expr_p would return true for T, NULL_TREE will never be
1234 fold_negate_expr (tree t)
1236 tree type = TREE_TYPE (t);
1239 switch (TREE_CODE (t))
1241 /* Convert - (~A) to A + 1. */
1243 if (INTEGRAL_TYPE_P (type))
1244 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1245 build_int_cst (type, 1));
1249 tem = fold_negate_const (t, type);
1250 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1251 || !TYPE_OVERFLOW_TRAPS (type))
1256 tem = fold_negate_const (t, type);
1257 /* Two's complement FP formats, such as c4x, may overflow. */
1258 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1263 tem = fold_negate_const (t, type);
1268 tree rpart = negate_expr (TREE_REALPART (t));
1269 tree ipart = negate_expr (TREE_IMAGPART (t));
1271 if ((TREE_CODE (rpart) == REAL_CST
1272 && TREE_CODE (ipart) == REAL_CST)
1273 || (TREE_CODE (rpart) == INTEGER_CST
1274 && TREE_CODE (ipart) == INTEGER_CST))
1275 return build_complex (type, rpart, ipart);
1280 if (negate_expr_p (t))
1281 return fold_build2 (COMPLEX_EXPR, type,
1282 fold_negate_expr (TREE_OPERAND (t, 0)),
1283 fold_negate_expr (TREE_OPERAND (t, 1)));
1287 if (negate_expr_p (t))
1288 return fold_build1 (CONJ_EXPR, type,
1289 fold_negate_expr (TREE_OPERAND (t, 0)));
1293 return TREE_OPERAND (t, 0);
1296 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1299 /* -(A + B) -> (-B) - A. */
1300 if (negate_expr_p (TREE_OPERAND (t, 1))
1301 && reorder_operands_p (TREE_OPERAND (t, 0),
1302 TREE_OPERAND (t, 1)))
1304 tem = negate_expr (TREE_OPERAND (t, 1));
1305 return fold_build2 (MINUS_EXPR, type,
1306 tem, TREE_OPERAND (t, 0));
1309 /* -(A + B) -> (-A) - B. */
1310 if (negate_expr_p (TREE_OPERAND (t, 0)))
1312 tem = negate_expr (TREE_OPERAND (t, 0));
1313 return fold_build2 (MINUS_EXPR, type,
1314 tem, TREE_OPERAND (t, 1));
1320 /* - (A - B) -> B - A */
1321 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1323 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1324 return fold_build2 (MINUS_EXPR, type,
1325 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1329 if (TYPE_UNSIGNED (type))
1335 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1337 tem = TREE_OPERAND (t, 1);
1338 if (negate_expr_p (tem))
1339 return fold_build2 (TREE_CODE (t), type,
1340 TREE_OPERAND (t, 0), negate_expr (tem));
1341 tem = TREE_OPERAND (t, 0);
1342 if (negate_expr_p (tem))
1343 return fold_build2 (TREE_CODE (t), type,
1344 negate_expr (tem), TREE_OPERAND (t, 1));
1348 case TRUNC_DIV_EXPR:
1349 case ROUND_DIV_EXPR:
1350 case FLOOR_DIV_EXPR:
1352 case EXACT_DIV_EXPR:
1353 /* In general we can't negate A / B, because if A is INT_MIN and
1354 B is 1, we may turn this into INT_MIN / -1 which is undefined
1355 and actually traps on some architectures. But if overflow is
1356 undefined, we can negate, because - (INT_MIN / 1) is an
1358 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1360 const char * const warnmsg = G_("assuming signed overflow does not "
1361 "occur when negating a division");
1362 tem = TREE_OPERAND (t, 1);
1363 if (negate_expr_p (tem))
1365 if (INTEGRAL_TYPE_P (type)
1366 && (TREE_CODE (tem) != INTEGER_CST
1367 || integer_onep (tem)))
1368 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1369 return fold_build2 (TREE_CODE (t), type,
1370 TREE_OPERAND (t, 0), negate_expr (tem));
1372 tem = TREE_OPERAND (t, 0);
1373 if (negate_expr_p (tem))
1375 if (INTEGRAL_TYPE_P (type)
1376 && (TREE_CODE (tem) != INTEGER_CST
1377 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1379 return fold_build2 (TREE_CODE (t), type,
1380 negate_expr (tem), TREE_OPERAND (t, 1));
1386 /* Convert -((double)float) into (double)(-float). */
1387 if (TREE_CODE (type) == REAL_TYPE)
1389 tem = strip_float_extensions (t);
1390 if (tem != t && negate_expr_p (tem))
1391 return fold_convert (type, negate_expr (tem));
1396 /* Negate -f(x) as f(-x). */
1397 if (negate_mathfn_p (builtin_mathfn_code (t))
1398 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1402 fndecl = get_callee_fndecl (t);
1403 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1404 return build_call_expr (fndecl, 1, arg);
1409 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1410 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1412 tree op1 = TREE_OPERAND (t, 1);
1413 if (TREE_INT_CST_HIGH (op1) == 0
1414 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1415 == TREE_INT_CST_LOW (op1))
1417 tree ntype = TYPE_UNSIGNED (type)
1418 ? signed_type_for (type)
1419 : unsigned_type_for (type);
1420 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1421 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1422 return fold_convert (type, temp);
1434 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1435 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1436 return NULL_TREE. */
1439 negate_expr (tree t)
1446 type = TREE_TYPE (t);
1447 STRIP_SIGN_NOPS (t);
1449 tem = fold_negate_expr (t);
1451 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1452 return fold_convert (type, tem);
1455 /* Split a tree IN into a constant, literal and variable parts that could be
1456 combined with CODE to make IN. "constant" means an expression with
1457 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1458 commutative arithmetic operation. Store the constant part into *CONP,
1459 the literal in *LITP and return the variable part. If a part isn't
1460 present, set it to null. If the tree does not decompose in this way,
1461 return the entire tree as the variable part and the other parts as null.
1463 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1464 case, we negate an operand that was subtracted. Except if it is a
1465 literal for which we use *MINUS_LITP instead.
1467 If NEGATE_P is true, we are negating all of IN, again except a literal
1468 for which we use *MINUS_LITP instead.
1470 If IN is itself a literal or constant, return it as appropriate.
1472 Note that we do not guarantee that any of the three values will be the
1473 same type as IN, but they will have the same signedness and mode. */
1476 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1477 tree *minus_litp, int negate_p)
1485 /* Strip any conversions that don't change the machine mode or signedness. */
1486 STRIP_SIGN_NOPS (in);
1488 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1489 || TREE_CODE (in) == FIXED_CST)
1491 else if (TREE_CODE (in) == code
1492 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1493 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1494 /* We can associate addition and subtraction together (even
1495 though the C standard doesn't say so) for integers because
1496 the value is not affected. For reals, the value might be
1497 affected, so we can't. */
1498 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1499 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1501 tree op0 = TREE_OPERAND (in, 0);
1502 tree op1 = TREE_OPERAND (in, 1);
1503 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1504 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1506 /* First see if either of the operands is a literal, then a constant. */
1507 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1508 || TREE_CODE (op0) == FIXED_CST)
1509 *litp = op0, op0 = 0;
1510 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1511 || TREE_CODE (op1) == FIXED_CST)
1512 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1514 if (op0 != 0 && TREE_CONSTANT (op0))
1515 *conp = op0, op0 = 0;
1516 else if (op1 != 0 && TREE_CONSTANT (op1))
1517 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1519 /* If we haven't dealt with either operand, this is not a case we can
1520 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1521 if (op0 != 0 && op1 != 0)
1526 var = op1, neg_var_p = neg1_p;
1528 /* Now do any needed negations. */
1530 *minus_litp = *litp, *litp = 0;
1532 *conp = negate_expr (*conp);
1534 var = negate_expr (var);
1536 else if (TREE_CONSTANT (in))
1544 *minus_litp = *litp, *litp = 0;
1545 else if (*minus_litp)
1546 *litp = *minus_litp, *minus_litp = 0;
1547 *conp = negate_expr (*conp);
1548 var = negate_expr (var);
1554 /* Re-associate trees split by the above function. T1 and T2 are either
1555 expressions to associate or null. Return the new expression, if any. If
1556 we build an operation, do it in TYPE and with CODE. */
1559 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1566 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1567 try to fold this since we will have infinite recursion. But do
1568 deal with any NEGATE_EXPRs. */
1569 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1570 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1572 if (code == PLUS_EXPR)
1574 if (TREE_CODE (t1) == NEGATE_EXPR)
1575 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1576 fold_convert (type, TREE_OPERAND (t1, 0)));
1577 else if (TREE_CODE (t2) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1579 fold_convert (type, TREE_OPERAND (t2, 0)));
1580 else if (integer_zerop (t2))
1581 return fold_convert (type, t1);
1583 else if (code == MINUS_EXPR)
1585 if (integer_zerop (t2))
1586 return fold_convert (type, t1);
1589 return build2 (code, type, fold_convert (type, t1),
1590 fold_convert (type, t2));
1593 return fold_build2 (code, type, fold_convert (type, t1),
1594 fold_convert (type, t2));
1597 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1598 for use in int_const_binop, size_binop and size_diffop. */
1601 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1603 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1605 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1620 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1621 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1622 && TYPE_MODE (type1) == TYPE_MODE (type2);
1626 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1627 to produce a new constant. Return NULL_TREE if we don't know how
1628 to evaluate CODE at compile-time.
1630 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1633 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1635 unsigned HOST_WIDE_INT int1l, int2l;
1636 HOST_WIDE_INT int1h, int2h;
1637 unsigned HOST_WIDE_INT low;
1639 unsigned HOST_WIDE_INT garbagel;
1640 HOST_WIDE_INT garbageh;
1642 tree type = TREE_TYPE (arg1);
1643 int uns = TYPE_UNSIGNED (type);
1645 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1648 int1l = TREE_INT_CST_LOW (arg1);
1649 int1h = TREE_INT_CST_HIGH (arg1);
1650 int2l = TREE_INT_CST_LOW (arg2);
1651 int2h = TREE_INT_CST_HIGH (arg2);
1656 low = int1l | int2l, hi = int1h | int2h;
1660 low = int1l ^ int2l, hi = int1h ^ int2h;
1664 low = int1l & int2l, hi = int1h & int2h;
1670 /* It's unclear from the C standard whether shifts can overflow.
1671 The following code ignores overflow; perhaps a C standard
1672 interpretation ruling is needed. */
1673 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1680 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1685 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1689 neg_double (int2l, int2h, &low, &hi);
1690 add_double (int1l, int1h, low, hi, &low, &hi);
1691 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1695 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1707 if (code == CEIL_DIV_EXPR)
1710 low = int1l / int2l, hi = 0;
1714 /* ... fall through ... */
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1719 if (int2h == 0 && int2l == 1)
1721 low = int1l, hi = int1h;
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1742 if (code == CEIL_MOD_EXPR)
1744 low = int1l % int2l, hi = 0;
1748 /* ... fall through ... */
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1773 low = int2l, hi = int2h;
1782 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1784 /* Propagate overflow flags ourselves. */
1785 if (((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1789 TREE_OVERFLOW (t) = 1;
1793 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1794 ((!uns || is_sizetype) && overflow)
1795 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1800 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1801 constant. We assume ARG1 and ARG2 have the same data type, or at least
1802 are the same kind of constant and the same machine mode. Return zero if
1803 combining the constants is not allowed in the current operating mode.
1805 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1808 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1810 /* Sanity check for the recursive cases. */
1817 if (TREE_CODE (arg1) == INTEGER_CST)
1818 return int_const_binop (code, arg1, arg2, notrunc);
1820 if (TREE_CODE (arg1) == REAL_CST)
1822 enum machine_mode mode;
1825 REAL_VALUE_TYPE value;
1826 REAL_VALUE_TYPE result;
1830 /* The following codes are handled by real_arithmetic. */
1845 d1 = TREE_REAL_CST (arg1);
1846 d2 = TREE_REAL_CST (arg2);
1848 type = TREE_TYPE (arg1);
1849 mode = TYPE_MODE (type);
1851 /* Don't perform operation if we honor signaling NaNs and
1852 either operand is a NaN. */
1853 if (HONOR_SNANS (mode)
1854 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1857 /* Don't perform operation if it would raise a division
1858 by zero exception. */
1859 if (code == RDIV_EXPR
1860 && REAL_VALUES_EQUAL (d2, dconst0)
1861 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1864 /* If either operand is a NaN, just return it. Otherwise, set up
1865 for floating-point trap; we return an overflow. */
1866 if (REAL_VALUE_ISNAN (d1))
1868 else if (REAL_VALUE_ISNAN (d2))
1871 inexact = real_arithmetic (&value, code, &d1, &d2);
1872 real_convert (&result, mode, &value);
1874 /* Don't constant fold this floating point operation if
1875 the result has overflowed and flag_trapping_math. */
1876 if (flag_trapping_math
1877 && MODE_HAS_INFINITIES (mode)
1878 && REAL_VALUE_ISINF (result)
1879 && !REAL_VALUE_ISINF (d1)
1880 && !REAL_VALUE_ISINF (d2))
1883 /* Don't constant fold this floating point operation if the
1884 result may dependent upon the run-time rounding mode and
1885 flag_rounding_math is set, or if GCC's software emulation
1886 is unable to accurately represent the result. */
1887 if ((flag_rounding_math
1888 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1907 /* The following codes are handled by fixed_arithmetic. */
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1935 TREE_OVERFLOW (t) = 1;
1939 if (TREE_CODE (arg1) == COMPLEX_CST)
1941 tree type = TREE_TYPE (arg1);
1942 tree r1 = TREE_REALPART (arg1);
1943 tree i1 = TREE_IMAGPART (arg1);
1944 tree r2 = TREE_REALPART (arg2);
1945 tree i2 = TREE_IMAGPART (arg2);
1952 real = const_binop (code, r1, r2, notrunc);
1953 imag = const_binop (code, i1, i2, notrunc);
1957 real = const_binop (MINUS_EXPR,
1958 const_binop (MULT_EXPR, r1, r2, notrunc),
1959 const_binop (MULT_EXPR, i1, i2, notrunc),
1961 imag = const_binop (PLUS_EXPR,
1962 const_binop (MULT_EXPR, r1, i2, notrunc),
1963 const_binop (MULT_EXPR, i1, r2, notrunc),
1970 = const_binop (PLUS_EXPR,
1971 const_binop (MULT_EXPR, r2, r2, notrunc),
1972 const_binop (MULT_EXPR, i2, i2, notrunc),
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r1, r2, notrunc),
1977 const_binop (MULT_EXPR, i1, i2, notrunc),
1980 = const_binop (MINUS_EXPR,
1981 const_binop (MULT_EXPR, i1, r2, notrunc),
1982 const_binop (MULT_EXPR, r1, i2, notrunc),
1985 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1986 code = TRUNC_DIV_EXPR;
1988 real = const_binop (code, t1, magsquared, notrunc);
1989 imag = const_binop (code, t2, magsquared, notrunc);
1998 return build_complex (type, real, imag);
2004 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2005 indicates which particular sizetype to create. */
2008 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2010 return build_int_cst (sizetype_tab[(int) kind], number);
2013 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2014 is a tree code. The type of the result is taken from the operands.
2015 Both must be equivalent integer types, ala int_binop_types_match_p.
2016 If the operands are constant, so is the result. */
2019 size_binop (enum tree_code code, tree arg0, tree arg1)
2021 tree type = TREE_TYPE (arg0);
2023 if (arg0 == error_mark_node || arg1 == error_mark_node)
2024 return error_mark_node;
2026 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2029 /* Handle the special case of two integer constants faster. */
2030 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2032 /* And some specific cases even faster than that. */
2033 if (code == PLUS_EXPR)
2035 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2037 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2040 else if (code == MINUS_EXPR)
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2045 else if (code == MULT_EXPR)
2047 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2051 /* Handle general case of two integer constants. */
2052 return int_const_binop (code, arg0, arg1, 0);
2055 return fold_build2 (code, type, arg0, arg1);
2058 /* Given two values, either both of sizetype or both of bitsizetype,
2059 compute the difference between the two values. Return the value
2060 in signed type corresponding to the type of the operands. */
2063 size_diffop (tree arg0, tree arg1)
2065 tree type = TREE_TYPE (arg0);
2068 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2071 /* If the type is already signed, just do the simple thing. */
2072 if (!TYPE_UNSIGNED (type))
2073 return size_binop (MINUS_EXPR, arg0, arg1);
2075 if (type == sizetype)
2077 else if (type == bitsizetype)
2078 ctype = sbitsizetype;
2080 ctype = signed_type_for (type);
2082 /* If either operand is not a constant, do the conversions to the signed
2083 type and subtract. The hardware will do the right thing with any
2084 overflow in the subtraction. */
2085 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2086 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2087 fold_convert (ctype, arg1));
2089 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2090 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2091 overflow) and negate (which can't either). Special-case a result
2092 of zero while we're here. */
2093 if (tree_int_cst_equal (arg0, arg1))
2094 return build_int_cst (ctype, 0);
2095 else if (tree_int_cst_lt (arg1, arg0))
2096 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2098 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2099 fold_convert (ctype, size_binop (MINUS_EXPR,
2103 /* A subroutine of fold_convert_const handling conversions of an
2104 INTEGER_CST to another integer type. */
2107 fold_convert_const_int_from_int (tree type, const_tree arg1)
2111 /* Given an integer constant, make new constant with new type,
2112 appropriately sign-extended or truncated. */
2113 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2114 TREE_INT_CST_HIGH (arg1),
2115 /* Don't set the overflow when
2116 converting from a pointer, */
2117 !POINTER_TYPE_P (TREE_TYPE (arg1))
2118 /* or to a sizetype with same signedness
2119 and the precision is unchanged.
2120 ??? sizetype is always sign-extended,
2121 but its signedness depends on the
2122 frontend. Thus we see spurious overflows
2123 here if we do not check this. */
2124 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2125 == TYPE_PRECISION (type))
2126 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2127 == TYPE_UNSIGNED (type))
2128 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2129 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2130 || (TREE_CODE (type) == INTEGER_TYPE
2131 && TYPE_IS_SIZETYPE (type)))),
2132 (TREE_INT_CST_HIGH (arg1) < 0
2133 && (TYPE_UNSIGNED (type)
2134 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2135 | TREE_OVERFLOW (arg1));
2140 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2141 to an integer type. */
2144 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2149 /* The following code implements the floating point to integer
2150 conversion rules required by the Java Language Specification,
2151 that IEEE NaNs are mapped to zero and values that overflow
2152 the target precision saturate, i.e. values greater than
2153 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2154 are mapped to INT_MIN. These semantics are allowed by the
2155 C and C++ standards that simply state that the behavior of
2156 FP-to-integer conversion is unspecified upon overflow. */
2158 HOST_WIDE_INT high, low;
2160 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2164 case FIX_TRUNC_EXPR:
2165 real_trunc (&r, VOIDmode, &x);
2172 /* If R is NaN, return zero and show we have an overflow. */
2173 if (REAL_VALUE_ISNAN (r))
2180 /* See if R is less than the lower bound or greater than the
2185 tree lt = TYPE_MIN_VALUE (type);
2186 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2187 if (REAL_VALUES_LESS (r, l))
2190 high = TREE_INT_CST_HIGH (lt);
2191 low = TREE_INT_CST_LOW (lt);
2197 tree ut = TYPE_MAX_VALUE (type);
2200 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2201 if (REAL_VALUES_LESS (u, r))
2204 high = TREE_INT_CST_HIGH (ut);
2205 low = TREE_INT_CST_LOW (ut);
2211 REAL_VALUE_TO_INT (&low, &high, r);
2213 t = force_fit_type_double (type, low, high, -1,
2214 overflow | TREE_OVERFLOW (arg1));
2218 /* A subroutine of fold_convert_const handling conversions of a
2219 FIXED_CST to an integer type. */
2222 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2225 double_int temp, temp_trunc;
2228 /* Right shift FIXED_CST to temp by fbit. */
2229 temp = TREE_FIXED_CST (arg1).data;
2230 mode = TREE_FIXED_CST (arg1).mode;
2231 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2233 lshift_double (temp.low, temp.high,
2234 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2235 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2237 /* Left shift temp to temp_trunc by fbit. */
2238 lshift_double (temp.low, temp.high,
2239 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp_trunc.low, &temp_trunc.high,
2241 SIGNED_FIXED_POINT_MODE_P (mode));
2248 temp_trunc.high = 0;
2251 /* If FIXED_CST is negative, we need to round the value toward 0.
2252 By checking if the fractional bits are not zero to add 1 to temp. */
2253 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2254 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2259 temp = double_int_add (temp, one);
2262 /* Given a fixed-point constant, make new constant with new type,
2263 appropriately sign-extended or truncated. */
2264 t = force_fit_type_double (type, temp.low, temp.high, -1,
2266 && (TYPE_UNSIGNED (type)
2267 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2268 | TREE_OVERFLOW (arg1));
2273 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2274 to another floating point type. */
2277 fold_convert_const_real_from_real (tree type, const_tree arg1)
2279 REAL_VALUE_TYPE value;
2282 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2283 t = build_real (type, value);
2285 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2289 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2290 to a floating point type. */
2293 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2295 REAL_VALUE_TYPE value;
2298 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2299 t = build_real (type, value);
2301 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2305 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2306 to another fixed-point type. */
2309 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2311 FIXED_VALUE_TYPE value;
2315 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2316 TYPE_SATURATING (type));
2317 t = build_fixed (type, value);
2319 /* Propagate overflow flags. */
2320 if (overflow_p | TREE_OVERFLOW (arg1))
2321 TREE_OVERFLOW (t) = 1;
2325 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2326 to a fixed-point type. */
2329 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2331 FIXED_VALUE_TYPE value;
2335 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2336 TREE_INT_CST (arg1),
2337 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2338 TYPE_SATURATING (type));
2339 t = build_fixed (type, value);
2341 /* Propagate overflow flags. */
2342 if (overflow_p | TREE_OVERFLOW (arg1))
2343 TREE_OVERFLOW (t) = 1;
2347 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2348 to a fixed-point type. */
2351 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2353 FIXED_VALUE_TYPE value;
2357 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2358 &TREE_REAL_CST (arg1),
2359 TYPE_SATURATING (type));
2360 t = build_fixed (type, value);
2362 /* Propagate overflow flags. */
2363 if (overflow_p | TREE_OVERFLOW (arg1))
2364 TREE_OVERFLOW (t) = 1;
2368 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2369 type TYPE. If no simplification can be done return NULL_TREE. */
2372 fold_convert_const (enum tree_code code, tree type, tree arg1)
2374 if (TREE_TYPE (arg1) == type)
2377 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2378 || TREE_CODE (type) == OFFSET_TYPE)
2380 if (TREE_CODE (arg1) == INTEGER_CST)
2381 return fold_convert_const_int_from_int (type, arg1);
2382 else if (TREE_CODE (arg1) == REAL_CST)
2383 return fold_convert_const_int_from_real (code, type, arg1);
2384 else if (TREE_CODE (arg1) == FIXED_CST)
2385 return fold_convert_const_int_from_fixed (type, arg1);
2387 else if (TREE_CODE (type) == REAL_TYPE)
2389 if (TREE_CODE (arg1) == INTEGER_CST)
2390 return build_real_from_int_cst (type, arg1);
2391 else if (TREE_CODE (arg1) == REAL_CST)
2392 return fold_convert_const_real_from_real (type, arg1);
2393 else if (TREE_CODE (arg1) == FIXED_CST)
2394 return fold_convert_const_real_from_fixed (type, arg1);
2396 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2398 if (TREE_CODE (arg1) == FIXED_CST)
2399 return fold_convert_const_fixed_from_fixed (type, arg1);
2400 else if (TREE_CODE (arg1) == INTEGER_CST)
2401 return fold_convert_const_fixed_from_int (type, arg1);
2402 else if (TREE_CODE (arg1) == REAL_CST)
2403 return fold_convert_const_fixed_from_real (type, arg1);
2408 /* Construct a vector of zero elements of vector type TYPE. */
2411 build_zero_vector (tree type)
2416 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2417 units = TYPE_VECTOR_SUBPARTS (type);
2420 for (i = 0; i < units; i++)
2421 list = tree_cons (NULL_TREE, elem, list);
2422 return build_vector (type, list);
2425 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2428 fold_convertible_p (const_tree type, const_tree arg)
2430 tree orig = TREE_TYPE (arg);
2435 if (TREE_CODE (arg) == ERROR_MARK
2436 || TREE_CODE (type) == ERROR_MARK
2437 || TREE_CODE (orig) == ERROR_MARK)
2440 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2443 switch (TREE_CODE (type))
2445 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2446 case POINTER_TYPE: case REFERENCE_TYPE:
2448 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2449 || TREE_CODE (orig) == OFFSET_TYPE)
2451 return (TREE_CODE (orig) == VECTOR_TYPE
2452 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2455 case FIXED_POINT_TYPE:
2459 return TREE_CODE (type) == TREE_CODE (orig);
2466 /* Convert expression ARG to type TYPE. Used by the middle-end for
2467 simple conversions in preference to calling the front-end's convert. */
2470 fold_convert (tree type, tree arg)
2472 tree orig = TREE_TYPE (arg);
2478 if (TREE_CODE (arg) == ERROR_MARK
2479 || TREE_CODE (type) == ERROR_MARK
2480 || TREE_CODE (orig) == ERROR_MARK)
2481 return error_mark_node;
2483 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2484 return fold_build1 (NOP_EXPR, type, arg);
2486 switch (TREE_CODE (type))
2488 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2489 case POINTER_TYPE: case REFERENCE_TYPE:
2491 if (TREE_CODE (arg) == INTEGER_CST)
2493 tem = fold_convert_const (NOP_EXPR, type, arg);
2494 if (tem != NULL_TREE)
2497 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2498 || TREE_CODE (orig) == OFFSET_TYPE)
2499 return fold_build1 (NOP_EXPR, type, arg);
2500 if (TREE_CODE (orig) == COMPLEX_TYPE)
2502 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2503 return fold_convert (type, tem);
2505 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2506 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2507 return fold_build1 (NOP_EXPR, type, arg);
2510 if (TREE_CODE (arg) == INTEGER_CST)
2512 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2513 if (tem != NULL_TREE)
2516 else if (TREE_CODE (arg) == REAL_CST)
2518 tem = fold_convert_const (NOP_EXPR, type, arg);
2519 if (tem != NULL_TREE)
2522 else if (TREE_CODE (arg) == FIXED_CST)
2524 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2525 if (tem != NULL_TREE)
2529 switch (TREE_CODE (orig))
2532 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2533 case POINTER_TYPE: case REFERENCE_TYPE:
2534 return fold_build1 (FLOAT_EXPR, type, arg);
2537 return fold_build1 (NOP_EXPR, type, arg);
2539 case FIXED_POINT_TYPE:
2540 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2543 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2544 return fold_convert (type, tem);
2550 case FIXED_POINT_TYPE:
2551 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2552 || TREE_CODE (arg) == REAL_CST)
2554 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2555 if (tem != NULL_TREE)
2559 switch (TREE_CODE (orig))
2561 case FIXED_POINT_TYPE:
2566 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2569 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2570 return fold_convert (type, tem);
2577 switch (TREE_CODE (orig))
2580 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2581 case POINTER_TYPE: case REFERENCE_TYPE:
2583 case FIXED_POINT_TYPE:
2584 return build2 (COMPLEX_EXPR, type,
2585 fold_convert (TREE_TYPE (type), arg),
2586 fold_convert (TREE_TYPE (type), integer_zero_node));
2591 if (TREE_CODE (arg) == COMPLEX_EXPR)
2593 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2594 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2595 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2598 arg = save_expr (arg);
2599 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2600 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2601 rpart = fold_convert (TREE_TYPE (type), rpart);
2602 ipart = fold_convert (TREE_TYPE (type), ipart);
2603 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2611 if (integer_zerop (arg))
2612 return build_zero_vector (type);
2613 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2614 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2615 || TREE_CODE (orig) == VECTOR_TYPE);
2616 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2619 tem = fold_ignored_result (arg);
2620 if (TREE_CODE (tem) == MODIFY_EXPR)
2622 return fold_build1 (NOP_EXPR, type, tem);
2629 /* Return false if expr can be assumed not to be an lvalue, true
2633 maybe_lvalue_p (const_tree x)
2635 /* We only need to wrap lvalue tree codes. */
2636 switch (TREE_CODE (x))
2647 case ALIGN_INDIRECT_REF:
2648 case MISALIGNED_INDIRECT_REF:
2650 case ARRAY_RANGE_REF:
2656 case PREINCREMENT_EXPR:
2657 case PREDECREMENT_EXPR:
2659 case TRY_CATCH_EXPR:
2660 case WITH_CLEANUP_EXPR:
2671 /* Assume the worst for front-end tree codes. */
2672 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2680 /* Return an expr equal to X but certainly not valid as an lvalue. */
2685 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2690 if (! maybe_lvalue_p (x))
2692 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2695 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2696 Zero means allow extended lvalues. */
2698 int pedantic_lvalues;
2700 /* When pedantic, return an expr equal to X but certainly not valid as a
2701 pedantic lvalue. Otherwise, return X. */
2704 pedantic_non_lvalue (tree x)
2706 if (pedantic_lvalues)
2707 return non_lvalue (x);
2712 /* Given a tree comparison code, return the code that is the logical inverse
2713 of the given code. It is not safe to do this for floating-point
2714 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2715 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2718 invert_tree_comparison (enum tree_code code, bool honor_nans)
2720 if (honor_nans && flag_trapping_math)
2730 return honor_nans ? UNLE_EXPR : LE_EXPR;
2732 return honor_nans ? UNLT_EXPR : LT_EXPR;
2734 return honor_nans ? UNGE_EXPR : GE_EXPR;
2736 return honor_nans ? UNGT_EXPR : GT_EXPR;
2750 return UNORDERED_EXPR;
2751 case UNORDERED_EXPR:
2752 return ORDERED_EXPR;
2758 /* Similar, but return the comparison that results if the operands are
2759 swapped. This is safe for floating-point. */
2762 swap_tree_comparison (enum tree_code code)
2769 case UNORDERED_EXPR:
2795 /* Convert a comparison tree code from an enum tree_code representation
2796 into a compcode bit-based encoding. This function is the inverse of
2797 compcode_to_comparison. */
2799 static enum comparison_code
2800 comparison_to_compcode (enum tree_code code)
2817 return COMPCODE_ORD;
2818 case UNORDERED_EXPR:
2819 return COMPCODE_UNORD;
2821 return COMPCODE_UNLT;
2823 return COMPCODE_UNEQ;
2825 return COMPCODE_UNLE;
2827 return COMPCODE_UNGT;
2829 return COMPCODE_LTGT;
2831 return COMPCODE_UNGE;
2837 /* Convert a compcode bit-based encoding of a comparison operator back
2838 to GCC's enum tree_code representation. This function is the
2839 inverse of comparison_to_compcode. */
2841 static enum tree_code
2842 compcode_to_comparison (enum comparison_code code)
2859 return ORDERED_EXPR;
2860 case COMPCODE_UNORD:
2861 return UNORDERED_EXPR;
2879 /* Return a tree for the comparison which is the combination of
2880 doing the AND or OR (depending on CODE) of the two operations LCODE
2881 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2882 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2883 if this makes the transformation invalid. */
2886 combine_comparisons (enum tree_code code, enum tree_code lcode,
2887 enum tree_code rcode, tree truth_type,
2888 tree ll_arg, tree lr_arg)
2890 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2891 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2892 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2893 enum comparison_code compcode;
2897 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2898 compcode = lcompcode & rcompcode;
2901 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2902 compcode = lcompcode | rcompcode;
2911 /* Eliminate unordered comparisons, as well as LTGT and ORD
2912 which are not used unless the mode has NaNs. */
2913 compcode &= ~COMPCODE_UNORD;
2914 if (compcode == COMPCODE_LTGT)
2915 compcode = COMPCODE_NE;
2916 else if (compcode == COMPCODE_ORD)
2917 compcode = COMPCODE_TRUE;
2919 else if (flag_trapping_math)
2921 /* Check that the original operation and the optimized ones will trap
2922 under the same condition. */
2923 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2924 && (lcompcode != COMPCODE_EQ)
2925 && (lcompcode != COMPCODE_ORD);
2926 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2927 && (rcompcode != COMPCODE_EQ)
2928 && (rcompcode != COMPCODE_ORD);
2929 bool trap = (compcode & COMPCODE_UNORD) == 0
2930 && (compcode != COMPCODE_EQ)
2931 && (compcode != COMPCODE_ORD);
2933 /* In a short-circuited boolean expression the LHS might be
2934 such that the RHS, if evaluated, will never trap. For
2935 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2936 if neither x nor y is NaN. (This is a mixed blessing: for
2937 example, the expression above will never trap, hence
2938 optimizing it to x < y would be invalid). */
2939 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2940 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2943 /* If the comparison was short-circuited, and only the RHS
2944 trapped, we may now generate a spurious trap. */
2946 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2949 /* If we changed the conditions that cause a trap, we lose. */
2950 if ((ltrap || rtrap) != trap)
2954 if (compcode == COMPCODE_TRUE)
2955 return constant_boolean_node (true, truth_type);
2956 else if (compcode == COMPCODE_FALSE)
2957 return constant_boolean_node (false, truth_type);
2959 return fold_build2 (compcode_to_comparison (compcode),
2960 truth_type, ll_arg, lr_arg);
2963 /* Return nonzero if two operands (typically of the same tree node)
2964 are necessarily equal. If either argument has side-effects this
2965 function returns zero. FLAGS modifies behavior as follows:
2967 If OEP_ONLY_CONST is set, only return nonzero for constants.
2968 This function tests whether the operands are indistinguishable;
2969 it does not test whether they are equal using C's == operation.
2970 The distinction is important for IEEE floating point, because
2971 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2972 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2974 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2975 even though it may hold multiple values during a function.
2976 This is because a GCC tree node guarantees that nothing else is
2977 executed between the evaluation of its "operands" (which may often
2978 be evaluated in arbitrary order). Hence if the operands themselves
2979 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2980 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2981 unset means assuming isochronic (or instantaneous) tree equivalence.
2982 Unless comparing arbitrary expression trees, such as from different
2983 statements, this flag can usually be left unset.
2985 If OEP_PURE_SAME is set, then pure functions with identical arguments
2986 are considered the same. It is used when the caller has other ways
2987 to ensure that global memory is unchanged in between. */
2990 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2992 /* If either is ERROR_MARK, they aren't equal. */
2993 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2996 /* Check equality of integer constants before bailing out due to
2997 precision differences. */
2998 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2999 return tree_int_cst_equal (arg0, arg1);
3001 /* If both types don't have the same signedness, then we can't consider
3002 them equal. We must check this before the STRIP_NOPS calls
3003 because they may change the signedness of the arguments. As pointers
3004 strictly don't have a signedness, require either two pointers or
3005 two non-pointers as well. */
3006 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3007 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3010 /* If both types don't have the same precision, then it is not safe
3012 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3018 /* In case both args are comparisons but with different comparison
3019 code, try to swap the comparison operands of one arg to produce
3020 a match and compare that variant. */
3021 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3022 && COMPARISON_CLASS_P (arg0)
3023 && COMPARISON_CLASS_P (arg1))
3025 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3027 if (TREE_CODE (arg0) == swap_code)
3028 return operand_equal_p (TREE_OPERAND (arg0, 0),
3029 TREE_OPERAND (arg1, 1), flags)
3030 && operand_equal_p (TREE_OPERAND (arg0, 1),
3031 TREE_OPERAND (arg1, 0), flags);
3034 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3035 /* This is needed for conversions and for COMPONENT_REF.
3036 Might as well play it safe and always test this. */
3037 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3038 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3039 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3042 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3043 We don't care about side effects in that case because the SAVE_EXPR
3044 takes care of that for us. In all other cases, two expressions are
3045 equal if they have no side effects. If we have two identical
3046 expressions with side effects that should be treated the same due
3047 to the only side effects being identical SAVE_EXPR's, that will
3048 be detected in the recursive calls below. */
3049 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3050 && (TREE_CODE (arg0) == SAVE_EXPR
3051 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3054 /* Next handle constant cases, those for which we can return 1 even
3055 if ONLY_CONST is set. */
3056 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3057 switch (TREE_CODE (arg0))
3060 return tree_int_cst_equal (arg0, arg1);
3063 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3064 TREE_FIXED_CST (arg1));
3067 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3068 TREE_REAL_CST (arg1)))
3072 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3074 /* If we do not distinguish between signed and unsigned zero,
3075 consider them equal. */
3076 if (real_zerop (arg0) && real_zerop (arg1))
3085 v1 = TREE_VECTOR_CST_ELTS (arg0);
3086 v2 = TREE_VECTOR_CST_ELTS (arg1);
3089 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3092 v1 = TREE_CHAIN (v1);
3093 v2 = TREE_CHAIN (v2);
3100 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3102 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3106 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3107 && ! memcmp (TREE_STRING_POINTER (arg0),
3108 TREE_STRING_POINTER (arg1),
3109 TREE_STRING_LENGTH (arg0)));
3112 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3118 if (flags & OEP_ONLY_CONST)
3121 /* Define macros to test an operand from arg0 and arg1 for equality and a
3122 variant that allows null and views null as being different from any
3123 non-null value. In the latter case, if either is null, the both
3124 must be; otherwise, do the normal comparison. */
3125 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3126 TREE_OPERAND (arg1, N), flags)
3128 #define OP_SAME_WITH_NULL(N) \
3129 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3130 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3132 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3135 /* Two conversions are equal only if signedness and modes match. */
3136 switch (TREE_CODE (arg0))
3139 case FIX_TRUNC_EXPR:
3140 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3141 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3151 case tcc_comparison:
3153 if (OP_SAME (0) && OP_SAME (1))
3156 /* For commutative ops, allow the other order. */
3157 return (commutative_tree_code (TREE_CODE (arg0))
3158 && operand_equal_p (TREE_OPERAND (arg0, 0),
3159 TREE_OPERAND (arg1, 1), flags)
3160 && operand_equal_p (TREE_OPERAND (arg0, 1),
3161 TREE_OPERAND (arg1, 0), flags));
3164 /* If either of the pointer (or reference) expressions we are
3165 dereferencing contain a side effect, these cannot be equal. */
3166 if (TREE_SIDE_EFFECTS (arg0)
3167 || TREE_SIDE_EFFECTS (arg1))
3170 switch (TREE_CODE (arg0))
3173 case ALIGN_INDIRECT_REF:
3174 case MISALIGNED_INDIRECT_REF:
3180 case ARRAY_RANGE_REF:
3181 /* Operands 2 and 3 may be null.
3182 Compare the array index by value if it is constant first as we
3183 may have different types but same value here. */
3185 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3186 TREE_OPERAND (arg1, 1))
3188 && OP_SAME_WITH_NULL (2)
3189 && OP_SAME_WITH_NULL (3));
3192 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3193 may be NULL when we're called to compare MEM_EXPRs. */
3194 return OP_SAME_WITH_NULL (0)
3196 && OP_SAME_WITH_NULL (2);
3199 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3205 case tcc_expression:
3206 switch (TREE_CODE (arg0))
3209 case TRUTH_NOT_EXPR:
3212 case TRUTH_ANDIF_EXPR:
3213 case TRUTH_ORIF_EXPR:
3214 return OP_SAME (0) && OP_SAME (1);
3216 case TRUTH_AND_EXPR:
3218 case TRUTH_XOR_EXPR:
3219 if (OP_SAME (0) && OP_SAME (1))
3222 /* Otherwise take into account this is a commutative operation. */
3223 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3224 TREE_OPERAND (arg1, 1), flags)
3225 && operand_equal_p (TREE_OPERAND (arg0, 1),
3226 TREE_OPERAND (arg1, 0), flags));
3229 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3236 switch (TREE_CODE (arg0))
3239 /* If the CALL_EXPRs call different functions, then they
3240 clearly can not be equal. */
3241 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3246 unsigned int cef = call_expr_flags (arg0);
3247 if (flags & OEP_PURE_SAME)
3248 cef &= ECF_CONST | ECF_PURE;
3255 /* Now see if all the arguments are the same. */
3257 const_call_expr_arg_iterator iter0, iter1;
3259 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3260 a1 = first_const_call_expr_arg (arg1, &iter1);
3262 a0 = next_const_call_expr_arg (&iter0),
3263 a1 = next_const_call_expr_arg (&iter1))
3264 if (! operand_equal_p (a0, a1, flags))
3267 /* If we get here and both argument lists are exhausted
3268 then the CALL_EXPRs are equal. */
3269 return ! (a0 || a1);
3275 case tcc_declaration:
3276 /* Consider __builtin_sqrt equal to sqrt. */
3277 return (TREE_CODE (arg0) == FUNCTION_DECL
3278 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3279 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3280 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3287 #undef OP_SAME_WITH_NULL
3290 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3291 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3293 When in doubt, return 0. */
3296 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3298 int unsignedp1, unsignedpo;
3299 tree primarg0, primarg1, primother;
3300 unsigned int correct_width;
3302 if (operand_equal_p (arg0, arg1, 0))
3305 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3306 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3309 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3310 and see if the inner values are the same. This removes any
3311 signedness comparison, which doesn't matter here. */
3312 primarg0 = arg0, primarg1 = arg1;
3313 STRIP_NOPS (primarg0);
3314 STRIP_NOPS (primarg1);
3315 if (operand_equal_p (primarg0, primarg1, 0))
3318 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3319 actual comparison operand, ARG0.
3321 First throw away any conversions to wider types
3322 already present in the operands. */
3324 primarg1 = get_narrower (arg1, &unsignedp1);
3325 primother = get_narrower (other, &unsignedpo);
3327 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3328 if (unsignedp1 == unsignedpo
3329 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3330 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3332 tree type = TREE_TYPE (arg0);
3334 /* Make sure shorter operand is extended the right way
3335 to match the longer operand. */
3336 primarg1 = fold_convert (signed_or_unsigned_type_for
3337 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3339 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3346 /* See if ARG is an expression that is either a comparison or is performing
3347 arithmetic on comparisons. The comparisons must only be comparing
3348 two different values, which will be stored in *CVAL1 and *CVAL2; if
3349 they are nonzero it means that some operands have already been found.
3350 No variables may be used anywhere else in the expression except in the
3351 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3352 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3354 If this is true, return 1. Otherwise, return zero. */
3357 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3359 enum tree_code code = TREE_CODE (arg);
3360 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3362 /* We can handle some of the tcc_expression cases here. */
3363 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3365 else if (tclass == tcc_expression
3366 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3367 || code == COMPOUND_EXPR))
3368 tclass = tcc_binary;
3370 else if (tclass == tcc_expression && code == SAVE_EXPR
3371 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3373 /* If we've already found a CVAL1 or CVAL2, this expression is
3374 two complex to handle. */
3375 if (*cval1 || *cval2)
3385 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3388 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3389 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3390 cval1, cval2, save_p));
3395 case tcc_expression:
3396 if (code == COND_EXPR)
3397 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3398 cval1, cval2, save_p)
3399 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3400 cval1, cval2, save_p)
3401 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3402 cval1, cval2, save_p));
3405 case tcc_comparison:
3406 /* First see if we can handle the first operand, then the second. For
3407 the second operand, we know *CVAL1 can't be zero. It must be that
3408 one side of the comparison is each of the values; test for the
3409 case where this isn't true by failing if the two operands
3412 if (operand_equal_p (TREE_OPERAND (arg, 0),
3413 TREE_OPERAND (arg, 1), 0))
3417 *cval1 = TREE_OPERAND (arg, 0);
3418 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3420 else if (*cval2 == 0)
3421 *cval2 = TREE_OPERAND (arg, 0);
3422 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3427 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3429 else if (*cval2 == 0)
3430 *cval2 = TREE_OPERAND (arg, 1);
3431 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3443 /* ARG is a tree that is known to contain just arithmetic operations and
3444 comparisons. Evaluate the operations in the tree substituting NEW0 for
3445 any occurrence of OLD0 as an operand of a comparison and likewise for
3449 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3451 tree type = TREE_TYPE (arg);
3452 enum tree_code code = TREE_CODE (arg);
3453 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3455 /* We can handle some of the tcc_expression cases here. */
3456 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3458 else if (tclass == tcc_expression
3459 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3460 tclass = tcc_binary;
3465 return fold_build1 (code, type,
3466 eval_subst (TREE_OPERAND (arg, 0),
3467 old0, new0, old1, new1));
3470 return fold_build2 (code, type,
3471 eval_subst (TREE_OPERAND (arg, 0),
3472 old0, new0, old1, new1),
3473 eval_subst (TREE_OPERAND (arg, 1),
3474 old0, new0, old1, new1));
3476 case tcc_expression:
3480 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3483 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3486 return fold_build3 (code, type,
3487 eval_subst (TREE_OPERAND (arg, 0),
3488 old0, new0, old1, new1),
3489 eval_subst (TREE_OPERAND (arg, 1),
3490 old0, new0, old1, new1),
3491 eval_subst (TREE_OPERAND (arg, 2),
3492 old0, new0, old1, new1));
3496 /* Fall through - ??? */
3498 case tcc_comparison:
3500 tree arg0 = TREE_OPERAND (arg, 0);
3501 tree arg1 = TREE_OPERAND (arg, 1);
3503 /* We need to check both for exact equality and tree equality. The
3504 former will be true if the operand has a side-effect. In that
3505 case, we know the operand occurred exactly once. */
3507 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3509 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3512 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3514 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3517 return fold_build2 (code, type, arg0, arg1);
3525 /* Return a tree for the case when the result of an expression is RESULT
3526 converted to TYPE and OMITTED was previously an operand of the expression
3527 but is now not needed (e.g., we folded OMITTED * 0).
3529 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3530 the conversion of RESULT to TYPE. */
3533 omit_one_operand (tree type, tree result, tree omitted)
3535 tree t = fold_convert (type, result);
3537 /* If the resulting operand is an empty statement, just return the omitted
3538 statement casted to void. */
3539 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3540 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3542 if (TREE_SIDE_EFFECTS (omitted))
3543 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3545 return non_lvalue (t);
3548 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3551 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3553 tree t = fold_convert (type, result);
3555 /* If the resulting operand is an empty statement, just return the omitted
3556 statement casted to void. */
3557 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3558 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3560 if (TREE_SIDE_EFFECTS (omitted))
3561 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3563 return pedantic_non_lvalue (t);
3566 /* Return a tree for the case when the result of an expression is RESULT
3567 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3568 of the expression but are now not needed.
3570 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3571 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3572 evaluated before OMITTED2. Otherwise, if neither has side effects,
3573 just do the conversion of RESULT to TYPE. */
3576 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3578 tree t = fold_convert (type, result);
3580 if (TREE_SIDE_EFFECTS (omitted2))
3581 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3582 if (TREE_SIDE_EFFECTS (omitted1))
3583 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3585 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3589 /* Return a simplified tree node for the truth-negation of ARG. This
3590 never alters ARG itself. We assume that ARG is an operation that
3591 returns a truth value (0 or 1).
3593 FIXME: one would think we would fold the result, but it causes
3594 problems with the dominator optimizer. */
3597 fold_truth_not_expr (tree arg)
3599 tree type = TREE_TYPE (arg);
3600 enum tree_code code = TREE_CODE (arg);
3602 /* If this is a comparison, we can simply invert it, except for
3603 floating-point non-equality comparisons, in which case we just
3604 enclose a TRUTH_NOT_EXPR around what we have. */
3606 if (TREE_CODE_CLASS (code) == tcc_comparison)
3608 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3609 if (FLOAT_TYPE_P (op_type)
3610 && flag_trapping_math
3611 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3612 && code != NE_EXPR && code != EQ_EXPR)
3616 code = invert_tree_comparison (code,
3617 HONOR_NANS (TYPE_MODE (op_type)));
3618 if (code == ERROR_MARK)
3621 return build2 (code, type,
3622 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3629 return constant_boolean_node (integer_zerop (arg), type);
3631 case TRUTH_AND_EXPR:
3632 return build2 (TRUTH_OR_EXPR, type,
3633 invert_truthvalue (TREE_OPERAND (arg, 0)),
3634 invert_truthvalue (TREE_OPERAND (arg, 1)));
3637 return build2 (TRUTH_AND_EXPR, type,
3638 invert_truthvalue (TREE_OPERAND (arg, 0)),
3639 invert_truthvalue (TREE_OPERAND (arg, 1)));
3641 case TRUTH_XOR_EXPR:
3642 /* Here we can invert either operand. We invert the first operand
3643 unless the second operand is a TRUTH_NOT_EXPR in which case our
3644 result is the XOR of the first operand with the inside of the
3645 negation of the second operand. */
3647 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3648 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3649 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3651 return build2 (TRUTH_XOR_EXPR, type,
3652 invert_truthvalue (TREE_OPERAND (arg, 0)),
3653 TREE_OPERAND (arg, 1));
3655 case TRUTH_ANDIF_EXPR:
3656 return build2 (TRUTH_ORIF_EXPR, type,
3657 invert_truthvalue (TREE_OPERAND (arg, 0)),
3658 invert_truthvalue (TREE_OPERAND (arg, 1)));
3660 case TRUTH_ORIF_EXPR:
3661 return build2 (TRUTH_ANDIF_EXPR, type,
3662 invert_truthvalue (TREE_OPERAND (arg, 0)),
3663 invert_truthvalue (TREE_OPERAND (arg, 1)));
3665 case TRUTH_NOT_EXPR:
3666 return TREE_OPERAND (arg, 0);
3670 tree arg1 = TREE_OPERAND (arg, 1);
3671 tree arg2 = TREE_OPERAND (arg, 2);
3672 /* A COND_EXPR may have a throw as one operand, which
3673 then has void type. Just leave void operands
3675 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3676 VOID_TYPE_P (TREE_TYPE (arg1))
3677 ? arg1 : invert_truthvalue (arg1),
3678 VOID_TYPE_P (TREE_TYPE (arg2))
3679 ? arg2 : invert_truthvalue (arg2));
3683 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3684 invert_truthvalue (TREE_OPERAND (arg, 1)));
3686 case NON_LVALUE_EXPR:
3687 return invert_truthvalue (TREE_OPERAND (arg, 0));
3690 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3691 return build1 (TRUTH_NOT_EXPR, type, arg);
3695 return build1 (TREE_CODE (arg), type,
3696 invert_truthvalue (TREE_OPERAND (arg, 0)));
3699 if (!integer_onep (TREE_OPERAND (arg, 1)))
3701 return build2 (EQ_EXPR, type, arg,
3702 build_int_cst (type, 0));
3705 return build1 (TRUTH_NOT_EXPR, type, arg);
3707 case CLEANUP_POINT_EXPR:
3708 return build1 (CLEANUP_POINT_EXPR, type,
3709 invert_truthvalue (TREE_OPERAND (arg, 0)));
3718 /* Return a simplified tree node for the truth-negation of ARG. This
3719 never alters ARG itself. We assume that ARG is an operation that
3720 returns a truth value (0 or 1).
3722 FIXME: one would think we would fold the result, but it causes
3723 problems with the dominator optimizer. */
3726 invert_truthvalue (tree arg)
3730 if (TREE_CODE (arg) == ERROR_MARK)
3733 tem = fold_truth_not_expr (arg);
3735 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3740 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3741 operands are another bit-wise operation with a common input. If so,
3742 distribute the bit operations to save an operation and possibly two if
3743 constants are involved. For example, convert
3744 (A | B) & (A | C) into A | (B & C)
3745 Further simplification will occur if B and C are constants.
3747 If this optimization cannot be done, 0 will be returned. */
3750 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3755 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3756 || TREE_CODE (arg0) == code
3757 || (TREE_CODE (arg0) != BIT_AND_EXPR
3758 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3761 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3763 common = TREE_OPERAND (arg0, 0);
3764 left = TREE_OPERAND (arg0, 1);
3765 right = TREE_OPERAND (arg1, 1);
3767 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3769 common = TREE_OPERAND (arg0, 0);
3770 left = TREE_OPERAND (arg0, 1);
3771 right = TREE_OPERAND (arg1, 0);
3773 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3775 common = TREE_OPERAND (arg0, 1);
3776 left = TREE_OPERAND (arg0, 0);
3777 right = TREE_OPERAND (arg1, 1);
3779 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3781 common = TREE_OPERAND (arg0, 1);
3782 left = TREE_OPERAND (arg0, 0);
3783 right = TREE_OPERAND (arg1, 0);
3788 common = fold_convert (type, common);
3789 left = fold_convert (type, left);
3790 right = fold_convert (type, right);
3791 return fold_build2 (TREE_CODE (arg0), type, common,
3792 fold_build2 (code, type, left, right));
3795 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3796 with code CODE. This optimization is unsafe. */
3798 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3800 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3801 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3803 /* (A / C) +- (B / C) -> (A +- B) / C. */
3805 && operand_equal_p (TREE_OPERAND (arg0, 1),
3806 TREE_OPERAND (arg1, 1), 0))
3807 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3808 fold_build2 (code, type,
3809 TREE_OPERAND (arg0, 0),
3810 TREE_OPERAND (arg1, 0)),
3811 TREE_OPERAND (arg0, 1));
3813 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3814 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3815 TREE_OPERAND (arg1, 0), 0)
3816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3817 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3819 REAL_VALUE_TYPE r0, r1;
3820 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3821 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3823 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3825 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3826 real_arithmetic (&r0, code, &r0, &r1);
3827 return fold_build2 (MULT_EXPR, type,
3828 TREE_OPERAND (arg0, 0),
3829 build_real (type, r0));
3835 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3836 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3839 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3840 HOST_WIDE_INT bitpos, int unsignedp)
3842 tree result, bftype;
3846 tree size = TYPE_SIZE (TREE_TYPE (inner));
3847 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3848 || POINTER_TYPE_P (TREE_TYPE (inner)))
3849 && host_integerp (size, 0)
3850 && tree_low_cst (size, 0) == bitsize)
3851 return fold_convert (type, inner);
3855 if (TYPE_PRECISION (bftype) != bitsize
3856 || TYPE_UNSIGNED (bftype) == !unsignedp)
3857 bftype = build_nonstandard_integer_type (bitsize, 0);
3859 result = build3 (BIT_FIELD_REF, bftype, inner,
3860 size_int (bitsize), bitsize_int (bitpos));
3863 result = fold_convert (type, result);
3868 /* Optimize a bit-field compare.
3870 There are two cases: First is a compare against a constant and the
3871 second is a comparison of two items where the fields are at the same
3872 bit position relative to the start of a chunk (byte, halfword, word)
3873 large enough to contain it. In these cases we can avoid the shift
3874 implicit in bitfield extractions.
3876 For constants, we emit a compare of the shifted constant with the
3877 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3878 compared. For two fields at the same position, we do the ANDs with the
3879 similar mask and compare the result of the ANDs.
3881 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3882 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3883 are the left and right operands of the comparison, respectively.
3885 If the optimization described above can be done, we return the resulting
3886 tree. Otherwise we return zero. */
3889 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3892 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3893 tree type = TREE_TYPE (lhs);
3894 tree signed_type, unsigned_type;
3895 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3896 enum machine_mode lmode, rmode, nmode;
3897 int lunsignedp, runsignedp;
3898 int lvolatilep = 0, rvolatilep = 0;
3899 tree linner, rinner = NULL_TREE;
3903 /* Get all the information about the extractions being done. If the bit size
3904 if the same as the size of the underlying object, we aren't doing an
3905 extraction at all and so can do nothing. We also don't want to
3906 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3907 then will no longer be able to replace it. */
3908 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3909 &lunsignedp, &lvolatilep, false);
3910 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3911 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3916 /* If this is not a constant, we can only do something if bit positions,
3917 sizes, and signedness are the same. */
3918 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3919 &runsignedp, &rvolatilep, false);
3921 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3922 || lunsignedp != runsignedp || offset != 0
3923 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3927 /* See if we can find a mode to refer to this field. We should be able to,
3928 but fail if we can't. */
3929 nmode = get_best_mode (lbitsize, lbitpos,
3930 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3931 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3932 TYPE_ALIGN (TREE_TYPE (rinner))),
3933 word_mode, lvolatilep || rvolatilep);
3934 if (nmode == VOIDmode)
3937 /* Set signed and unsigned types of the precision of this mode for the
3939 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3940 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3942 /* Compute the bit position and size for the new reference and our offset
3943 within it. If the new reference is the same size as the original, we
3944 won't optimize anything, so return zero. */
3945 nbitsize = GET_MODE_BITSIZE (nmode);
3946 nbitpos = lbitpos & ~ (nbitsize - 1);
3948 if (nbitsize == lbitsize)
3951 if (BYTES_BIG_ENDIAN)
3952 lbitpos = nbitsize - lbitsize - lbitpos;
3954 /* Make the mask to be used against the extracted field. */
3955 mask = build_int_cst_type (unsigned_type, -1);
3956 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3957 mask = const_binop (RSHIFT_EXPR, mask,
3958 size_int (nbitsize - lbitsize - lbitpos), 0);
3961 /* If not comparing with constant, just rework the comparison
3963 return fold_build2 (code, compare_type,
3964 fold_build2 (BIT_AND_EXPR, unsigned_type,
3965 make_bit_field_ref (linner,
3970 fold_build2 (BIT_AND_EXPR, unsigned_type,
3971 make_bit_field_ref (rinner,
3977 /* Otherwise, we are handling the constant case. See if the constant is too
3978 big for the field. Warn and return a tree of for 0 (false) if so. We do
3979 this not only for its own sake, but to avoid having to test for this
3980 error case below. If we didn't, we might generate wrong code.
3982 For unsigned fields, the constant shifted right by the field length should
3983 be all zero. For signed fields, the high-order bits should agree with
3988 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3989 fold_convert (unsigned_type, rhs),
3990 size_int (lbitsize), 0)))
3992 warning (0, "comparison is always %d due to width of bit-field",
3994 return constant_boolean_node (code == NE_EXPR, compare_type);
3999 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4000 size_int (lbitsize - 1), 0);
4001 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4003 warning (0, "comparison is always %d due to width of bit-field",
4005 return constant_boolean_node (code == NE_EXPR, compare_type);
4009 /* Single-bit compares should always be against zero. */
4010 if (lbitsize == 1 && ! integer_zerop (rhs))
4012 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4013 rhs = build_int_cst (type, 0);
4016 /* Make a new bitfield reference, shift the constant over the
4017 appropriate number of bits and mask it with the computed mask
4018 (in case this was a signed field). If we changed it, make a new one. */
4019 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4022 TREE_SIDE_EFFECTS (lhs) = 1;
4023 TREE_THIS_VOLATILE (lhs) = 1;
4026 rhs = const_binop (BIT_AND_EXPR,
4027 const_binop (LSHIFT_EXPR,
4028 fold_convert (unsigned_type, rhs),
4029 size_int (lbitpos), 0),
4032 return build2 (code, compare_type,
4033 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4037 /* Subroutine for fold_truthop: decode a field reference.
4039 If EXP is a comparison reference, we return the innermost reference.
4041 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4042 set to the starting bit number.
4044 If the innermost field can be completely contained in a mode-sized
4045 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4047 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4048 otherwise it is not changed.
4050 *PUNSIGNEDP is set to the signedness of the field.
4052 *PMASK is set to the mask used. This is either contained in a
4053 BIT_AND_EXPR or derived from the width of the field.
4055 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4057 Return 0 if this is not a component reference or is one that we can't
4058 do anything with. */
4061 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4062 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4063 int *punsignedp, int *pvolatilep,
4064 tree *pmask, tree *pand_mask)
4066 tree outer_type = 0;
4068 tree mask, inner, offset;
4070 unsigned int precision;
4072 /* All the optimizations using this function assume integer fields.
4073 There are problems with FP fields since the type_for_size call
4074 below can fail for, e.g., XFmode. */
4075 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4078 /* We are interested in the bare arrangement of bits, so strip everything
4079 that doesn't affect the machine mode. However, record the type of the
4080 outermost expression if it may matter below. */
4081 if (CONVERT_EXPR_P (exp)
4082 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4083 outer_type = TREE_TYPE (exp);
4086 if (TREE_CODE (exp) == BIT_AND_EXPR)
4088 and_mask = TREE_OPERAND (exp, 1);
4089 exp = TREE_OPERAND (exp, 0);
4090 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4091 if (TREE_CODE (and_mask) != INTEGER_CST)
4095 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4096 punsignedp, pvolatilep, false);
4097 if ((inner == exp && and_mask == 0)
4098 || *pbitsize < 0 || offset != 0
4099 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4102 /* If the number of bits in the reference is the same as the bitsize of
4103 the outer type, then the outer type gives the signedness. Otherwise
4104 (in case of a small bitfield) the signedness is unchanged. */
4105 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4106 *punsignedp = TYPE_UNSIGNED (outer_type);
4108 /* Compute the mask to access the bitfield. */
4109 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4110 precision = TYPE_PRECISION (unsigned_type);
4112 mask = build_int_cst_type (unsigned_type, -1);
4114 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4115 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4117 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4119 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4120 fold_convert (unsigned_type, and_mask), mask);
4123 *pand_mask = and_mask;
4127 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4131 all_ones_mask_p (const_tree mask, int size)
4133 tree type = TREE_TYPE (mask);
4134 unsigned int precision = TYPE_PRECISION (type);
4137 tmask = build_int_cst_type (signed_type_for (type), -1);
4140 tree_int_cst_equal (mask,
4141 const_binop (RSHIFT_EXPR,
4142 const_binop (LSHIFT_EXPR, tmask,
4143 size_int (precision - size),
4145 size_int (precision - size), 0));
4148 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4149 represents the sign bit of EXP's type. If EXP represents a sign
4150 or zero extension, also test VAL against the unextended type.
4151 The return value is the (sub)expression whose sign bit is VAL,
4152 or NULL_TREE otherwise. */
4155 sign_bit_p (tree exp, const_tree val)
4157 unsigned HOST_WIDE_INT mask_lo, lo;
4158 HOST_WIDE_INT mask_hi, hi;
4162 /* Tree EXP must have an integral type. */
4163 t = TREE_TYPE (exp);
4164 if (! INTEGRAL_TYPE_P (t))
4167 /* Tree VAL must be an integer constant. */
4168 if (TREE_CODE (val) != INTEGER_CST
4169 || TREE_OVERFLOW (val))
4172 width = TYPE_PRECISION (t);
4173 if (width > HOST_BITS_PER_WIDE_INT)
4175 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4178 mask_hi = ((unsigned HOST_WIDE_INT) -1
4179 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4185 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4188 mask_lo = ((unsigned HOST_WIDE_INT) -1
4189 >> (HOST_BITS_PER_WIDE_INT - width));
4192 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4193 treat VAL as if it were unsigned. */
4194 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4195 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4198 /* Handle extension from a narrower type. */
4199 if (TREE_CODE (exp) == NOP_EXPR
4200 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4201 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4206 /* Subroutine for fold_truthop: determine if an operand is simple enough
4207 to be evaluated unconditionally. */
4210 simple_operand_p (const_tree exp)
4212 /* Strip any conversions that don't change the machine mode. */
4215 return (CONSTANT_CLASS_P (exp)
4216 || TREE_CODE (exp) == SSA_NAME
4218 && ! TREE_ADDRESSABLE (exp)
4219 && ! TREE_THIS_VOLATILE (exp)
4220 && ! DECL_NONLOCAL (exp)
4221 /* Don't regard global variables as simple. They may be
4222 allocated in ways unknown to the compiler (shared memory,
4223 #pragma weak, etc). */
4224 && ! TREE_PUBLIC (exp)
4225 && ! DECL_EXTERNAL (exp)
4226 /* Loading a static variable is unduly expensive, but global
4227 registers aren't expensive. */
4228 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4231 /* The following functions are subroutines to fold_range_test and allow it to
4232 try to change a logical combination of comparisons into a range test.
4235 X == 2 || X == 3 || X == 4 || X == 5
4239 (unsigned) (X - 2) <= 3
4241 We describe each set of comparisons as being either inside or outside
4242 a range, using a variable named like IN_P, and then describe the
4243 range with a lower and upper bound. If one of the bounds is omitted,
4244 it represents either the highest or lowest value of the type.
4246 In the comments below, we represent a range by two numbers in brackets
4247 preceded by a "+" to designate being inside that range, or a "-" to
4248 designate being outside that range, so the condition can be inverted by
4249 flipping the prefix. An omitted bound is represented by a "-". For
4250 example, "- [-, 10]" means being outside the range starting at the lowest
4251 possible value and ending at 10, in other words, being greater than 10.
4252 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4255 We set up things so that the missing bounds are handled in a consistent
4256 manner so neither a missing bound nor "true" and "false" need to be
4257 handled using a special case. */
4259 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4260 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4261 and UPPER1_P are nonzero if the respective argument is an upper bound
4262 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4263 must be specified for a comparison. ARG1 will be converted to ARG0's
4264 type if both are specified. */
4267 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4268 tree arg1, int upper1_p)
4274 /* If neither arg represents infinity, do the normal operation.
4275 Else, if not a comparison, return infinity. Else handle the special
4276 comparison rules. Note that most of the cases below won't occur, but
4277 are handled for consistency. */
4279 if (arg0 != 0 && arg1 != 0)
4281 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4282 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4284 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4287 if (TREE_CODE_CLASS (code) != tcc_comparison)
4290 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4291 for neither. In real maths, we cannot assume open ended ranges are
4292 the same. But, this is computer arithmetic, where numbers are finite.
4293 We can therefore make the transformation of any unbounded range with
4294 the value Z, Z being greater than any representable number. This permits
4295 us to treat unbounded ranges as equal. */
4296 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4297 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4301 result = sgn0 == sgn1;
4304 result = sgn0 != sgn1;
4307 result = sgn0 < sgn1;
4310 result = sgn0 <= sgn1;
4313 result = sgn0 > sgn1;
4316 result = sgn0 >= sgn1;
4322 return constant_boolean_node (result, type);
4325 /* Given EXP, a logical expression, set the range it is testing into
4326 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4327 actually being tested. *PLOW and *PHIGH will be made of the same
4328 type as the returned expression. If EXP is not a comparison, we
4329 will most likely not be returning a useful value and range. Set
4330 *STRICT_OVERFLOW_P to true if the return value is only valid
4331 because signed overflow is undefined; otherwise, do not change
4332 *STRICT_OVERFLOW_P. */
4335 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4336 bool *strict_overflow_p)
4338 enum tree_code code;
4339 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4340 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4342 tree low, high, n_low, n_high;
4344 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4345 and see if we can refine the range. Some of the cases below may not
4346 happen, but it doesn't seem worth worrying about this. We "continue"
4347 the outer loop when we've changed something; otherwise we "break"
4348 the switch, which will "break" the while. */
4351 low = high = build_int_cst (TREE_TYPE (exp), 0);
4355 code = TREE_CODE (exp);
4356 exp_type = TREE_TYPE (exp);
4358 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4360 if (TREE_OPERAND_LENGTH (exp) > 0)
4361 arg0 = TREE_OPERAND (exp, 0);
4362 if (TREE_CODE_CLASS (code) == tcc_comparison
4363 || TREE_CODE_CLASS (code) == tcc_unary
4364 || TREE_CODE_CLASS (code) == tcc_binary)
4365 arg0_type = TREE_TYPE (arg0);
4366 if (TREE_CODE_CLASS (code) == tcc_binary
4367 || TREE_CODE_CLASS (code) == tcc_comparison
4368 || (TREE_CODE_CLASS (code) == tcc_expression
4369 && TREE_OPERAND_LENGTH (exp) > 1))
4370 arg1 = TREE_OPERAND (exp, 1);
4375 case TRUTH_NOT_EXPR:
4376 in_p = ! in_p, exp = arg0;
4379 case EQ_EXPR: case NE_EXPR:
4380 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4381 /* We can only do something if the range is testing for zero
4382 and if the second operand is an integer constant. Note that
4383 saying something is "in" the range we make is done by
4384 complementing IN_P since it will set in the initial case of
4385 being not equal to zero; "out" is leaving it alone. */
4386 if (low == 0 || high == 0
4387 || ! integer_zerop (low) || ! integer_zerop (high)
4388 || TREE_CODE (arg1) != INTEGER_CST)
4393 case NE_EXPR: /* - [c, c] */
4396 case EQ_EXPR: /* + [c, c] */
4397 in_p = ! in_p, low = high = arg1;
4399 case GT_EXPR: /* - [-, c] */
4400 low = 0, high = arg1;
4402 case GE_EXPR: /* + [c, -] */
4403 in_p = ! in_p, low = arg1, high = 0;
4405 case LT_EXPR: /* - [c, -] */
4406 low = arg1, high = 0;
4408 case LE_EXPR: /* + [-, c] */
4409 in_p = ! in_p, low = 0, high = arg1;
4415 /* If this is an unsigned comparison, we also know that EXP is
4416 greater than or equal to zero. We base the range tests we make
4417 on that fact, so we record it here so we can parse existing
4418 range tests. We test arg0_type since often the return type
4419 of, e.g. EQ_EXPR, is boolean. */
4420 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4422 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4424 build_int_cst (arg0_type, 0),
4428 in_p = n_in_p, low = n_low, high = n_high;
4430 /* If the high bound is missing, but we have a nonzero low
4431 bound, reverse the range so it goes from zero to the low bound
4433 if (high == 0 && low && ! integer_zerop (low))
4436 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4437 integer_one_node, 0);
4438 low = build_int_cst (arg0_type, 0);
4446 /* (-x) IN [a,b] -> x in [-b, -a] */
4447 n_low = range_binop (MINUS_EXPR, exp_type,
4448 build_int_cst (exp_type, 0),
4450 n_high = range_binop (MINUS_EXPR, exp_type,
4451 build_int_cst (exp_type, 0),
4453 low = n_low, high = n_high;
4459 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4460 build_int_cst (exp_type, 1));
4463 case PLUS_EXPR: case MINUS_EXPR:
4464 if (TREE_CODE (arg1) != INTEGER_CST)
4467 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4468 move a constant to the other side. */
4469 if (!TYPE_UNSIGNED (arg0_type)
4470 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4473 /* If EXP is signed, any overflow in the computation is undefined,
4474 so we don't worry about it so long as our computations on
4475 the bounds don't overflow. For unsigned, overflow is defined
4476 and this is exactly the right thing. */
4477 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4478 arg0_type, low, 0, arg1, 0);
4479 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4480 arg0_type, high, 1, arg1, 0);
4481 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4482 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4485 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4486 *strict_overflow_p = true;
4488 /* Check for an unsigned range which has wrapped around the maximum
4489 value thus making n_high < n_low, and normalize it. */
4490 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4492 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4493 integer_one_node, 0);
4494 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4495 integer_one_node, 0);
4497 /* If the range is of the form +/- [ x+1, x ], we won't
4498 be able to normalize it. But then, it represents the
4499 whole range or the empty set, so make it
4501 if (tree_int_cst_equal (n_low, low)
4502 && tree_int_cst_equal (n_high, high))
4508 low = n_low, high = n_high;
4513 CASE_CONVERT: case NON_LVALUE_EXPR:
4514 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4517 if (! INTEGRAL_TYPE_P (arg0_type)
4518 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4519 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4522 n_low = low, n_high = high;
4525 n_low = fold_convert (arg0_type, n_low);
4528 n_high = fold_convert (arg0_type, n_high);
4531 /* If we're converting arg0 from an unsigned type, to exp,
4532 a signed type, we will be doing the comparison as unsigned.
4533 The tests above have already verified that LOW and HIGH
4536 So we have to ensure that we will handle large unsigned
4537 values the same way that the current signed bounds treat
4540 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4544 /* For fixed-point modes, we need to pass the saturating flag
4545 as the 2nd parameter. */
4546 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4547 equiv_type = lang_hooks.types.type_for_mode
4548 (TYPE_MODE (arg0_type),
4549 TYPE_SATURATING (arg0_type));
4551 equiv_type = lang_hooks.types.type_for_mode
4552 (TYPE_MODE (arg0_type), 1);
4554 /* A range without an upper bound is, naturally, unbounded.
4555 Since convert would have cropped a very large value, use
4556 the max value for the destination type. */
4558 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4559 : TYPE_MAX_VALUE (arg0_type);
4561 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4562 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4563 fold_convert (arg0_type,
4565 build_int_cst (arg0_type, 1));
4567 /* If the low bound is specified, "and" the range with the
4568 range for which the original unsigned value will be
4572 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4573 1, n_low, n_high, 1,
4574 fold_convert (arg0_type,
4579 in_p = (n_in_p == in_p);
4583 /* Otherwise, "or" the range with the range of the input
4584 that will be interpreted as negative. */
4585 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4586 0, n_low, n_high, 1,
4587 fold_convert (arg0_type,
4592 in_p = (in_p != n_in_p);
4597 low = n_low, high = n_high;
4607 /* If EXP is a constant, we can evaluate whether this is true or false. */
4608 if (TREE_CODE (exp) == INTEGER_CST)
4610 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4612 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4618 *pin_p = in_p, *plow = low, *phigh = high;
4622 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4623 type, TYPE, return an expression to test if EXP is in (or out of, depending
4624 on IN_P) the range. Return 0 if the test couldn't be created. */
4627 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4629 tree etype = TREE_TYPE (exp);
4632 #ifdef HAVE_canonicalize_funcptr_for_compare
4633 /* Disable this optimization for function pointer expressions
4634 on targets that require function pointer canonicalization. */
4635 if (HAVE_canonicalize_funcptr_for_compare
4636 && TREE_CODE (etype) == POINTER_TYPE
4637 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4643 value = build_range_check (type, exp, 1, low, high);
4645 return invert_truthvalue (value);
4650 if (low == 0 && high == 0)
4651 return build_int_cst (type, 1);
4654 return fold_build2 (LE_EXPR, type, exp,
4655 fold_convert (etype, high));
4658 return fold_build2 (GE_EXPR, type, exp,
4659 fold_convert (etype, low));
4661 if (operand_equal_p (low, high, 0))
4662 return fold_build2 (EQ_EXPR, type, exp,
4663 fold_convert (etype, low));
4665 if (integer_zerop (low))
4667 if (! TYPE_UNSIGNED (etype))
4669 etype = unsigned_type_for (etype);
4670 high = fold_convert (etype, high);
4671 exp = fold_convert (etype, exp);
4673 return build_range_check (type, exp, 1, 0, high);
4676 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4677 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4679 unsigned HOST_WIDE_INT lo;
4683 prec = TYPE_PRECISION (etype);
4684 if (prec <= HOST_BITS_PER_WIDE_INT)
4687 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4691 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4692 lo = (unsigned HOST_WIDE_INT) -1;
4695 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4697 if (TYPE_UNSIGNED (etype))
4699 tree signed_etype = signed_type_for (etype);
4700 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4702 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4704 etype = signed_etype;
4705 exp = fold_convert (etype, exp);
4707 return fold_build2 (GT_EXPR, type, exp,
4708 build_int_cst (etype, 0));
4712 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4713 This requires wrap-around arithmetics for the type of the expression. */
4714 switch (TREE_CODE (etype))
4717 /* There is no requirement that LOW be within the range of ETYPE
4718 if the latter is a subtype. It must, however, be within the base
4719 type of ETYPE. So be sure we do the subtraction in that type. */
4720 if (TREE_TYPE (etype))
4721 etype = TREE_TYPE (etype);
4726 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4727 TYPE_UNSIGNED (etype));
4734 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4735 if (TREE_CODE (etype) == INTEGER_TYPE
4736 && !TYPE_OVERFLOW_WRAPS (etype))
4738 tree utype, minv, maxv;
4740 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4741 for the type in question, as we rely on this here. */
4742 utype = unsigned_type_for (etype);
4743 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4744 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4745 integer_one_node, 1);
4746 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4748 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4755 high = fold_convert (etype, high);
4756 low = fold_convert (etype, low);
4757 exp = fold_convert (etype, exp);
4759 value = const_binop (MINUS_EXPR, high, low, 0);
4762 if (POINTER_TYPE_P (etype))
4764 if (value != 0 && !TREE_OVERFLOW (value))
4766 low = fold_convert (sizetype, low);
4767 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4768 return build_range_check (type,
4769 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4770 1, build_int_cst (etype, 0), value);
4775 if (value != 0 && !TREE_OVERFLOW (value))
4776 return build_range_check (type,
4777 fold_build2 (MINUS_EXPR, etype, exp, low),
4778 1, build_int_cst (etype, 0), value);
4783 /* Return the predecessor of VAL in its type, handling the infinite case. */
4786 range_predecessor (tree val)
4788 tree type = TREE_TYPE (val);
4790 if (INTEGRAL_TYPE_P (type)
4791 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4794 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4797 /* Return the successor of VAL in its type, handling the infinite case. */
4800 range_successor (tree val)
4802 tree type = TREE_TYPE (val);
4804 if (INTEGRAL_TYPE_P (type)
4805 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4808 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4811 /* Given two ranges, see if we can merge them into one. Return 1 if we
4812 can, 0 if we can't. Set the output range into the specified parameters. */
4815 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4816 tree high0, int in1_p, tree low1, tree high1)
4824 int lowequal = ((low0 == 0 && low1 == 0)
4825 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4826 low0, 0, low1, 0)));
4827 int highequal = ((high0 == 0 && high1 == 0)
4828 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4829 high0, 1, high1, 1)));
4831 /* Make range 0 be the range that starts first, or ends last if they
4832 start at the same value. Swap them if it isn't. */
4833 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4836 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4837 high1, 1, high0, 1))))
4839 temp = in0_p, in0_p = in1_p, in1_p = temp;
4840 tem = low0, low0 = low1, low1 = tem;
4841 tem = high0, high0 = high1, high1 = tem;
4844 /* Now flag two cases, whether the ranges are disjoint or whether the
4845 second range is totally subsumed in the first. Note that the tests
4846 below are simplified by the ones above. */
4847 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4848 high0, 1, low1, 0));
4849 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4850 high1, 1, high0, 1));
4852 /* We now have four cases, depending on whether we are including or
4853 excluding the two ranges. */
4856 /* If they don't overlap, the result is false. If the second range
4857 is a subset it is the result. Otherwise, the range is from the start
4858 of the second to the end of the first. */
4860 in_p = 0, low = high = 0;
4862 in_p = 1, low = low1, high = high1;
4864 in_p = 1, low = low1, high = high0;
4867 else if (in0_p && ! in1_p)
4869 /* If they don't overlap, the result is the first range. If they are
4870 equal, the result is false. If the second range is a subset of the
4871 first, and the ranges begin at the same place, we go from just after
4872 the end of the second range to the end of the first. If the second
4873 range is not a subset of the first, or if it is a subset and both
4874 ranges end at the same place, the range starts at the start of the
4875 first range and ends just before the second range.
4876 Otherwise, we can't describe this as a single range. */
4878 in_p = 1, low = low0, high = high0;
4879 else if (lowequal && highequal)
4880 in_p = 0, low = high = 0;
4881 else if (subset && lowequal)
4883 low = range_successor (high1);
4888 /* We are in the weird situation where high0 > high1 but
4889 high1 has no successor. Punt. */
4893 else if (! subset || highequal)
4896 high = range_predecessor (low1);
4900 /* low0 < low1 but low1 has no predecessor. Punt. */
4908 else if (! in0_p && in1_p)
4910 /* If they don't overlap, the result is the second range. If the second
4911 is a subset of the first, the result is false. Otherwise,
4912 the range starts just after the first range and ends at the
4913 end of the second. */
4915 in_p = 1, low = low1, high = high1;
4916 else if (subset || highequal)
4917 in_p = 0, low = high = 0;
4920 low = range_successor (high0);
4925 /* high1 > high0 but high0 has no successor. Punt. */
4933 /* The case where we are excluding both ranges. Here the complex case
4934 is if they don't overlap. In that case, the only time we have a
4935 range is if they are adjacent. If the second is a subset of the
4936 first, the result is the first. Otherwise, the range to exclude
4937 starts at the beginning of the first range and ends at the end of the
4941 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4942 range_successor (high0),
4944 in_p = 0, low = low0, high = high1;
4947 /* Canonicalize - [min, x] into - [-, x]. */
4948 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4949 switch (TREE_CODE (TREE_TYPE (low0)))
4952 if (TYPE_PRECISION (TREE_TYPE (low0))
4953 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4957 if (tree_int_cst_equal (low0,
4958 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4962 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4963 && integer_zerop (low0))
4970 /* Canonicalize - [x, max] into - [x, -]. */
4971 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4972 switch (TREE_CODE (TREE_TYPE (high1)))
4975 if (TYPE_PRECISION (TREE_TYPE (high1))
4976 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4980 if (tree_int_cst_equal (high1,
4981 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4985 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4986 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4988 integer_one_node, 1)))
4995 /* The ranges might be also adjacent between the maximum and
4996 minimum values of the given type. For
4997 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4998 return + [x + 1, y - 1]. */
4999 if (low0 == 0 && high1 == 0)
5001 low = range_successor (high0);
5002 high = range_predecessor (low1);
5003 if (low == 0 || high == 0)
5013 in_p = 0, low = low0, high = high0;
5015 in_p = 0, low = low0, high = high1;
5018 *pin_p = in_p, *plow = low, *phigh = high;
5023 /* Subroutine of fold, looking inside expressions of the form
5024 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5025 of the COND_EXPR. This function is being used also to optimize
5026 A op B ? C : A, by reversing the comparison first.
5028 Return a folded expression whose code is not a COND_EXPR
5029 anymore, or NULL_TREE if no folding opportunity is found. */
5032 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5034 enum tree_code comp_code = TREE_CODE (arg0);
5035 tree arg00 = TREE_OPERAND (arg0, 0);
5036 tree arg01 = TREE_OPERAND (arg0, 1);
5037 tree arg1_type = TREE_TYPE (arg1);
5043 /* If we have A op 0 ? A : -A, consider applying the following
5046 A == 0? A : -A same as -A
5047 A != 0? A : -A same as A
5048 A >= 0? A : -A same as abs (A)
5049 A > 0? A : -A same as abs (A)
5050 A <= 0? A : -A same as -abs (A)
5051 A < 0? A : -A same as -abs (A)
5053 None of these transformations work for modes with signed
5054 zeros. If A is +/-0, the first two transformations will
5055 change the sign of the result (from +0 to -0, or vice
5056 versa). The last four will fix the sign of the result,
5057 even though the original expressions could be positive or
5058 negative, depending on the sign of A.
5060 Note that all these transformations are correct if A is
5061 NaN, since the two alternatives (A and -A) are also NaNs. */
5062 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5063 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5064 ? real_zerop (arg01)
5065 : integer_zerop (arg01))
5066 && ((TREE_CODE (arg2) == NEGATE_EXPR
5067 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5068 /* In the case that A is of the form X-Y, '-A' (arg2) may
5069 have already been folded to Y-X, check for that. */
5070 || (TREE_CODE (arg1) == MINUS_EXPR
5071 && TREE_CODE (arg2) == MINUS_EXPR
5072 && operand_equal_p (TREE_OPERAND (arg1, 0),
5073 TREE_OPERAND (arg2, 1), 0)
5074 && operand_equal_p (TREE_OPERAND (arg1, 1),
5075 TREE_OPERAND (arg2, 0), 0))))
5080 tem = fold_convert (arg1_type, arg1);
5081 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5084 return pedantic_non_lvalue (fold_convert (type, arg1));
5087 if (flag_trapping_math)
5092 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5093 arg1 = fold_convert (signed_type_for
5094 (TREE_TYPE (arg1)), arg1);
5095 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5096 return pedantic_non_lvalue (fold_convert (type, tem));
5099 if (flag_trapping_math)
5103 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5104 arg1 = fold_convert (signed_type_for
5105 (TREE_TYPE (arg1)), arg1);
5106 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5107 return negate_expr (fold_convert (type, tem));
5109 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5113 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5114 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5115 both transformations are correct when A is NaN: A != 0
5116 is then true, and A == 0 is false. */
5118 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5119 && integer_zerop (arg01) && integer_zerop (arg2))
5121 if (comp_code == NE_EXPR)
5122 return pedantic_non_lvalue (fold_convert (type, arg1));
5123 else if (comp_code == EQ_EXPR)
5124 return build_int_cst (type, 0);
5127 /* Try some transformations of A op B ? A : B.
5129 A == B? A : B same as B
5130 A != B? A : B same as A
5131 A >= B? A : B same as max (A, B)
5132 A > B? A : B same as max (B, A)
5133 A <= B? A : B same as min (A, B)
5134 A < B? A : B same as min (B, A)
5136 As above, these transformations don't work in the presence
5137 of signed zeros. For example, if A and B are zeros of
5138 opposite sign, the first two transformations will change
5139 the sign of the result. In the last four, the original
5140 expressions give different results for (A=+0, B=-0) and
5141 (A=-0, B=+0), but the transformed expressions do not.
5143 The first two transformations are correct if either A or B
5144 is a NaN. In the first transformation, the condition will
5145 be false, and B will indeed be chosen. In the case of the
5146 second transformation, the condition A != B will be true,
5147 and A will be chosen.
5149 The conversions to max() and min() are not correct if B is
5150 a number and A is not. The conditions in the original
5151 expressions will be false, so all four give B. The min()
5152 and max() versions would give a NaN instead. */
5153 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5154 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5155 /* Avoid these transformations if the COND_EXPR may be used
5156 as an lvalue in the C++ front-end. PR c++/19199. */
5158 || (strcmp (lang_hooks.name, "GNU C++") != 0
5159 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5160 || ! maybe_lvalue_p (arg1)
5161 || ! maybe_lvalue_p (arg2)))
5163 tree comp_op0 = arg00;
5164 tree comp_op1 = arg01;
5165 tree comp_type = TREE_TYPE (comp_op0);
5167 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5168 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5178 return pedantic_non_lvalue (fold_convert (type, arg2));
5180 return pedantic_non_lvalue (fold_convert (type, arg1));
5185 /* In C++ a ?: expression can be an lvalue, so put the
5186 operand which will be used if they are equal first
5187 so that we can convert this back to the
5188 corresponding COND_EXPR. */
5189 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5191 comp_op0 = fold_convert (comp_type, comp_op0);
5192 comp_op1 = fold_convert (comp_type, comp_op1);
5193 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5194 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5195 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5196 return pedantic_non_lvalue (fold_convert (type, tem));
5203 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5205 comp_op0 = fold_convert (comp_type, comp_op0);
5206 comp_op1 = fold_convert (comp_type, comp_op1);
5207 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5208 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5209 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5210 return pedantic_non_lvalue (fold_convert (type, tem));
5214 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5215 return pedantic_non_lvalue (fold_convert (type, arg2));
5218 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5219 return pedantic_non_lvalue (fold_convert (type, arg1));
5222 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5227 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5228 we might still be able to simplify this. For example,
5229 if C1 is one less or one more than C2, this might have started
5230 out as a MIN or MAX and been transformed by this function.
5231 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5233 if (INTEGRAL_TYPE_P (type)
5234 && TREE_CODE (arg01) == INTEGER_CST
5235 && TREE_CODE (arg2) == INTEGER_CST)
5239 /* We can replace A with C1 in this case. */
5240 arg1 = fold_convert (type, arg01);
5241 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5244 /* If C1 is C2 + 1, this is min(A, C2). */
5245 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5247 && operand_equal_p (arg01,
5248 const_binop (PLUS_EXPR, arg2,
5249 build_int_cst (type, 1), 0),
5251 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5253 fold_convert (type, arg1),
5258 /* If C1 is C2 - 1, this is min(A, C2). */
5259 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5261 && operand_equal_p (arg01,
5262 const_binop (MINUS_EXPR, arg2,
5263 build_int_cst (type, 1), 0),
5265 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5267 fold_convert (type, arg1),
5272 /* If C1 is C2 - 1, this is max(A, C2). */
5273 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5275 && operand_equal_p (arg01,
5276 const_binop (MINUS_EXPR, arg2,
5277 build_int_cst (type, 1), 0),
5279 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5281 fold_convert (type, arg1),
5286 /* If C1 is C2 + 1, this is max(A, C2). */
5287 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5289 && operand_equal_p (arg01,
5290 const_binop (PLUS_EXPR, arg2,
5291 build_int_cst (type, 1), 0),
5293 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5295 fold_convert (type, arg1),
5309 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5310 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5311 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5315 /* EXP is some logical combination of boolean tests. See if we can
5316 merge it into some range test. Return the new tree if so. */
5319 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5321 int or_op = (code == TRUTH_ORIF_EXPR
5322 || code == TRUTH_OR_EXPR);
5323 int in0_p, in1_p, in_p;
5324 tree low0, low1, low, high0, high1, high;
5325 bool strict_overflow_p = false;
5326 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5327 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5329 const char * const warnmsg = G_("assuming signed overflow does not occur "
5330 "when simplifying range test");
5332 /* If this is an OR operation, invert both sides; we will invert
5333 again at the end. */
5335 in0_p = ! in0_p, in1_p = ! in1_p;
5337 /* If both expressions are the same, if we can merge the ranges, and we
5338 can build the range test, return it or it inverted. If one of the
5339 ranges is always true or always false, consider it to be the same
5340 expression as the other. */
5341 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5342 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5344 && 0 != (tem = (build_range_check (type,
5346 : rhs != 0 ? rhs : integer_zero_node,
5349 if (strict_overflow_p)
5350 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5351 return or_op ? invert_truthvalue (tem) : tem;
5354 /* On machines where the branch cost is expensive, if this is a
5355 short-circuited branch and the underlying object on both sides
5356 is the same, make a non-short-circuit operation. */
5357 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5358 && lhs != 0 && rhs != 0
5359 && (code == TRUTH_ANDIF_EXPR
5360 || code == TRUTH_ORIF_EXPR)
5361 && operand_equal_p (lhs, rhs, 0))
5363 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5364 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5365 which cases we can't do this. */
5366 if (simple_operand_p (lhs))
5367 return build2 (code == TRUTH_ANDIF_EXPR
5368 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5371 else if (lang_hooks.decls.global_bindings_p () == 0
5372 && ! CONTAINS_PLACEHOLDER_P (lhs))
5374 tree common = save_expr (lhs);
5376 if (0 != (lhs = build_range_check (type, common,
5377 or_op ? ! in0_p : in0_p,
5379 && (0 != (rhs = build_range_check (type, common,
5380 or_op ? ! in1_p : in1_p,
5383 if (strict_overflow_p)
5384 fold_overflow_warning (warnmsg,
5385 WARN_STRICT_OVERFLOW_COMPARISON);
5386 return build2 (code == TRUTH_ANDIF_EXPR
5387 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5396 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5397 bit value. Arrange things so the extra bits will be set to zero if and
5398 only if C is signed-extended to its full width. If MASK is nonzero,
5399 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5402 unextend (tree c, int p, int unsignedp, tree mask)
5404 tree type = TREE_TYPE (c);
5405 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5408 if (p == modesize || unsignedp)
5411 /* We work by getting just the sign bit into the low-order bit, then
5412 into the high-order bit, then sign-extend. We then XOR that value
5414 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5415 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5417 /* We must use a signed type in order to get an arithmetic right shift.
5418 However, we must also avoid introducing accidental overflows, so that
5419 a subsequent call to integer_zerop will work. Hence we must
5420 do the type conversion here. At this point, the constant is either
5421 zero or one, and the conversion to a signed type can never overflow.
5422 We could get an overflow if this conversion is done anywhere else. */
5423 if (TYPE_UNSIGNED (type))
5424 temp = fold_convert (signed_type_for (type), temp);
5426 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5427 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5429 temp = const_binop (BIT_AND_EXPR, temp,
5430 fold_convert (TREE_TYPE (c), mask), 0);
5431 /* If necessary, convert the type back to match the type of C. */
5432 if (TYPE_UNSIGNED (type))
5433 temp = fold_convert (type, temp);
5435 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5438 /* Find ways of folding logical expressions of LHS and RHS:
5439 Try to merge two comparisons to the same innermost item.
5440 Look for range tests like "ch >= '0' && ch <= '9'".
5441 Look for combinations of simple terms on machines with expensive branches
5442 and evaluate the RHS unconditionally.
5444 For example, if we have p->a == 2 && p->b == 4 and we can make an
5445 object large enough to span both A and B, we can do this with a comparison
5446 against the object ANDed with the a mask.
5448 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5449 operations to do this with one comparison.
5451 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5452 function and the one above.
5454 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5455 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5457 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5460 We return the simplified tree or 0 if no optimization is possible. */
5463 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5465 /* If this is the "or" of two comparisons, we can do something if
5466 the comparisons are NE_EXPR. If this is the "and", we can do something
5467 if the comparisons are EQ_EXPR. I.e.,
5468 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5470 WANTED_CODE is this operation code. For single bit fields, we can
5471 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5472 comparison for one-bit fields. */
5474 enum tree_code wanted_code;
5475 enum tree_code lcode, rcode;
5476 tree ll_arg, lr_arg, rl_arg, rr_arg;
5477 tree ll_inner, lr_inner, rl_inner, rr_inner;
5478 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5479 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5480 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5481 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5482 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5483 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5484 enum machine_mode lnmode, rnmode;
5485 tree ll_mask, lr_mask, rl_mask, rr_mask;
5486 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5487 tree l_const, r_const;
5488 tree lntype, rntype, result;
5489 HOST_WIDE_INT first_bit, end_bit;
5491 tree orig_lhs = lhs, orig_rhs = rhs;
5492 enum tree_code orig_code = code;
5494 /* Start by getting the comparison codes. Fail if anything is volatile.
5495 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5496 it were surrounded with a NE_EXPR. */
5498 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5501 lcode = TREE_CODE (lhs);
5502 rcode = TREE_CODE (rhs);
5504 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5506 lhs = build2 (NE_EXPR, truth_type, lhs,
5507 build_int_cst (TREE_TYPE (lhs), 0));
5511 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5513 rhs = build2 (NE_EXPR, truth_type, rhs,
5514 build_int_cst (TREE_TYPE (rhs), 0));
5518 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5519 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5522 ll_arg = TREE_OPERAND (lhs, 0);
5523 lr_arg = TREE_OPERAND (lhs, 1);
5524 rl_arg = TREE_OPERAND (rhs, 0);
5525 rr_arg = TREE_OPERAND (rhs, 1);
5527 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5528 if (simple_operand_p (ll_arg)
5529 && simple_operand_p (lr_arg))
5532 if (operand_equal_p (ll_arg, rl_arg, 0)
5533 && operand_equal_p (lr_arg, rr_arg, 0))
5535 result = combine_comparisons (code, lcode, rcode,
5536 truth_type, ll_arg, lr_arg);
5540 else if (operand_equal_p (ll_arg, rr_arg, 0)
5541 && operand_equal_p (lr_arg, rl_arg, 0))
5543 result = combine_comparisons (code, lcode,
5544 swap_tree_comparison (rcode),
5545 truth_type, ll_arg, lr_arg);
5551 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5552 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5554 /* If the RHS can be evaluated unconditionally and its operands are
5555 simple, it wins to evaluate the RHS unconditionally on machines
5556 with expensive branches. In this case, this isn't a comparison
5557 that can be merged. Avoid doing this if the RHS is a floating-point
5558 comparison since those can trap. */
5560 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5562 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5563 && simple_operand_p (rl_arg)
5564 && simple_operand_p (rr_arg))
5566 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5567 if (code == TRUTH_OR_EXPR
5568 && lcode == NE_EXPR && integer_zerop (lr_arg)
5569 && rcode == NE_EXPR && integer_zerop (rr_arg)
5570 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5571 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5572 return build2 (NE_EXPR, truth_type,
5573 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5575 build_int_cst (TREE_TYPE (ll_arg), 0));
5577 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5578 if (code == TRUTH_AND_EXPR
5579 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5580 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5581 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5582 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5583 return build2 (EQ_EXPR, truth_type,
5584 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5586 build_int_cst (TREE_TYPE (ll_arg), 0));
5588 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5590 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5591 return build2 (code, truth_type, lhs, rhs);
5596 /* See if the comparisons can be merged. Then get all the parameters for
5599 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5600 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5604 ll_inner = decode_field_reference (ll_arg,
5605 &ll_bitsize, &ll_bitpos, &ll_mode,
5606 &ll_unsignedp, &volatilep, &ll_mask,
5608 lr_inner = decode_field_reference (lr_arg,
5609 &lr_bitsize, &lr_bitpos, &lr_mode,
5610 &lr_unsignedp, &volatilep, &lr_mask,
5612 rl_inner = decode_field_reference (rl_arg,
5613 &rl_bitsize, &rl_bitpos, &rl_mode,
5614 &rl_unsignedp, &volatilep, &rl_mask,
5616 rr_inner = decode_field_reference (rr_arg,
5617 &rr_bitsize, &rr_bitpos, &rr_mode,
5618 &rr_unsignedp, &volatilep, &rr_mask,
5621 /* It must be true that the inner operation on the lhs of each
5622 comparison must be the same if we are to be able to do anything.
5623 Then see if we have constants. If not, the same must be true for
5625 if (volatilep || ll_inner == 0 || rl_inner == 0
5626 || ! operand_equal_p (ll_inner, rl_inner, 0))
5629 if (TREE_CODE (lr_arg) == INTEGER_CST
5630 && TREE_CODE (rr_arg) == INTEGER_CST)
5631 l_const = lr_arg, r_const = rr_arg;
5632 else if (lr_inner == 0 || rr_inner == 0
5633 || ! operand_equal_p (lr_inner, rr_inner, 0))
5636 l_const = r_const = 0;
5638 /* If either comparison code is not correct for our logical operation,
5639 fail. However, we can convert a one-bit comparison against zero into
5640 the opposite comparison against that bit being set in the field. */
5642 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5643 if (lcode != wanted_code)
5645 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5647 /* Make the left operand unsigned, since we are only interested
5648 in the value of one bit. Otherwise we are doing the wrong
5657 /* This is analogous to the code for l_const above. */
5658 if (rcode != wanted_code)
5660 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5669 /* See if we can find a mode that contains both fields being compared on
5670 the left. If we can't, fail. Otherwise, update all constants and masks
5671 to be relative to a field of that size. */
5672 first_bit = MIN (ll_bitpos, rl_bitpos);
5673 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5674 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5675 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5677 if (lnmode == VOIDmode)
5680 lnbitsize = GET_MODE_BITSIZE (lnmode);
5681 lnbitpos = first_bit & ~ (lnbitsize - 1);
5682 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5683 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5685 if (BYTES_BIG_ENDIAN)
5687 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5688 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5691 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5692 size_int (xll_bitpos), 0);
5693 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5694 size_int (xrl_bitpos), 0);
5698 l_const = fold_convert (lntype, l_const);
5699 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5700 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5701 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5702 fold_build1 (BIT_NOT_EXPR,
5706 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5708 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5713 r_const = fold_convert (lntype, r_const);
5714 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5715 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5716 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5717 fold_build1 (BIT_NOT_EXPR,
5721 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5723 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5727 /* If the right sides are not constant, do the same for it. Also,
5728 disallow this optimization if a size or signedness mismatch occurs
5729 between the left and right sides. */
5732 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5733 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5734 /* Make sure the two fields on the right
5735 correspond to the left without being swapped. */
5736 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5739 first_bit = MIN (lr_bitpos, rr_bitpos);
5740 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5741 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5742 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5744 if (rnmode == VOIDmode)
5747 rnbitsize = GET_MODE_BITSIZE (rnmode);
5748 rnbitpos = first_bit & ~ (rnbitsize - 1);
5749 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5750 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5752 if (BYTES_BIG_ENDIAN)
5754 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5755 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5758 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5759 size_int (xlr_bitpos), 0);
5760 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5761 size_int (xrr_bitpos), 0);
5763 /* Make a mask that corresponds to both fields being compared.
5764 Do this for both items being compared. If the operands are the
5765 same size and the bits being compared are in the same position
5766 then we can do this by masking both and comparing the masked
5768 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5769 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5770 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5772 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5773 ll_unsignedp || rl_unsignedp);
5774 if (! all_ones_mask_p (ll_mask, lnbitsize))
5775 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5777 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5778 lr_unsignedp || rr_unsignedp);
5779 if (! all_ones_mask_p (lr_mask, rnbitsize))
5780 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5782 return build2 (wanted_code, truth_type, lhs, rhs);
5785 /* There is still another way we can do something: If both pairs of
5786 fields being compared are adjacent, we may be able to make a wider
5787 field containing them both.
5789 Note that we still must mask the lhs/rhs expressions. Furthermore,
5790 the mask must be shifted to account for the shift done by
5791 make_bit_field_ref. */
5792 if ((ll_bitsize + ll_bitpos == rl_bitpos
5793 && lr_bitsize + lr_bitpos == rr_bitpos)
5794 || (ll_bitpos == rl_bitpos + rl_bitsize
5795 && lr_bitpos == rr_bitpos + rr_bitsize))
5799 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5800 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5801 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5802 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5804 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5805 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5806 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5807 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5809 /* Convert to the smaller type before masking out unwanted bits. */
5811 if (lntype != rntype)
5813 if (lnbitsize > rnbitsize)
5815 lhs = fold_convert (rntype, lhs);
5816 ll_mask = fold_convert (rntype, ll_mask);
5819 else if (lnbitsize < rnbitsize)
5821 rhs = fold_convert (lntype, rhs);
5822 lr_mask = fold_convert (lntype, lr_mask);
5827 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5828 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5830 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5831 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5833 return build2 (wanted_code, truth_type, lhs, rhs);
5839 /* Handle the case of comparisons with constants. If there is something in
5840 common between the masks, those bits of the constants must be the same.
5841 If not, the condition is always false. Test for this to avoid generating
5842 incorrect code below. */
5843 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5844 if (! integer_zerop (result)
5845 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5846 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5848 if (wanted_code == NE_EXPR)
5850 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5851 return constant_boolean_node (true, truth_type);
5855 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5856 return constant_boolean_node (false, truth_type);
5860 /* Construct the expression we will return. First get the component
5861 reference we will make. Unless the mask is all ones the width of
5862 that field, perform the mask operation. Then compare with the
5864 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5865 ll_unsignedp || rl_unsignedp);
5867 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5868 if (! all_ones_mask_p (ll_mask, lnbitsize))
5869 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5871 return build2 (wanted_code, truth_type, result,
5872 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5875 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5879 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5882 enum tree_code op_code;
5885 int consts_equal, consts_lt;
5888 STRIP_SIGN_NOPS (arg0);
5890 op_code = TREE_CODE (arg0);
5891 minmax_const = TREE_OPERAND (arg0, 1);
5892 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5893 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5894 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5895 inner = TREE_OPERAND (arg0, 0);
5897 /* If something does not permit us to optimize, return the original tree. */
5898 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5899 || TREE_CODE (comp_const) != INTEGER_CST
5900 || TREE_OVERFLOW (comp_const)
5901 || TREE_CODE (minmax_const) != INTEGER_CST
5902 || TREE_OVERFLOW (minmax_const))
5905 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5906 and GT_EXPR, doing the rest with recursive calls using logical
5910 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5912 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5915 return invert_truthvalue (tem);
5921 fold_build2 (TRUTH_ORIF_EXPR, type,
5922 optimize_minmax_comparison
5923 (EQ_EXPR, type, arg0, comp_const),
5924 optimize_minmax_comparison
5925 (GT_EXPR, type, arg0, comp_const));
5928 if (op_code == MAX_EXPR && consts_equal)
5929 /* MAX (X, 0) == 0 -> X <= 0 */
5930 return fold_build2 (LE_EXPR, type, inner, comp_const);
5932 else if (op_code == MAX_EXPR && consts_lt)
5933 /* MAX (X, 0) == 5 -> X == 5 */
5934 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5936 else if (op_code == MAX_EXPR)
5937 /* MAX (X, 0) == -1 -> false */
5938 return omit_one_operand (type, integer_zero_node, inner);
5940 else if (consts_equal)
5941 /* MIN (X, 0) == 0 -> X >= 0 */
5942 return fold_build2 (GE_EXPR, type, inner, comp_const);
5945 /* MIN (X, 0) == 5 -> false */
5946 return omit_one_operand (type, integer_zero_node, inner);
5949 /* MIN (X, 0) == -1 -> X == -1 */
5950 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5953 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5954 /* MAX (X, 0) > 0 -> X > 0
5955 MAX (X, 0) > 5 -> X > 5 */
5956 return fold_build2 (GT_EXPR, type, inner, comp_const);
5958 else if (op_code == MAX_EXPR)
5959 /* MAX (X, 0) > -1 -> true */
5960 return omit_one_operand (type, integer_one_node, inner);
5962 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5963 /* MIN (X, 0) > 0 -> false
5964 MIN (X, 0) > 5 -> false */
5965 return omit_one_operand (type, integer_zero_node, inner);
5968 /* MIN (X, 0) > -1 -> X > -1 */
5969 return fold_build2 (GT_EXPR, type, inner, comp_const);
5976 /* T is an integer expression that is being multiplied, divided, or taken a
5977 modulus (CODE says which and what kind of divide or modulus) by a
5978 constant C. See if we can eliminate that operation by folding it with
5979 other operations already in T. WIDE_TYPE, if non-null, is a type that
5980 should be used for the computation if wider than our type.
5982 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5983 (X * 2) + (Y * 4). We must, however, be assured that either the original
5984 expression would not overflow or that overflow is undefined for the type
5985 in the language in question.
5987 If we return a non-null expression, it is an equivalent form of the
5988 original computation, but need not be in the original type.
5990 We set *STRICT_OVERFLOW_P to true if the return values depends on
5991 signed overflow being undefined. Otherwise we do not change
5992 *STRICT_OVERFLOW_P. */
5995 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5996 bool *strict_overflow_p)
5998 /* To avoid exponential search depth, refuse to allow recursion past
5999 three levels. Beyond that (1) it's highly unlikely that we'll find
6000 something interesting and (2) we've probably processed it before
6001 when we built the inner expression. */
6010 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6017 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6018 bool *strict_overflow_p)
6020 tree type = TREE_TYPE (t);
6021 enum tree_code tcode = TREE_CODE (t);
6022 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6023 > GET_MODE_SIZE (TYPE_MODE (type)))
6024 ? wide_type : type);
6026 int same_p = tcode == code;
6027 tree op0 = NULL_TREE, op1 = NULL_TREE;
6028 bool sub_strict_overflow_p;
6030 /* Don't deal with constants of zero here; they confuse the code below. */
6031 if (integer_zerop (c))
6034 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6035 op0 = TREE_OPERAND (t, 0);
6037 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6038 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6040 /* Note that we need not handle conditional operations here since fold
6041 already handles those cases. So just do arithmetic here. */
6045 /* For a constant, we can always simplify if we are a multiply
6046 or (for divide and modulus) if it is a multiple of our constant. */
6047 if (code == MULT_EXPR
6048 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6049 return const_binop (code, fold_convert (ctype, t),
6050 fold_convert (ctype, c), 0);
6053 CASE_CONVERT: case NON_LVALUE_EXPR:
6054 /* If op0 is an expression ... */
6055 if ((COMPARISON_CLASS_P (op0)
6056 || UNARY_CLASS_P (op0)
6057 || BINARY_CLASS_P (op0)
6058 || VL_EXP_CLASS_P (op0)
6059 || EXPRESSION_CLASS_P (op0))
6060 /* ... and has wrapping overflow, and its type is smaller
6061 than ctype, then we cannot pass through as widening. */
6062 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6063 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6064 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6065 && (TYPE_PRECISION (ctype)
6066 > TYPE_PRECISION (TREE_TYPE (op0))))
6067 /* ... or this is a truncation (t is narrower than op0),
6068 then we cannot pass through this narrowing. */
6069 || (TYPE_PRECISION (type)
6070 < TYPE_PRECISION (TREE_TYPE (op0)))
6071 /* ... or signedness changes for division or modulus,
6072 then we cannot pass through this conversion. */
6073 || (code != MULT_EXPR
6074 && (TYPE_UNSIGNED (ctype)
6075 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6076 /* ... or has undefined overflow while the converted to
6077 type has not, we cannot do the operation in the inner type
6078 as that would introduce undefined overflow. */
6079 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6080 && !TYPE_OVERFLOW_UNDEFINED (type))))
6083 /* Pass the constant down and see if we can make a simplification. If
6084 we can, replace this expression with the inner simplification for
6085 possible later conversion to our or some other type. */
6086 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6087 && TREE_CODE (t2) == INTEGER_CST
6088 && !TREE_OVERFLOW (t2)
6089 && (0 != (t1 = extract_muldiv (op0, t2, code,
6091 ? ctype : NULL_TREE,
6092 strict_overflow_p))))
6097 /* If widening the type changes it from signed to unsigned, then we
6098 must avoid building ABS_EXPR itself as unsigned. */
6099 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6101 tree cstype = (*signed_type_for) (ctype);
6102 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6105 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6106 return fold_convert (ctype, t1);
6110 /* If the constant is negative, we cannot simplify this. */
6111 if (tree_int_cst_sgn (c) == -1)
6115 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6117 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6120 case MIN_EXPR: case MAX_EXPR:
6121 /* If widening the type changes the signedness, then we can't perform
6122 this optimization as that changes the result. */
6123 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6126 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6127 sub_strict_overflow_p = false;
6128 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6129 &sub_strict_overflow_p)) != 0
6130 && (t2 = extract_muldiv (op1, c, code, wide_type,
6131 &sub_strict_overflow_p)) != 0)
6133 if (tree_int_cst_sgn (c) < 0)
6134 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6135 if (sub_strict_overflow_p)
6136 *strict_overflow_p = true;
6137 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6138 fold_convert (ctype, t2));
6142 case LSHIFT_EXPR: case RSHIFT_EXPR:
6143 /* If the second operand is constant, this is a multiplication
6144 or floor division, by a power of two, so we can treat it that
6145 way unless the multiplier or divisor overflows. Signed
6146 left-shift overflow is implementation-defined rather than
6147 undefined in C90, so do not convert signed left shift into
6149 if (TREE_CODE (op1) == INTEGER_CST
6150 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6151 /* const_binop may not detect overflow correctly,
6152 so check for it explicitly here. */
6153 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6154 && TREE_INT_CST_HIGH (op1) == 0
6155 && 0 != (t1 = fold_convert (ctype,
6156 const_binop (LSHIFT_EXPR,
6159 && !TREE_OVERFLOW (t1))
6160 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6161 ? MULT_EXPR : FLOOR_DIV_EXPR,
6162 ctype, fold_convert (ctype, op0), t1),
6163 c, code, wide_type, strict_overflow_p);
6166 case PLUS_EXPR: case MINUS_EXPR:
6167 /* See if we can eliminate the operation on both sides. If we can, we
6168 can return a new PLUS or MINUS. If we can't, the only remaining
6169 cases where we can do anything are if the second operand is a
6171 sub_strict_overflow_p = false;
6172 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6173 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6174 if (t1 != 0 && t2 != 0
6175 && (code == MULT_EXPR
6176 /* If not multiplication, we can only do this if both operands
6177 are divisible by c. */
6178 || (multiple_of_p (ctype, op0, c)
6179 && multiple_of_p (ctype, op1, c))))
6181 if (sub_strict_overflow_p)
6182 *strict_overflow_p = true;
6183 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6184 fold_convert (ctype, t2));
6187 /* If this was a subtraction, negate OP1 and set it to be an addition.
6188 This simplifies the logic below. */
6189 if (tcode == MINUS_EXPR)
6190 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6192 if (TREE_CODE (op1) != INTEGER_CST)
6195 /* If either OP1 or C are negative, this optimization is not safe for
6196 some of the division and remainder types while for others we need
6197 to change the code. */
6198 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6200 if (code == CEIL_DIV_EXPR)
6201 code = FLOOR_DIV_EXPR;
6202 else if (code == FLOOR_DIV_EXPR)
6203 code = CEIL_DIV_EXPR;
6204 else if (code != MULT_EXPR
6205 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6209 /* If it's a multiply or a division/modulus operation of a multiple
6210 of our constant, do the operation and verify it doesn't overflow. */
6211 if (code == MULT_EXPR
6212 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6214 op1 = const_binop (code, fold_convert (ctype, op1),
6215 fold_convert (ctype, c), 0);
6216 /* We allow the constant to overflow with wrapping semantics. */
6218 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6224 /* If we have an unsigned type is not a sizetype, we cannot widen
6225 the operation since it will change the result if the original
6226 computation overflowed. */
6227 if (TYPE_UNSIGNED (ctype)
6228 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6232 /* If we were able to eliminate our operation from the first side,
6233 apply our operation to the second side and reform the PLUS. */
6234 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6235 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6237 /* The last case is if we are a multiply. In that case, we can
6238 apply the distributive law to commute the multiply and addition
6239 if the multiplication of the constants doesn't overflow. */
6240 if (code == MULT_EXPR)
6241 return fold_build2 (tcode, ctype,
6242 fold_build2 (code, ctype,
6243 fold_convert (ctype, op0),
6244 fold_convert (ctype, c)),
6250 /* We have a special case here if we are doing something like
6251 (C * 8) % 4 since we know that's zero. */
6252 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6253 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6254 /* If the multiplication can overflow we cannot optimize this.
6255 ??? Until we can properly mark individual operations as
6256 not overflowing we need to treat sizetype special here as
6257 stor-layout relies on this opimization to make
6258 DECL_FIELD_BIT_OFFSET always a constant. */
6259 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6260 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6261 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6262 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6263 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6265 *strict_overflow_p = true;
6266 return omit_one_operand (type, integer_zero_node, op0);
6269 /* ... fall through ... */
6271 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6272 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6273 /* If we can extract our operation from the LHS, do so and return a
6274 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6275 do something only if the second operand is a constant. */
6277 && (t1 = extract_muldiv (op0, c, code, wide_type,
6278 strict_overflow_p)) != 0)
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6280 fold_convert (ctype, op1));
6281 else if (tcode == MULT_EXPR && code == MULT_EXPR
6282 && (t1 = extract_muldiv (op1, c, code, wide_type,
6283 strict_overflow_p)) != 0)
6284 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6285 fold_convert (ctype, t1));
6286 else if (TREE_CODE (op1) != INTEGER_CST)
6289 /* If these are the same operation types, we can associate them
6290 assuming no overflow. */
6292 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6293 fold_convert (ctype, c), 1))
6294 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6295 TREE_INT_CST_HIGH (t1),
6296 (TYPE_UNSIGNED (ctype)
6297 && tcode != MULT_EXPR) ? -1 : 1,
6298 TREE_OVERFLOW (t1)))
6299 && !TREE_OVERFLOW (t1))
6300 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6302 /* If these operations "cancel" each other, we have the main
6303 optimizations of this pass, which occur when either constant is a
6304 multiple of the other, in which case we replace this with either an
6305 operation or CODE or TCODE.
6307 If we have an unsigned type that is not a sizetype, we cannot do
6308 this since it will change the result if the original computation
6310 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6311 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6312 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6313 || (tcode == MULT_EXPR
6314 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6315 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6316 && code != MULT_EXPR)))
6318 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6320 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6321 *strict_overflow_p = true;
6322 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6323 fold_convert (ctype,
6324 const_binop (TRUNC_DIV_EXPR,
6327 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6329 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6330 *strict_overflow_p = true;
6331 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6332 fold_convert (ctype,
6333 const_binop (TRUNC_DIV_EXPR,
6346 /* Return a node which has the indicated constant VALUE (either 0 or
6347 1), and is of the indicated TYPE. */
6350 constant_boolean_node (int value, tree type)
6352 if (type == integer_type_node)
6353 return value ? integer_one_node : integer_zero_node;
6354 else if (type == boolean_type_node)
6355 return value ? boolean_true_node : boolean_false_node;
6357 return build_int_cst (type, value);
6361 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6362 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6363 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6364 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6365 COND is the first argument to CODE; otherwise (as in the example
6366 given here), it is the second argument. TYPE is the type of the
6367 original expression. Return NULL_TREE if no simplification is
6371 fold_binary_op_with_conditional_arg (enum tree_code code,
6372 tree type, tree op0, tree op1,
6373 tree cond, tree arg, int cond_first_p)
6375 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6376 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6377 tree test, true_value, false_value;
6378 tree lhs = NULL_TREE;
6379 tree rhs = NULL_TREE;
6381 /* This transformation is only worthwhile if we don't have to wrap
6382 arg in a SAVE_EXPR, and the operation can be simplified on at least
6383 one of the branches once its pushed inside the COND_EXPR. */
6384 if (!TREE_CONSTANT (arg))
6387 if (TREE_CODE (cond) == COND_EXPR)
6389 test = TREE_OPERAND (cond, 0);
6390 true_value = TREE_OPERAND (cond, 1);
6391 false_value = TREE_OPERAND (cond, 2);
6392 /* If this operand throws an expression, then it does not make
6393 sense to try to perform a logical or arithmetic operation
6395 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6397 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6402 tree testtype = TREE_TYPE (cond);
6404 true_value = constant_boolean_node (true, testtype);
6405 false_value = constant_boolean_node (false, testtype);
6408 arg = fold_convert (arg_type, arg);
6411 true_value = fold_convert (cond_type, true_value);
6413 lhs = fold_build2 (code, type, true_value, arg);
6415 lhs = fold_build2 (code, type, arg, true_value);
6419 false_value = fold_convert (cond_type, false_value);
6421 rhs = fold_build2 (code, type, false_value, arg);
6423 rhs = fold_build2 (code, type, arg, false_value);
6426 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6427 return fold_convert (type, test);
6431 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6433 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6434 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6435 ADDEND is the same as X.
6437 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6438 and finite. The problematic cases are when X is zero, and its mode
6439 has signed zeros. In the case of rounding towards -infinity,
6440 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6441 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6444 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6446 if (!real_zerop (addend))
6449 /* Don't allow the fold with -fsignaling-nans. */
6450 if (HONOR_SNANS (TYPE_MODE (type)))
6453 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6454 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6457 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6458 if (TREE_CODE (addend) == REAL_CST
6459 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6462 /* The mode has signed zeros, and we have to honor their sign.
6463 In this situation, there is only one case we can return true for.
6464 X - 0 is the same as X unless rounding towards -infinity is
6466 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6469 /* Subroutine of fold() that checks comparisons of built-in math
6470 functions against real constants.
6472 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6473 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6474 is the type of the result and ARG0 and ARG1 are the operands of the
6475 comparison. ARG1 must be a TREE_REAL_CST.
6477 The function returns the constant folded tree if a simplification
6478 can be made, and NULL_TREE otherwise. */
6481 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6482 tree type, tree arg0, tree arg1)
6486 if (BUILTIN_SQRT_P (fcode))
6488 tree arg = CALL_EXPR_ARG (arg0, 0);
6489 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6491 c = TREE_REAL_CST (arg1);
6492 if (REAL_VALUE_NEGATIVE (c))
6494 /* sqrt(x) < y is always false, if y is negative. */
6495 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6496 return omit_one_operand (type, integer_zero_node, arg);
6498 /* sqrt(x) > y is always true, if y is negative and we
6499 don't care about NaNs, i.e. negative values of x. */
6500 if (code == NE_EXPR || !HONOR_NANS (mode))
6501 return omit_one_operand (type, integer_one_node, arg);
6503 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6504 return fold_build2 (GE_EXPR, type, arg,
6505 build_real (TREE_TYPE (arg), dconst0));
6507 else if (code == GT_EXPR || code == GE_EXPR)
6511 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6512 real_convert (&c2, mode, &c2);
6514 if (REAL_VALUE_ISINF (c2))
6516 /* sqrt(x) > y is x == +Inf, when y is very large. */
6517 if (HONOR_INFINITIES (mode))
6518 return fold_build2 (EQ_EXPR, type, arg,
6519 build_real (TREE_TYPE (arg), c2));
6521 /* sqrt(x) > y is always false, when y is very large
6522 and we don't care about infinities. */
6523 return omit_one_operand (type, integer_zero_node, arg);
6526 /* sqrt(x) > c is the same as x > c*c. */
6527 return fold_build2 (code, type, arg,
6528 build_real (TREE_TYPE (arg), c2));
6530 else if (code == LT_EXPR || code == LE_EXPR)
6534 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6535 real_convert (&c2, mode, &c2);
6537 if (REAL_VALUE_ISINF (c2))
6539 /* sqrt(x) < y is always true, when y is a very large
6540 value and we don't care about NaNs or Infinities. */
6541 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6542 return omit_one_operand (type, integer_one_node, arg);
6544 /* sqrt(x) < y is x != +Inf when y is very large and we
6545 don't care about NaNs. */
6546 if (! HONOR_NANS (mode))
6547 return fold_build2 (NE_EXPR, type, arg,
6548 build_real (TREE_TYPE (arg), c2));
6550 /* sqrt(x) < y is x >= 0 when y is very large and we
6551 don't care about Infinities. */
6552 if (! HONOR_INFINITIES (mode))
6553 return fold_build2 (GE_EXPR, type, arg,
6554 build_real (TREE_TYPE (arg), dconst0));
6556 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6557 if (lang_hooks.decls.global_bindings_p () != 0
6558 || CONTAINS_PLACEHOLDER_P (arg))
6561 arg = save_expr (arg);
6562 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6563 fold_build2 (GE_EXPR, type, arg,
6564 build_real (TREE_TYPE (arg),
6566 fold_build2 (NE_EXPR, type, arg,
6567 build_real (TREE_TYPE (arg),
6571 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6572 if (! HONOR_NANS (mode))
6573 return fold_build2 (code, type, arg,
6574 build_real (TREE_TYPE (arg), c2));
6576 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6577 if (lang_hooks.decls.global_bindings_p () == 0
6578 && ! CONTAINS_PLACEHOLDER_P (arg))
6580 arg = save_expr (arg);
6581 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6582 fold_build2 (GE_EXPR, type, arg,
6583 build_real (TREE_TYPE (arg),
6585 fold_build2 (code, type, arg,
6586 build_real (TREE_TYPE (arg),
6595 /* Subroutine of fold() that optimizes comparisons against Infinities,
6596 either +Inf or -Inf.
6598 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6599 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6600 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6602 The function returns the constant folded tree if a simplification
6603 can be made, and NULL_TREE otherwise. */
6606 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6608 enum machine_mode mode;
6609 REAL_VALUE_TYPE max;
6613 mode = TYPE_MODE (TREE_TYPE (arg0));
6615 /* For negative infinity swap the sense of the comparison. */
6616 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6618 code = swap_tree_comparison (code);
6623 /* x > +Inf is always false, if with ignore sNANs. */
6624 if (HONOR_SNANS (mode))
6626 return omit_one_operand (type, integer_zero_node, arg0);
6629 /* x <= +Inf is always true, if we don't case about NaNs. */
6630 if (! HONOR_NANS (mode))
6631 return omit_one_operand (type, integer_one_node, arg0);
6633 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6634 if (lang_hooks.decls.global_bindings_p () == 0
6635 && ! CONTAINS_PLACEHOLDER_P (arg0))
6637 arg0 = save_expr (arg0);
6638 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6644 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6645 real_maxval (&max, neg, mode);
6646 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6647 arg0, build_real (TREE_TYPE (arg0), max));
6650 /* x < +Inf is always equal to x <= DBL_MAX. */
6651 real_maxval (&max, neg, mode);
6652 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6653 arg0, build_real (TREE_TYPE (arg0), max));
6656 /* x != +Inf is always equal to !(x > DBL_MAX). */
6657 real_maxval (&max, neg, mode);
6658 if (! HONOR_NANS (mode))
6659 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6660 arg0, build_real (TREE_TYPE (arg0), max));
6662 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6663 arg0, build_real (TREE_TYPE (arg0), max));
6664 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6673 /* Subroutine of fold() that optimizes comparisons of a division by
6674 a nonzero integer constant against an integer constant, i.e.
6677 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6678 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6679 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6681 The function returns the constant folded tree if a simplification
6682 can be made, and NULL_TREE otherwise. */
6685 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6687 tree prod, tmp, hi, lo;
6688 tree arg00 = TREE_OPERAND (arg0, 0);
6689 tree arg01 = TREE_OPERAND (arg0, 1);
6690 unsigned HOST_WIDE_INT lpart;
6691 HOST_WIDE_INT hpart;
6692 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6696 /* We have to do this the hard way to detect unsigned overflow.
6697 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6698 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6699 TREE_INT_CST_HIGH (arg01),
6700 TREE_INT_CST_LOW (arg1),
6701 TREE_INT_CST_HIGH (arg1),
6702 &lpart, &hpart, unsigned_p);
6703 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6705 neg_overflow = false;
6709 tmp = int_const_binop (MINUS_EXPR, arg01,
6710 build_int_cst (TREE_TYPE (arg01), 1), 0);
6713 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6714 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6715 TREE_INT_CST_HIGH (prod),
6716 TREE_INT_CST_LOW (tmp),
6717 TREE_INT_CST_HIGH (tmp),
6718 &lpart, &hpart, unsigned_p);
6719 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6720 -1, overflow | TREE_OVERFLOW (prod));
6722 else if (tree_int_cst_sgn (arg01) >= 0)
6724 tmp = int_const_binop (MINUS_EXPR, arg01,
6725 build_int_cst (TREE_TYPE (arg01), 1), 0);
6726 switch (tree_int_cst_sgn (arg1))
6729 neg_overflow = true;
6730 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6735 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6740 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6750 /* A negative divisor reverses the relational operators. */
6751 code = swap_tree_comparison (code);
6753 tmp = int_const_binop (PLUS_EXPR, arg01,
6754 build_int_cst (TREE_TYPE (arg01), 1), 0);
6755 switch (tree_int_cst_sgn (arg1))
6758 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6763 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6768 neg_overflow = true;
6769 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6781 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6782 return omit_one_operand (type, integer_zero_node, arg00);
6783 if (TREE_OVERFLOW (hi))
6784 return fold_build2 (GE_EXPR, type, arg00, lo);
6785 if (TREE_OVERFLOW (lo))
6786 return fold_build2 (LE_EXPR, type, arg00, hi);
6787 return build_range_check (type, arg00, 1, lo, hi);
6790 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6791 return omit_one_operand (type, integer_one_node, arg00);
6792 if (TREE_OVERFLOW (hi))
6793 return fold_build2 (LT_EXPR, type, arg00, lo);
6794 if (TREE_OVERFLOW (lo))
6795 return fold_build2 (GT_EXPR, type, arg00, hi);
6796 return build_range_check (type, arg00, 0, lo, hi);
6799 if (TREE_OVERFLOW (lo))
6801 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6802 return omit_one_operand (type, tmp, arg00);
6804 return fold_build2 (LT_EXPR, type, arg00, lo);
6807 if (TREE_OVERFLOW (hi))
6809 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6810 return omit_one_operand (type, tmp, arg00);
6812 return fold_build2 (LE_EXPR, type, arg00, hi);
6815 if (TREE_OVERFLOW (hi))
6817 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6818 return omit_one_operand (type, tmp, arg00);
6820 return fold_build2 (GT_EXPR, type, arg00, hi);
6823 if (TREE_OVERFLOW (lo))
6825 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6826 return omit_one_operand (type, tmp, arg00);
6828 return fold_build2 (GE_EXPR, type, arg00, lo);
6838 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6839 equality/inequality test, then return a simplified form of the test
6840 using a sign testing. Otherwise return NULL. TYPE is the desired
6844 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6847 /* If this is testing a single bit, we can optimize the test. */
6848 if ((code == NE_EXPR || code == EQ_EXPR)
6849 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6850 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6852 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6853 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6854 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6856 if (arg00 != NULL_TREE
6857 /* This is only a win if casting to a signed type is cheap,
6858 i.e. when arg00's type is not a partial mode. */
6859 && TYPE_PRECISION (TREE_TYPE (arg00))
6860 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6862 tree stype = signed_type_for (TREE_TYPE (arg00));
6863 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6864 result_type, fold_convert (stype, arg00),
6865 build_int_cst (stype, 0));
6872 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6873 equality/inequality test, then return a simplified form of
6874 the test using shifts and logical operations. Otherwise return
6875 NULL. TYPE is the desired result type. */
6878 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6881 /* If this is testing a single bit, we can optimize the test. */
6882 if ((code == NE_EXPR || code == EQ_EXPR)
6883 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6884 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6886 tree inner = TREE_OPERAND (arg0, 0);
6887 tree type = TREE_TYPE (arg0);
6888 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6889 enum machine_mode operand_mode = TYPE_MODE (type);
6891 tree signed_type, unsigned_type, intermediate_type;
6894 /* First, see if we can fold the single bit test into a sign-bit
6896 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6901 /* Otherwise we have (A & C) != 0 where C is a single bit,
6902 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6903 Similarly for (A & C) == 0. */
6905 /* If INNER is a right shift of a constant and it plus BITNUM does
6906 not overflow, adjust BITNUM and INNER. */
6907 if (TREE_CODE (inner) == RSHIFT_EXPR
6908 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6909 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6910 && bitnum < TYPE_PRECISION (type)
6911 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6912 bitnum - TYPE_PRECISION (type)))
6914 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6915 inner = TREE_OPERAND (inner, 0);
6918 /* If we are going to be able to omit the AND below, we must do our
6919 operations as unsigned. If we must use the AND, we have a choice.
6920 Normally unsigned is faster, but for some machines signed is. */
6921 #ifdef LOAD_EXTEND_OP
6922 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6923 && !flag_syntax_only) ? 0 : 1;
6928 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6929 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6930 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6931 inner = fold_convert (intermediate_type, inner);
6934 inner = build2 (RSHIFT_EXPR, intermediate_type,
6935 inner, size_int (bitnum));
6937 one = build_int_cst (intermediate_type, 1);
6939 if (code == EQ_EXPR)
6940 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6942 /* Put the AND last so it can combine with more things. */
6943 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6945 /* Make sure to return the proper type. */
6946 inner = fold_convert (result_type, inner);
6953 /* Check whether we are allowed to reorder operands arg0 and arg1,
6954 such that the evaluation of arg1 occurs before arg0. */
6957 reorder_operands_p (const_tree arg0, const_tree arg1)
6959 if (! flag_evaluation_order)
6961 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6963 return ! TREE_SIDE_EFFECTS (arg0)
6964 && ! TREE_SIDE_EFFECTS (arg1);
6967 /* Test whether it is preferable two swap two operands, ARG0 and
6968 ARG1, for example because ARG0 is an integer constant and ARG1
6969 isn't. If REORDER is true, only recommend swapping if we can
6970 evaluate the operands in reverse order. */
6973 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6975 STRIP_SIGN_NOPS (arg0);
6976 STRIP_SIGN_NOPS (arg1);
6978 if (TREE_CODE (arg1) == INTEGER_CST)
6980 if (TREE_CODE (arg0) == INTEGER_CST)
6983 if (TREE_CODE (arg1) == REAL_CST)
6985 if (TREE_CODE (arg0) == REAL_CST)
6988 if (TREE_CODE (arg1) == FIXED_CST)
6990 if (TREE_CODE (arg0) == FIXED_CST)
6993 if (TREE_CODE (arg1) == COMPLEX_CST)
6995 if (TREE_CODE (arg0) == COMPLEX_CST)
6998 if (TREE_CONSTANT (arg1))
7000 if (TREE_CONSTANT (arg0))
7003 if (optimize_function_for_size_p (cfun))
7006 if (reorder && flag_evaluation_order
7007 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7010 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7011 for commutative and comparison operators. Ensuring a canonical
7012 form allows the optimizers to find additional redundancies without
7013 having to explicitly check for both orderings. */
7014 if (TREE_CODE (arg0) == SSA_NAME
7015 && TREE_CODE (arg1) == SSA_NAME
7016 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7019 /* Put SSA_NAMEs last. */
7020 if (TREE_CODE (arg1) == SSA_NAME)
7022 if (TREE_CODE (arg0) == SSA_NAME)
7025 /* Put variables last. */
7034 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7035 ARG0 is extended to a wider type. */
7038 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7040 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7042 tree shorter_type, outer_type;
7046 if (arg0_unw == arg0)
7048 shorter_type = TREE_TYPE (arg0_unw);
7050 #ifdef HAVE_canonicalize_funcptr_for_compare
7051 /* Disable this optimization if we're casting a function pointer
7052 type on targets that require function pointer canonicalization. */
7053 if (HAVE_canonicalize_funcptr_for_compare
7054 && TREE_CODE (shorter_type) == POINTER_TYPE
7055 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7059 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7062 arg1_unw = get_unwidened (arg1, NULL_TREE);
7064 /* If possible, express the comparison in the shorter mode. */
7065 if ((code == EQ_EXPR || code == NE_EXPR
7066 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7067 && (TREE_TYPE (arg1_unw) == shorter_type
7068 || ((TYPE_PRECISION (shorter_type)
7069 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7070 && (TYPE_UNSIGNED (shorter_type)
7071 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7072 || (TREE_CODE (arg1_unw) == INTEGER_CST
7073 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7074 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7075 && int_fits_type_p (arg1_unw, shorter_type))))
7076 return fold_build2 (code, type, arg0_unw,
7077 fold_convert (shorter_type, arg1_unw));
7079 if (TREE_CODE (arg1_unw) != INTEGER_CST
7080 || TREE_CODE (shorter_type) != INTEGER_TYPE
7081 || !int_fits_type_p (arg1_unw, shorter_type))
7084 /* If we are comparing with the integer that does not fit into the range
7085 of the shorter type, the result is known. */
7086 outer_type = TREE_TYPE (arg1_unw);
7087 min = lower_bound_in_type (outer_type, shorter_type);
7088 max = upper_bound_in_type (outer_type, shorter_type);
7090 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7092 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7099 return omit_one_operand (type, integer_zero_node, arg0);
7104 return omit_one_operand (type, integer_one_node, arg0);
7110 return omit_one_operand (type, integer_one_node, arg0);
7112 return omit_one_operand (type, integer_zero_node, arg0);
7117 return omit_one_operand (type, integer_zero_node, arg0);
7119 return omit_one_operand (type, integer_one_node, arg0);
7128 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7129 ARG0 just the signedness is changed. */
7132 fold_sign_changed_comparison (enum tree_code code, tree type,
7133 tree arg0, tree arg1)
7136 tree inner_type, outer_type;
7138 if (!CONVERT_EXPR_P (arg0))
7141 outer_type = TREE_TYPE (arg0);
7142 arg0_inner = TREE_OPERAND (arg0, 0);
7143 inner_type = TREE_TYPE (arg0_inner);
7145 #ifdef HAVE_canonicalize_funcptr_for_compare
7146 /* Disable this optimization if we're casting a function pointer
7147 type on targets that require function pointer canonicalization. */
7148 if (HAVE_canonicalize_funcptr_for_compare
7149 && TREE_CODE (inner_type) == POINTER_TYPE
7150 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7154 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7157 /* If the conversion is from an integral subtype to its basetype
7159 if (TREE_TYPE (inner_type) == outer_type)
7162 if (TREE_CODE (arg1) != INTEGER_CST
7163 && !(CONVERT_EXPR_P (arg1)
7164 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7167 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7168 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7173 if (TREE_CODE (arg1) == INTEGER_CST)
7174 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7175 TREE_INT_CST_HIGH (arg1), 0,
7176 TREE_OVERFLOW (arg1));
7178 arg1 = fold_convert (inner_type, arg1);
7180 return fold_build2 (code, type, arg0_inner, arg1);
7183 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7184 step of the array. Reconstructs s and delta in the case of s * delta
7185 being an integer constant (and thus already folded).
7186 ADDR is the address. MULT is the multiplicative expression.
7187 If the function succeeds, the new address expression is returned. Otherwise
7188 NULL_TREE is returned. */
7191 try_move_mult_to_index (tree addr, tree op1)
7193 tree s, delta, step;
7194 tree ref = TREE_OPERAND (addr, 0), pref;
7199 /* Strip the nops that might be added when converting op1 to sizetype. */
7202 /* Canonicalize op1 into a possibly non-constant delta
7203 and an INTEGER_CST s. */
7204 if (TREE_CODE (op1) == MULT_EXPR)
7206 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7211 if (TREE_CODE (arg0) == INTEGER_CST)
7216 else if (TREE_CODE (arg1) == INTEGER_CST)
7224 else if (TREE_CODE (op1) == INTEGER_CST)
7231 /* Simulate we are delta * 1. */
7233 s = integer_one_node;
7236 for (;; ref = TREE_OPERAND (ref, 0))
7238 if (TREE_CODE (ref) == ARRAY_REF)
7240 /* Remember if this was a multi-dimensional array. */
7241 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7244 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7248 step = array_ref_element_size (ref);
7249 if (TREE_CODE (step) != INTEGER_CST)
7254 if (! tree_int_cst_equal (step, s))
7259 /* Try if delta is a multiple of step. */
7260 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7266 /* Only fold here if we can verify we do not overflow one
7267 dimension of a multi-dimensional array. */
7272 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7273 || !INTEGRAL_TYPE_P (itype)
7274 || !TYPE_MAX_VALUE (itype)
7275 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7278 tmp = fold_binary (PLUS_EXPR, itype,
7279 fold_convert (itype,
7280 TREE_OPERAND (ref, 1)),
7281 fold_convert (itype, delta));
7283 || TREE_CODE (tmp) != INTEGER_CST
7284 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7293 if (!handled_component_p (ref))
7297 /* We found the suitable array reference. So copy everything up to it,
7298 and replace the index. */
7300 pref = TREE_OPERAND (addr, 0);
7301 ret = copy_node (pref);
7306 pref = TREE_OPERAND (pref, 0);
7307 TREE_OPERAND (pos, 0) = copy_node (pref);
7308 pos = TREE_OPERAND (pos, 0);
7311 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7312 fold_convert (itype,
7313 TREE_OPERAND (pos, 1)),
7314 fold_convert (itype, delta));
7316 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7320 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7321 means A >= Y && A != MAX, but in this case we know that
7322 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7325 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7327 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7329 if (TREE_CODE (bound) == LT_EXPR)
7330 a = TREE_OPERAND (bound, 0);
7331 else if (TREE_CODE (bound) == GT_EXPR)
7332 a = TREE_OPERAND (bound, 1);
7336 typea = TREE_TYPE (a);
7337 if (!INTEGRAL_TYPE_P (typea)
7338 && !POINTER_TYPE_P (typea))
7341 if (TREE_CODE (ineq) == LT_EXPR)
7343 a1 = TREE_OPERAND (ineq, 1);
7344 y = TREE_OPERAND (ineq, 0);
7346 else if (TREE_CODE (ineq) == GT_EXPR)
7348 a1 = TREE_OPERAND (ineq, 0);
7349 y = TREE_OPERAND (ineq, 1);
7354 if (TREE_TYPE (a1) != typea)
7357 if (POINTER_TYPE_P (typea))
7359 /* Convert the pointer types into integer before taking the difference. */
7360 tree ta = fold_convert (ssizetype, a);
7361 tree ta1 = fold_convert (ssizetype, a1);
7362 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7365 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7367 if (!diff || !integer_onep (diff))
7370 return fold_build2 (GE_EXPR, type, a, y);
7373 /* Fold a sum or difference of at least one multiplication.
7374 Returns the folded tree or NULL if no simplification could be made. */
7377 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7379 tree arg00, arg01, arg10, arg11;
7380 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7382 /* (A * C) +- (B * C) -> (A+-B) * C.
7383 (A * C) +- A -> A * (C+-1).
7384 We are most concerned about the case where C is a constant,
7385 but other combinations show up during loop reduction. Since
7386 it is not difficult, try all four possibilities. */
7388 if (TREE_CODE (arg0) == MULT_EXPR)
7390 arg00 = TREE_OPERAND (arg0, 0);
7391 arg01 = TREE_OPERAND (arg0, 1);
7393 else if (TREE_CODE (arg0) == INTEGER_CST)
7395 arg00 = build_one_cst (type);
7400 /* We cannot generate constant 1 for fract. */
7401 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7404 arg01 = build_one_cst (type);
7406 if (TREE_CODE (arg1) == MULT_EXPR)
7408 arg10 = TREE_OPERAND (arg1, 0);
7409 arg11 = TREE_OPERAND (arg1, 1);
7411 else if (TREE_CODE (arg1) == INTEGER_CST)
7413 arg10 = build_one_cst (type);
7414 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7415 the purpose of this canonicalization. */
7416 if (TREE_INT_CST_HIGH (arg1) == -1
7417 && negate_expr_p (arg1)
7418 && code == PLUS_EXPR)
7420 arg11 = negate_expr (arg1);
7428 /* We cannot generate constant 1 for fract. */
7429 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7432 arg11 = build_one_cst (type);
7436 if (operand_equal_p (arg01, arg11, 0))
7437 same = arg01, alt0 = arg00, alt1 = arg10;
7438 else if (operand_equal_p (arg00, arg10, 0))
7439 same = arg00, alt0 = arg01, alt1 = arg11;
7440 else if (operand_equal_p (arg00, arg11, 0))
7441 same = arg00, alt0 = arg01, alt1 = arg10;
7442 else if (operand_equal_p (arg01, arg10, 0))
7443 same = arg01, alt0 = arg00, alt1 = arg11;
7445 /* No identical multiplicands; see if we can find a common
7446 power-of-two factor in non-power-of-two multiplies. This
7447 can help in multi-dimensional array access. */
7448 else if (host_integerp (arg01, 0)
7449 && host_integerp (arg11, 0))
7451 HOST_WIDE_INT int01, int11, tmp;
7454 int01 = TREE_INT_CST_LOW (arg01);
7455 int11 = TREE_INT_CST_LOW (arg11);
7457 /* Move min of absolute values to int11. */
7458 if ((int01 >= 0 ? int01 : -int01)
7459 < (int11 >= 0 ? int11 : -int11))
7461 tmp = int01, int01 = int11, int11 = tmp;
7462 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7469 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7470 /* The remainder should not be a constant, otherwise we
7471 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7472 increased the number of multiplications necessary. */
7473 && TREE_CODE (arg10) != INTEGER_CST)
7475 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7476 build_int_cst (TREE_TYPE (arg00),
7481 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7486 return fold_build2 (MULT_EXPR, type,
7487 fold_build2 (code, type,
7488 fold_convert (type, alt0),
7489 fold_convert (type, alt1)),
7490 fold_convert (type, same));
7495 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7496 specified by EXPR into the buffer PTR of length LEN bytes.
7497 Return the number of bytes placed in the buffer, or zero
7501 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7503 tree type = TREE_TYPE (expr);
7504 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7505 int byte, offset, word, words;
7506 unsigned char value;
7508 if (total_bytes > len)
7510 words = total_bytes / UNITS_PER_WORD;
7512 for (byte = 0; byte < total_bytes; byte++)
7514 int bitpos = byte * BITS_PER_UNIT;
7515 if (bitpos < HOST_BITS_PER_WIDE_INT)
7516 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7518 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7519 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7521 if (total_bytes > UNITS_PER_WORD)
7523 word = byte / UNITS_PER_WORD;
7524 if (WORDS_BIG_ENDIAN)
7525 word = (words - 1) - word;
7526 offset = word * UNITS_PER_WORD;
7527 if (BYTES_BIG_ENDIAN)
7528 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7530 offset += byte % UNITS_PER_WORD;
7533 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7534 ptr[offset] = value;
7540 /* Subroutine of native_encode_expr. Encode the REAL_CST
7541 specified by EXPR into the buffer PTR of length LEN bytes.
7542 Return the number of bytes placed in the buffer, or zero
7546 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7548 tree type = TREE_TYPE (expr);
7549 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7550 int byte, offset, word, words, bitpos;
7551 unsigned char value;
7553 /* There are always 32 bits in each long, no matter the size of
7554 the hosts long. We handle floating point representations with
7558 if (total_bytes > len)
7560 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7562 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7564 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7565 bitpos += BITS_PER_UNIT)
7567 byte = (bitpos / BITS_PER_UNIT) & 3;
7568 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7570 if (UNITS_PER_WORD < 4)
7572 word = byte / UNITS_PER_WORD;
7573 if (WORDS_BIG_ENDIAN)
7574 word = (words - 1) - word;
7575 offset = word * UNITS_PER_WORD;
7576 if (BYTES_BIG_ENDIAN)
7577 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7579 offset += byte % UNITS_PER_WORD;
7582 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7583 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7588 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7589 specified by EXPR into the buffer PTR of length LEN bytes.
7590 Return the number of bytes placed in the buffer, or zero
7594 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7599 part = TREE_REALPART (expr);
7600 rsize = native_encode_expr (part, ptr, len);
7603 part = TREE_IMAGPART (expr);
7604 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7607 return rsize + isize;
7611 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7612 specified by EXPR into the buffer PTR of length LEN bytes.
7613 Return the number of bytes placed in the buffer, or zero
7617 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7619 int i, size, offset, count;
7620 tree itype, elem, elements;
7623 elements = TREE_VECTOR_CST_ELTS (expr);
7624 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7625 itype = TREE_TYPE (TREE_TYPE (expr));
7626 size = GET_MODE_SIZE (TYPE_MODE (itype));
7627 for (i = 0; i < count; i++)
7631 elem = TREE_VALUE (elements);
7632 elements = TREE_CHAIN (elements);
7639 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7644 if (offset + size > len)
7646 memset (ptr+offset, 0, size);
7654 /* Subroutine of native_encode_expr. Encode the STRING_CST
7655 specified by EXPR into the buffer PTR of length LEN bytes.
7656 Return the number of bytes placed in the buffer, or zero
7660 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7662 tree type = TREE_TYPE (expr);
7663 HOST_WIDE_INT total_bytes;
7665 if (TREE_CODE (type) != ARRAY_TYPE
7666 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7667 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7668 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7670 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7671 if (total_bytes > len)
7673 if (TREE_STRING_LENGTH (expr) < total_bytes)
7675 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7676 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7677 total_bytes - TREE_STRING_LENGTH (expr));
7680 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7685 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7686 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7687 buffer PTR of length LEN bytes. Return the number of bytes
7688 placed in the buffer, or zero upon failure. */
7691 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7693 switch (TREE_CODE (expr))
7696 return native_encode_int (expr, ptr, len);
7699 return native_encode_real (expr, ptr, len);
7702 return native_encode_complex (expr, ptr, len);
7705 return native_encode_vector (expr, ptr, len);
7708 return native_encode_string (expr, ptr, len);
7716 /* Subroutine of native_interpret_expr. Interpret the contents of
7717 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7718 If the buffer cannot be interpreted, return NULL_TREE. */
7721 native_interpret_int (tree type, const unsigned char *ptr, int len)
7723 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7724 int byte, offset, word, words;
7725 unsigned char value;
7726 unsigned int HOST_WIDE_INT lo = 0;
7727 HOST_WIDE_INT hi = 0;
7729 if (total_bytes > len)
7731 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7733 words = total_bytes / UNITS_PER_WORD;
7735 for (byte = 0; byte < total_bytes; byte++)
7737 int bitpos = byte * BITS_PER_UNIT;
7738 if (total_bytes > UNITS_PER_WORD)
7740 word = byte / UNITS_PER_WORD;
7741 if (WORDS_BIG_ENDIAN)
7742 word = (words - 1) - word;
7743 offset = word * UNITS_PER_WORD;
7744 if (BYTES_BIG_ENDIAN)
7745 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7747 offset += byte % UNITS_PER_WORD;
7750 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7751 value = ptr[offset];
7753 if (bitpos < HOST_BITS_PER_WIDE_INT)
7754 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7756 hi |= (unsigned HOST_WIDE_INT) value
7757 << (bitpos - HOST_BITS_PER_WIDE_INT);
7760 return build_int_cst_wide_type (type, lo, hi);
7764 /* Subroutine of native_interpret_expr. Interpret the contents of
7765 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7766 If the buffer cannot be interpreted, return NULL_TREE. */
7769 native_interpret_real (tree type, const unsigned char *ptr, int len)
7771 enum machine_mode mode = TYPE_MODE (type);
7772 int total_bytes = GET_MODE_SIZE (mode);
7773 int byte, offset, word, words, bitpos;
7774 unsigned char value;
7775 /* There are always 32 bits in each long, no matter the size of
7776 the hosts long. We handle floating point representations with
7781 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7782 if (total_bytes > len || total_bytes > 24)
7784 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7786 memset (tmp, 0, sizeof (tmp));
7787 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7788 bitpos += BITS_PER_UNIT)
7790 byte = (bitpos / BITS_PER_UNIT) & 3;
7791 if (UNITS_PER_WORD < 4)
7793 word = byte / UNITS_PER_WORD;
7794 if (WORDS_BIG_ENDIAN)
7795 word = (words - 1) - word;
7796 offset = word * UNITS_PER_WORD;
7797 if (BYTES_BIG_ENDIAN)
7798 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7800 offset += byte % UNITS_PER_WORD;
7803 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7804 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7806 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7809 real_from_target (&r, tmp, mode);
7810 return build_real (type, r);
7814 /* Subroutine of native_interpret_expr. Interpret the contents of
7815 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7816 If the buffer cannot be interpreted, return NULL_TREE. */
7819 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7821 tree etype, rpart, ipart;
7824 etype = TREE_TYPE (type);
7825 size = GET_MODE_SIZE (TYPE_MODE (etype));
7828 rpart = native_interpret_expr (etype, ptr, size);
7831 ipart = native_interpret_expr (etype, ptr+size, size);
7834 return build_complex (type, rpart, ipart);
7838 /* Subroutine of native_interpret_expr. Interpret the contents of
7839 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7840 If the buffer cannot be interpreted, return NULL_TREE. */
7843 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7845 tree etype, elem, elements;
7848 etype = TREE_TYPE (type);
7849 size = GET_MODE_SIZE (TYPE_MODE (etype));
7850 count = TYPE_VECTOR_SUBPARTS (type);
7851 if (size * count > len)
7854 elements = NULL_TREE;
7855 for (i = count - 1; i >= 0; i--)
7857 elem = native_interpret_expr (etype, ptr+(i*size), size);
7860 elements = tree_cons (NULL_TREE, elem, elements);
7862 return build_vector (type, elements);
7866 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7867 the buffer PTR of length LEN as a constant of type TYPE. For
7868 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7869 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7870 return NULL_TREE. */
7873 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7875 switch (TREE_CODE (type))
7880 return native_interpret_int (type, ptr, len);
7883 return native_interpret_real (type, ptr, len);
7886 return native_interpret_complex (type, ptr, len);
7889 return native_interpret_vector (type, ptr, len);
7897 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7898 TYPE at compile-time. If we're unable to perform the conversion
7899 return NULL_TREE. */
7902 fold_view_convert_expr (tree type, tree expr)
7904 /* We support up to 512-bit values (for V8DFmode). */
7905 unsigned char buffer[64];
7908 /* Check that the host and target are sane. */
7909 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7912 len = native_encode_expr (expr, buffer, sizeof (buffer));
7916 return native_interpret_expr (type, buffer, len);
7919 /* Build an expression for the address of T. Folds away INDIRECT_REF
7920 to avoid confusing the gimplify process. */
7923 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7925 /* The size of the object is not relevant when talking about its address. */
7926 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7927 t = TREE_OPERAND (t, 0);
7929 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7930 if (TREE_CODE (t) == INDIRECT_REF
7931 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7933 t = TREE_OPERAND (t, 0);
7935 if (TREE_TYPE (t) != ptrtype)
7936 t = build1 (NOP_EXPR, ptrtype, t);
7939 t = build1 (ADDR_EXPR, ptrtype, t);
7944 /* Build an expression for the address of T. */
7947 build_fold_addr_expr (tree t)
7949 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7951 return build_fold_addr_expr_with_type (t, ptrtype);
7954 /* Fold a unary expression of code CODE and type TYPE with operand
7955 OP0. Return the folded expression if folding is successful.
7956 Otherwise, return NULL_TREE. */
7959 fold_unary (enum tree_code code, tree type, tree op0)
7963 enum tree_code_class kind = TREE_CODE_CLASS (code);
7965 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7966 && TREE_CODE_LENGTH (code) == 1);
7971 if (CONVERT_EXPR_CODE_P (code)
7972 || code == FLOAT_EXPR || code == ABS_EXPR)
7974 /* Don't use STRIP_NOPS, because signedness of argument type
7976 STRIP_SIGN_NOPS (arg0);
7980 /* Strip any conversions that don't change the mode. This
7981 is safe for every expression, except for a comparison
7982 expression because its signedness is derived from its
7985 Note that this is done as an internal manipulation within
7986 the constant folder, in order to find the simplest
7987 representation of the arguments so that their form can be
7988 studied. In any cases, the appropriate type conversions
7989 should be put back in the tree that will get out of the
7995 if (TREE_CODE_CLASS (code) == tcc_unary)
7997 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7998 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7999 fold_build1 (code, type,
8000 fold_convert (TREE_TYPE (op0),
8001 TREE_OPERAND (arg0, 1))));
8002 else if (TREE_CODE (arg0) == COND_EXPR)
8004 tree arg01 = TREE_OPERAND (arg0, 1);
8005 tree arg02 = TREE_OPERAND (arg0, 2);
8006 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8007 arg01 = fold_build1 (code, type,
8008 fold_convert (TREE_TYPE (op0), arg01));
8009 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8010 arg02 = fold_build1 (code, type,
8011 fold_convert (TREE_TYPE (op0), arg02));
8012 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8015 /* If this was a conversion, and all we did was to move into
8016 inside the COND_EXPR, bring it back out. But leave it if
8017 it is a conversion from integer to integer and the
8018 result precision is no wider than a word since such a
8019 conversion is cheap and may be optimized away by combine,
8020 while it couldn't if it were outside the COND_EXPR. Then return
8021 so we don't get into an infinite recursion loop taking the
8022 conversion out and then back in. */
8024 if ((CONVERT_EXPR_CODE_P (code)
8025 || code == NON_LVALUE_EXPR)
8026 && TREE_CODE (tem) == COND_EXPR
8027 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8028 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8029 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8030 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8031 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8032 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8033 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8035 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8036 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8037 || flag_syntax_only))
8038 tem = build1 (code, type,
8040 TREE_TYPE (TREE_OPERAND
8041 (TREE_OPERAND (tem, 1), 0)),
8042 TREE_OPERAND (tem, 0),
8043 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8044 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8047 else if (COMPARISON_CLASS_P (arg0))
8049 if (TREE_CODE (type) == BOOLEAN_TYPE)
8051 arg0 = copy_node (arg0);
8052 TREE_TYPE (arg0) = type;
8055 else if (TREE_CODE (type) != INTEGER_TYPE)
8056 return fold_build3 (COND_EXPR, type, arg0,
8057 fold_build1 (code, type,
8059 fold_build1 (code, type,
8060 integer_zero_node));
8067 /* Re-association barriers around constants and other re-association
8068 barriers can be removed. */
8069 if (CONSTANT_CLASS_P (op0)
8070 || TREE_CODE (op0) == PAREN_EXPR)
8071 return fold_convert (type, op0);
8076 case FIX_TRUNC_EXPR:
8077 if (TREE_TYPE (op0) == type)
8080 /* If we have (type) (a CMP b) and type is an integral type, return
8081 new expression involving the new type. */
8082 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8083 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8084 TREE_OPERAND (op0, 1));
8086 /* Handle cases of two conversions in a row. */
8087 if (CONVERT_EXPR_P (op0))
8089 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8090 tree inter_type = TREE_TYPE (op0);
8091 int inside_int = INTEGRAL_TYPE_P (inside_type);
8092 int inside_ptr = POINTER_TYPE_P (inside_type);
8093 int inside_float = FLOAT_TYPE_P (inside_type);
8094 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8095 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8096 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8097 int inter_int = INTEGRAL_TYPE_P (inter_type);
8098 int inter_ptr = POINTER_TYPE_P (inter_type);
8099 int inter_float = FLOAT_TYPE_P (inter_type);
8100 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8101 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8102 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8103 int final_int = INTEGRAL_TYPE_P (type);
8104 int final_ptr = POINTER_TYPE_P (type);
8105 int final_float = FLOAT_TYPE_P (type);
8106 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8107 unsigned int final_prec = TYPE_PRECISION (type);
8108 int final_unsignedp = TYPE_UNSIGNED (type);
8110 /* In addition to the cases of two conversions in a row
8111 handled below, if we are converting something to its own
8112 type via an object of identical or wider precision, neither
8113 conversion is needed. */
8114 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8115 && (((inter_int || inter_ptr) && final_int)
8116 || (inter_float && final_float))
8117 && inter_prec >= final_prec)
8118 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8120 /* Likewise, if the intermediate and initial types are either both
8121 float or both integer, we don't need the middle conversion if the
8122 former is wider than the latter and doesn't change the signedness
8123 (for integers). Avoid this if the final type is a pointer since
8124 then we sometimes need the middle conversion. Likewise if the
8125 final type has a precision not equal to the size of its mode. */
8126 if (((inter_int && inside_int)
8127 || (inter_float && inside_float)
8128 || (inter_vec && inside_vec))
8129 && inter_prec >= inside_prec
8130 && (inter_float || inter_vec
8131 || inter_unsignedp == inside_unsignedp)
8132 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8133 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8135 && (! final_vec || inter_prec == inside_prec))
8136 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8138 /* If we have a sign-extension of a zero-extended value, we can
8139 replace that by a single zero-extension. */
8140 if (inside_int && inter_int && final_int
8141 && inside_prec < inter_prec && inter_prec < final_prec
8142 && inside_unsignedp && !inter_unsignedp)
8143 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8145 /* Two conversions in a row are not needed unless:
8146 - some conversion is floating-point (overstrict for now), or
8147 - some conversion is a vector (overstrict for now), or
8148 - the intermediate type is narrower than both initial and
8150 - the intermediate type and innermost type differ in signedness,
8151 and the outermost type is wider than the intermediate, or
8152 - the initial type is a pointer type and the precisions of the
8153 intermediate and final types differ, or
8154 - the final type is a pointer type and the precisions of the
8155 initial and intermediate types differ. */
8156 if (! inside_float && ! inter_float && ! final_float
8157 && ! inside_vec && ! inter_vec && ! final_vec
8158 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8159 && ! (inside_int && inter_int
8160 && inter_unsignedp != inside_unsignedp
8161 && inter_prec < final_prec)
8162 && ((inter_unsignedp && inter_prec > inside_prec)
8163 == (final_unsignedp && final_prec > inter_prec))
8164 && ! (inside_ptr && inter_prec != final_prec)
8165 && ! (final_ptr && inside_prec != inter_prec)
8166 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8167 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8168 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8171 /* Handle (T *)&A.B.C for A being of type T and B and C
8172 living at offset zero. This occurs frequently in
8173 C++ upcasting and then accessing the base. */
8174 if (TREE_CODE (op0) == ADDR_EXPR
8175 && POINTER_TYPE_P (type)
8176 && handled_component_p (TREE_OPERAND (op0, 0)))
8178 HOST_WIDE_INT bitsize, bitpos;
8180 enum machine_mode mode;
8181 int unsignedp, volatilep;
8182 tree base = TREE_OPERAND (op0, 0);
8183 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8184 &mode, &unsignedp, &volatilep, false);
8185 /* If the reference was to a (constant) zero offset, we can use
8186 the address of the base if it has the same base type
8187 as the result type. */
8188 if (! offset && bitpos == 0
8189 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8190 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8191 return fold_convert (type, build_fold_addr_expr (base));
8194 if (TREE_CODE (op0) == MODIFY_EXPR
8195 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8196 /* Detect assigning a bitfield. */
8197 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8199 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8201 /* Don't leave an assignment inside a conversion
8202 unless assigning a bitfield. */
8203 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8204 /* First do the assignment, then return converted constant. */
8205 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8206 TREE_NO_WARNING (tem) = 1;
8207 TREE_USED (tem) = 1;
8211 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8212 constants (if x has signed type, the sign bit cannot be set
8213 in c). This folds extension into the BIT_AND_EXPR.
8214 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8215 very likely don't have maximal range for their precision and this
8216 transformation effectively doesn't preserve non-maximal ranges. */
8217 if (TREE_CODE (type) == INTEGER_TYPE
8218 && TREE_CODE (op0) == BIT_AND_EXPR
8219 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8220 /* Not if the conversion is to the sub-type. */
8221 && TREE_TYPE (type) != TREE_TYPE (op0))
8224 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8227 if (TYPE_UNSIGNED (TREE_TYPE (and))
8228 || (TYPE_PRECISION (type)
8229 <= TYPE_PRECISION (TREE_TYPE (and))))
8231 else if (TYPE_PRECISION (TREE_TYPE (and1))
8232 <= HOST_BITS_PER_WIDE_INT
8233 && host_integerp (and1, 1))
8235 unsigned HOST_WIDE_INT cst;
8237 cst = tree_low_cst (and1, 1);
8238 cst &= (HOST_WIDE_INT) -1
8239 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8240 change = (cst == 0);
8241 #ifdef LOAD_EXTEND_OP
8243 && !flag_syntax_only
8244 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8247 tree uns = unsigned_type_for (TREE_TYPE (and0));
8248 and0 = fold_convert (uns, and0);
8249 and1 = fold_convert (uns, and1);
8255 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8256 TREE_INT_CST_HIGH (and1), 0,
8257 TREE_OVERFLOW (and1));
8258 return fold_build2 (BIT_AND_EXPR, type,
8259 fold_convert (type, and0), tem);
8263 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8264 when one of the new casts will fold away. Conservatively we assume
8265 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8266 if (POINTER_TYPE_P (type)
8267 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8268 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8269 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8270 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8272 tree arg00 = TREE_OPERAND (arg0, 0);
8273 tree arg01 = TREE_OPERAND (arg0, 1);
8275 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8276 fold_convert (sizetype, arg01));
8279 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8280 of the same precision, and X is an integer type not narrower than
8281 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8282 if (INTEGRAL_TYPE_P (type)
8283 && TREE_CODE (op0) == BIT_NOT_EXPR
8284 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8285 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8286 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8288 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8289 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8290 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8291 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8294 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8295 type of X and Y (integer types only). */
8296 if (INTEGRAL_TYPE_P (type)
8297 && TREE_CODE (op0) == MULT_EXPR
8298 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8299 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8301 /* Be careful not to introduce new overflows. */
8303 if (TYPE_OVERFLOW_WRAPS (type))
8306 mult_type = unsigned_type_for (type);
8308 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8310 tem = fold_build2 (MULT_EXPR, mult_type,
8311 fold_convert (mult_type,
8312 TREE_OPERAND (op0, 0)),
8313 fold_convert (mult_type,
8314 TREE_OPERAND (op0, 1)));
8315 return fold_convert (type, tem);
8319 tem = fold_convert_const (code, type, op0);
8320 return tem ? tem : NULL_TREE;
8322 case FIXED_CONVERT_EXPR:
8323 tem = fold_convert_const (code, type, arg0);
8324 return tem ? tem : NULL_TREE;
8326 case VIEW_CONVERT_EXPR:
8327 if (TREE_TYPE (op0) == type)
8329 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8330 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8332 /* For integral conversions with the same precision or pointer
8333 conversions use a NOP_EXPR instead. */
8334 if ((INTEGRAL_TYPE_P (type)
8335 || POINTER_TYPE_P (type))
8336 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8337 || POINTER_TYPE_P (TREE_TYPE (op0)))
8338 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8339 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8340 a sub-type to its base type as generated by the Ada FE. */
8341 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8342 && TREE_TYPE (TREE_TYPE (op0))))
8343 return fold_convert (type, op0);
8345 /* Strip inner integral conversions that do not change the precision. */
8346 if (CONVERT_EXPR_P (op0)
8347 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8348 || POINTER_TYPE_P (TREE_TYPE (op0)))
8349 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8350 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8351 && (TYPE_PRECISION (TREE_TYPE (op0))
8352 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8353 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8355 return fold_view_convert_expr (type, op0);
8358 tem = fold_negate_expr (arg0);
8360 return fold_convert (type, tem);
8364 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8365 return fold_abs_const (arg0, type);
8366 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8367 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8368 /* Convert fabs((double)float) into (double)fabsf(float). */
8369 else if (TREE_CODE (arg0) == NOP_EXPR
8370 && TREE_CODE (type) == REAL_TYPE)
8372 tree targ0 = strip_float_extensions (arg0);
8374 return fold_convert (type, fold_build1 (ABS_EXPR,
8378 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8379 else if (TREE_CODE (arg0) == ABS_EXPR)
8381 else if (tree_expr_nonnegative_p (arg0))
8384 /* Strip sign ops from argument. */
8385 if (TREE_CODE (type) == REAL_TYPE)
8387 tem = fold_strip_sign_ops (arg0);
8389 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8394 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8395 return fold_convert (type, arg0);
8396 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8398 tree itype = TREE_TYPE (type);
8399 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8400 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8401 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8403 if (TREE_CODE (arg0) == COMPLEX_CST)
8405 tree itype = TREE_TYPE (type);
8406 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8407 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8408 return build_complex (type, rpart, negate_expr (ipart));
8410 if (TREE_CODE (arg0) == CONJ_EXPR)
8411 return fold_convert (type, TREE_OPERAND (arg0, 0));
8415 if (TREE_CODE (arg0) == INTEGER_CST)
8416 return fold_not_const (arg0, type);
8417 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8418 return fold_convert (type, TREE_OPERAND (arg0, 0));
8419 /* Convert ~ (-A) to A - 1. */
8420 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8421 return fold_build2 (MINUS_EXPR, type,
8422 fold_convert (type, TREE_OPERAND (arg0, 0)),
8423 build_int_cst (type, 1));
8424 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8425 else if (INTEGRAL_TYPE_P (type)
8426 && ((TREE_CODE (arg0) == MINUS_EXPR
8427 && integer_onep (TREE_OPERAND (arg0, 1)))
8428 || (TREE_CODE (arg0) == PLUS_EXPR
8429 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8430 return fold_build1 (NEGATE_EXPR, type,
8431 fold_convert (type, TREE_OPERAND (arg0, 0)));
8432 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8433 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8434 && (tem = fold_unary (BIT_NOT_EXPR, type,
8436 TREE_OPERAND (arg0, 0)))))
8437 return fold_build2 (BIT_XOR_EXPR, type, tem,
8438 fold_convert (type, TREE_OPERAND (arg0, 1)));
8439 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8440 && (tem = fold_unary (BIT_NOT_EXPR, type,
8442 TREE_OPERAND (arg0, 1)))))
8443 return fold_build2 (BIT_XOR_EXPR, type,
8444 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8445 /* Perform BIT_NOT_EXPR on each element individually. */
8446 else if (TREE_CODE (arg0) == VECTOR_CST)
8448 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8449 int count = TYPE_VECTOR_SUBPARTS (type), i;
8451 for (i = 0; i < count; i++)
8455 elem = TREE_VALUE (elements);
8456 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8457 if (elem == NULL_TREE)
8459 elements = TREE_CHAIN (elements);
8462 elem = build_int_cst (TREE_TYPE (type), -1);
8463 list = tree_cons (NULL_TREE, elem, list);
8466 return build_vector (type, nreverse (list));
8471 case TRUTH_NOT_EXPR:
8472 /* The argument to invert_truthvalue must have Boolean type. */
8473 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8474 arg0 = fold_convert (boolean_type_node, arg0);
8476 /* Note that the operand of this must be an int
8477 and its values must be 0 or 1.
8478 ("true" is a fixed value perhaps depending on the language,
8479 but we don't handle values other than 1 correctly yet.) */
8480 tem = fold_truth_not_expr (arg0);
8483 return fold_convert (type, tem);
8486 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8487 return fold_convert (type, arg0);
8488 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8489 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8490 TREE_OPERAND (arg0, 1));
8491 if (TREE_CODE (arg0) == COMPLEX_CST)
8492 return fold_convert (type, TREE_REALPART (arg0));
8493 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8495 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8496 tem = fold_build2 (TREE_CODE (arg0), itype,
8497 fold_build1 (REALPART_EXPR, itype,
8498 TREE_OPERAND (arg0, 0)),
8499 fold_build1 (REALPART_EXPR, itype,
8500 TREE_OPERAND (arg0, 1)));
8501 return fold_convert (type, tem);
8503 if (TREE_CODE (arg0) == CONJ_EXPR)
8505 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8506 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8507 return fold_convert (type, tem);
8509 if (TREE_CODE (arg0) == CALL_EXPR)
8511 tree fn = get_callee_fndecl (arg0);
8512 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8513 switch (DECL_FUNCTION_CODE (fn))
8515 CASE_FLT_FN (BUILT_IN_CEXPI):
8516 fn = mathfn_built_in (type, BUILT_IN_COS);
8518 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8528 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8529 return fold_convert (type, integer_zero_node);
8530 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8531 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8532 TREE_OPERAND (arg0, 0));
8533 if (TREE_CODE (arg0) == COMPLEX_CST)
8534 return fold_convert (type, TREE_IMAGPART (arg0));
8535 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8537 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8538 tem = fold_build2 (TREE_CODE (arg0), itype,
8539 fold_build1 (IMAGPART_EXPR, itype,
8540 TREE_OPERAND (arg0, 0)),
8541 fold_build1 (IMAGPART_EXPR, itype,
8542 TREE_OPERAND (arg0, 1)));
8543 return fold_convert (type, tem);
8545 if (TREE_CODE (arg0) == CONJ_EXPR)
8547 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8548 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8549 return fold_convert (type, negate_expr (tem));
8551 if (TREE_CODE (arg0) == CALL_EXPR)
8553 tree fn = get_callee_fndecl (arg0);
8554 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8555 switch (DECL_FUNCTION_CODE (fn))
8557 CASE_FLT_FN (BUILT_IN_CEXPI):
8558 fn = mathfn_built_in (type, BUILT_IN_SIN);
8560 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8571 } /* switch (code) */
8575 /* If the operation was a conversion do _not_ mark a resulting constant
8576 with TREE_OVERFLOW if the original constant was not. These conversions
8577 have implementation defined behavior and retaining the TREE_OVERFLOW
8578 flag here would confuse later passes such as VRP. */
8580 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
8582 tree res = fold_unary (code, type, op0);
8584 && TREE_CODE (res) == INTEGER_CST
8585 && TREE_CODE (op0) == INTEGER_CST
8586 && CONVERT_EXPR_CODE_P (code))
8587 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8592 /* Fold a binary expression of code CODE and type TYPE with operands
8593 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8594 Return the folded expression if folding is successful. Otherwise,
8595 return NULL_TREE. */
8598 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8600 enum tree_code compl_code;
8602 if (code == MIN_EXPR)
8603 compl_code = MAX_EXPR;
8604 else if (code == MAX_EXPR)
8605 compl_code = MIN_EXPR;
8609 /* MIN (MAX (a, b), b) == b. */
8610 if (TREE_CODE (op0) == compl_code
8611 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8612 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8614 /* MIN (MAX (b, a), b) == b. */
8615 if (TREE_CODE (op0) == compl_code
8616 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8617 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8618 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8620 /* MIN (a, MAX (a, b)) == a. */
8621 if (TREE_CODE (op1) == compl_code
8622 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8623 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8624 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8626 /* MIN (a, MAX (b, a)) == a. */
8627 if (TREE_CODE (op1) == compl_code
8628 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8629 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8630 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8635 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8636 by changing CODE to reduce the magnitude of constants involved in
8637 ARG0 of the comparison.
8638 Returns a canonicalized comparison tree if a simplification was
8639 possible, otherwise returns NULL_TREE.
8640 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8641 valid if signed overflow is undefined. */
8644 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8645 tree arg0, tree arg1,
8646 bool *strict_overflow_p)
8648 enum tree_code code0 = TREE_CODE (arg0);
8649 tree t, cst0 = NULL_TREE;
8653 /* Match A +- CST code arg1 and CST code arg1. We can change the
8654 first form only if overflow is undefined. */
8655 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8656 /* In principle pointers also have undefined overflow behavior,
8657 but that causes problems elsewhere. */
8658 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8659 && (code0 == MINUS_EXPR
8660 || code0 == PLUS_EXPR)
8661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8662 || code0 == INTEGER_CST))
8665 /* Identify the constant in arg0 and its sign. */
8666 if (code0 == INTEGER_CST)
8669 cst0 = TREE_OPERAND (arg0, 1);
8670 sgn0 = tree_int_cst_sgn (cst0);
8672 /* Overflowed constants and zero will cause problems. */
8673 if (integer_zerop (cst0)
8674 || TREE_OVERFLOW (cst0))
8677 /* See if we can reduce the magnitude of the constant in
8678 arg0 by changing the comparison code. */
8679 if (code0 == INTEGER_CST)
8681 /* CST <= arg1 -> CST-1 < arg1. */
8682 if (code == LE_EXPR && sgn0 == 1)
8684 /* -CST < arg1 -> -CST-1 <= arg1. */
8685 else if (code == LT_EXPR && sgn0 == -1)
8687 /* CST > arg1 -> CST-1 >= arg1. */
8688 else if (code == GT_EXPR && sgn0 == 1)
8690 /* -CST >= arg1 -> -CST-1 > arg1. */
8691 else if (code == GE_EXPR && sgn0 == -1)
8695 /* arg1 code' CST' might be more canonical. */
8700 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8702 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8704 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8705 else if (code == GT_EXPR
8706 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8708 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8709 else if (code == LE_EXPR
8710 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8712 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8713 else if (code == GE_EXPR
8714 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8718 *strict_overflow_p = true;
8721 /* Now build the constant reduced in magnitude. But not if that
8722 would produce one outside of its types range. */
8723 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8725 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8726 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8728 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8729 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8730 /* We cannot swap the comparison here as that would cause us to
8731 endlessly recurse. */
8734 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8735 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8736 if (code0 != INTEGER_CST)
8737 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8739 /* If swapping might yield to a more canonical form, do so. */
8741 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8743 return fold_build2 (code, type, t, arg1);
8746 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8747 overflow further. Try to decrease the magnitude of constants involved
8748 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8749 and put sole constants at the second argument position.
8750 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8753 maybe_canonicalize_comparison (enum tree_code code, tree type,
8754 tree arg0, tree arg1)
8757 bool strict_overflow_p;
8758 const char * const warnmsg = G_("assuming signed overflow does not occur "
8759 "when reducing constant in comparison");
8761 /* Try canonicalization by simplifying arg0. */
8762 strict_overflow_p = false;
8763 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8764 &strict_overflow_p);
8767 if (strict_overflow_p)
8768 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8772 /* Try canonicalization by simplifying arg1 using the swapped
8774 code = swap_tree_comparison (code);
8775 strict_overflow_p = false;
8776 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8777 &strict_overflow_p);
8778 if (t && strict_overflow_p)
8779 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8783 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8784 space. This is used to avoid issuing overflow warnings for
8785 expressions like &p->x which can not wrap. */
8788 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8790 unsigned HOST_WIDE_INT offset_low, total_low;
8791 HOST_WIDE_INT size, offset_high, total_high;
8793 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8799 if (offset == NULL_TREE)
8804 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8808 offset_low = TREE_INT_CST_LOW (offset);
8809 offset_high = TREE_INT_CST_HIGH (offset);
8812 if (add_double_with_sign (offset_low, offset_high,
8813 bitpos / BITS_PER_UNIT, 0,
8814 &total_low, &total_high,
8818 if (total_high != 0)
8821 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8825 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8827 if (TREE_CODE (base) == ADDR_EXPR)
8829 HOST_WIDE_INT base_size;
8831 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8832 if (base_size > 0 && size < base_size)
8836 return total_low > (unsigned HOST_WIDE_INT) size;
8839 /* Subroutine of fold_binary. This routine performs all of the
8840 transformations that are common to the equality/inequality
8841 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8842 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8843 fold_binary should call fold_binary. Fold a comparison with
8844 tree code CODE and type TYPE with operands OP0 and OP1. Return
8845 the folded comparison or NULL_TREE. */
8848 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8850 tree arg0, arg1, tem;
8855 STRIP_SIGN_NOPS (arg0);
8856 STRIP_SIGN_NOPS (arg1);
8858 tem = fold_relational_const (code, type, arg0, arg1);
8859 if (tem != NULL_TREE)
8862 /* If one arg is a real or integer constant, put it last. */
8863 if (tree_swap_operands_p (arg0, arg1, true))
8864 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8866 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8867 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8868 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8869 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8870 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8871 && (TREE_CODE (arg1) == INTEGER_CST
8872 && !TREE_OVERFLOW (arg1)))
8874 tree const1 = TREE_OPERAND (arg0, 1);
8876 tree variable = TREE_OPERAND (arg0, 0);
8879 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8881 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8882 TREE_TYPE (arg1), const2, const1);
8884 /* If the constant operation overflowed this can be
8885 simplified as a comparison against INT_MAX/INT_MIN. */
8886 if (TREE_CODE (lhs) == INTEGER_CST
8887 && TREE_OVERFLOW (lhs))
8889 int const1_sgn = tree_int_cst_sgn (const1);
8890 enum tree_code code2 = code;
8892 /* Get the sign of the constant on the lhs if the
8893 operation were VARIABLE + CONST1. */
8894 if (TREE_CODE (arg0) == MINUS_EXPR)
8895 const1_sgn = -const1_sgn;
8897 /* The sign of the constant determines if we overflowed
8898 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8899 Canonicalize to the INT_MIN overflow by swapping the comparison
8901 if (const1_sgn == -1)
8902 code2 = swap_tree_comparison (code);
8904 /* We now can look at the canonicalized case
8905 VARIABLE + 1 CODE2 INT_MIN
8906 and decide on the result. */
8907 if (code2 == LT_EXPR
8909 || code2 == EQ_EXPR)
8910 return omit_one_operand (type, boolean_false_node, variable);
8911 else if (code2 == NE_EXPR
8913 || code2 == GT_EXPR)
8914 return omit_one_operand (type, boolean_true_node, variable);
8917 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8918 && (TREE_CODE (lhs) != INTEGER_CST
8919 || !TREE_OVERFLOW (lhs)))
8921 fold_overflow_warning (("assuming signed overflow does not occur "
8922 "when changing X +- C1 cmp C2 to "
8924 WARN_STRICT_OVERFLOW_COMPARISON);
8925 return fold_build2 (code, type, variable, lhs);
8929 /* For comparisons of pointers we can decompose it to a compile time
8930 comparison of the base objects and the offsets into the object.
8931 This requires at least one operand being an ADDR_EXPR or a
8932 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8933 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8934 && (TREE_CODE (arg0) == ADDR_EXPR
8935 || TREE_CODE (arg1) == ADDR_EXPR
8936 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8937 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8939 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8940 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8941 enum machine_mode mode;
8942 int volatilep, unsignedp;
8943 bool indirect_base0 = false, indirect_base1 = false;
8945 /* Get base and offset for the access. Strip ADDR_EXPR for
8946 get_inner_reference, but put it back by stripping INDIRECT_REF
8947 off the base object if possible. indirect_baseN will be true
8948 if baseN is not an address but refers to the object itself. */
8950 if (TREE_CODE (arg0) == ADDR_EXPR)
8952 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8953 &bitsize, &bitpos0, &offset0, &mode,
8954 &unsignedp, &volatilep, false);
8955 if (TREE_CODE (base0) == INDIRECT_REF)
8956 base0 = TREE_OPERAND (base0, 0);
8958 indirect_base0 = true;
8960 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8962 base0 = TREE_OPERAND (arg0, 0);
8963 offset0 = TREE_OPERAND (arg0, 1);
8967 if (TREE_CODE (arg1) == ADDR_EXPR)
8969 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8970 &bitsize, &bitpos1, &offset1, &mode,
8971 &unsignedp, &volatilep, false);
8972 if (TREE_CODE (base1) == INDIRECT_REF)
8973 base1 = TREE_OPERAND (base1, 0);
8975 indirect_base1 = true;
8977 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8979 base1 = TREE_OPERAND (arg1, 0);
8980 offset1 = TREE_OPERAND (arg1, 1);
8983 /* If we have equivalent bases we might be able to simplify. */
8984 if (indirect_base0 == indirect_base1
8985 && operand_equal_p (base0, base1, 0))
8987 /* We can fold this expression to a constant if the non-constant
8988 offset parts are equal. */
8989 if ((offset0 == offset1
8990 || (offset0 && offset1
8991 && operand_equal_p (offset0, offset1, 0)))
8994 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8999 && bitpos0 != bitpos1
9000 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9001 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9002 fold_overflow_warning (("assuming pointer wraparound does not "
9003 "occur when comparing P +- C1 with "
9005 WARN_STRICT_OVERFLOW_CONDITIONAL);
9010 return constant_boolean_node (bitpos0 == bitpos1, type);
9012 return constant_boolean_node (bitpos0 != bitpos1, type);
9014 return constant_boolean_node (bitpos0 < bitpos1, type);
9016 return constant_boolean_node (bitpos0 <= bitpos1, type);
9018 return constant_boolean_node (bitpos0 >= bitpos1, type);
9020 return constant_boolean_node (bitpos0 > bitpos1, type);
9024 /* We can simplify the comparison to a comparison of the variable
9025 offset parts if the constant offset parts are equal.
9026 Be careful to use signed size type here because otherwise we
9027 mess with array offsets in the wrong way. This is possible
9028 because pointer arithmetic is restricted to retain within an
9029 object and overflow on pointer differences is undefined as of
9030 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9031 else if (bitpos0 == bitpos1
9032 && ((code == EQ_EXPR || code == NE_EXPR)
9033 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9035 tree signed_size_type_node;
9036 signed_size_type_node = signed_type_for (size_type_node);
9038 /* By converting to signed size type we cover middle-end pointer
9039 arithmetic which operates on unsigned pointer types of size
9040 type size and ARRAY_REF offsets which are properly sign or
9041 zero extended from their type in case it is narrower than
9043 if (offset0 == NULL_TREE)
9044 offset0 = build_int_cst (signed_size_type_node, 0);
9046 offset0 = fold_convert (signed_size_type_node, offset0);
9047 if (offset1 == NULL_TREE)
9048 offset1 = build_int_cst (signed_size_type_node, 0);
9050 offset1 = fold_convert (signed_size_type_node, offset1);
9054 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9055 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9056 fold_overflow_warning (("assuming pointer wraparound does not "
9057 "occur when comparing P +- C1 with "
9059 WARN_STRICT_OVERFLOW_COMPARISON);
9061 return fold_build2 (code, type, offset0, offset1);
9064 /* For non-equal bases we can simplify if they are addresses
9065 of local binding decls or constants. */
9066 else if (indirect_base0 && indirect_base1
9067 /* We know that !operand_equal_p (base0, base1, 0)
9068 because the if condition was false. But make
9069 sure two decls are not the same. */
9071 && TREE_CODE (arg0) == ADDR_EXPR
9072 && TREE_CODE (arg1) == ADDR_EXPR
9073 && (((TREE_CODE (base0) == VAR_DECL
9074 || TREE_CODE (base0) == PARM_DECL)
9075 && (targetm.binds_local_p (base0)
9076 || CONSTANT_CLASS_P (base1)))
9077 || CONSTANT_CLASS_P (base0))
9078 && (((TREE_CODE (base1) == VAR_DECL
9079 || TREE_CODE (base1) == PARM_DECL)
9080 && (targetm.binds_local_p (base1)
9081 || CONSTANT_CLASS_P (base0)))
9082 || CONSTANT_CLASS_P (base1)))
9084 if (code == EQ_EXPR)
9085 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9086 else if (code == NE_EXPR)
9087 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9089 /* For equal offsets we can simplify to a comparison of the
9091 else if (bitpos0 == bitpos1
9093 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9095 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9096 && ((offset0 == offset1)
9097 || (offset0 && offset1
9098 && operand_equal_p (offset0, offset1, 0))))
9101 base0 = build_fold_addr_expr (base0);
9103 base1 = build_fold_addr_expr (base1);
9104 return fold_build2 (code, type, base0, base1);
9108 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9109 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9110 the resulting offset is smaller in absolute value than the
9112 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9113 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9114 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9115 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9116 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9117 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9118 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9120 tree const1 = TREE_OPERAND (arg0, 1);
9121 tree const2 = TREE_OPERAND (arg1, 1);
9122 tree variable1 = TREE_OPERAND (arg0, 0);
9123 tree variable2 = TREE_OPERAND (arg1, 0);
9125 const char * const warnmsg = G_("assuming signed overflow does not "
9126 "occur when combining constants around "
9129 /* Put the constant on the side where it doesn't overflow and is
9130 of lower absolute value than before. */
9131 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9132 ? MINUS_EXPR : PLUS_EXPR,
9134 if (!TREE_OVERFLOW (cst)
9135 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9137 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9138 return fold_build2 (code, type,
9140 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9144 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9145 ? MINUS_EXPR : PLUS_EXPR,
9147 if (!TREE_OVERFLOW (cst)
9148 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9150 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9151 return fold_build2 (code, type,
9152 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9158 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9159 signed arithmetic case. That form is created by the compiler
9160 often enough for folding it to be of value. One example is in
9161 computing loop trip counts after Operator Strength Reduction. */
9162 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9163 && TREE_CODE (arg0) == MULT_EXPR
9164 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9165 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9166 && integer_zerop (arg1))
9168 tree const1 = TREE_OPERAND (arg0, 1);
9169 tree const2 = arg1; /* zero */
9170 tree variable1 = TREE_OPERAND (arg0, 0);
9171 enum tree_code cmp_code = code;
9173 gcc_assert (!integer_zerop (const1));
9175 fold_overflow_warning (("assuming signed overflow does not occur when "
9176 "eliminating multiplication in comparison "
9178 WARN_STRICT_OVERFLOW_COMPARISON);
9180 /* If const1 is negative we swap the sense of the comparison. */
9181 if (tree_int_cst_sgn (const1) < 0)
9182 cmp_code = swap_tree_comparison (cmp_code);
9184 return fold_build2 (cmp_code, type, variable1, const2);
9187 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9191 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9193 tree targ0 = strip_float_extensions (arg0);
9194 tree targ1 = strip_float_extensions (arg1);
9195 tree newtype = TREE_TYPE (targ0);
9197 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9198 newtype = TREE_TYPE (targ1);
9200 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9201 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9202 return fold_build2 (code, type, fold_convert (newtype, targ0),
9203 fold_convert (newtype, targ1));
9205 /* (-a) CMP (-b) -> b CMP a */
9206 if (TREE_CODE (arg0) == NEGATE_EXPR
9207 && TREE_CODE (arg1) == NEGATE_EXPR)
9208 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9209 TREE_OPERAND (arg0, 0));
9211 if (TREE_CODE (arg1) == REAL_CST)
9213 REAL_VALUE_TYPE cst;
9214 cst = TREE_REAL_CST (arg1);
9216 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9217 if (TREE_CODE (arg0) == NEGATE_EXPR)
9218 return fold_build2 (swap_tree_comparison (code), type,
9219 TREE_OPERAND (arg0, 0),
9220 build_real (TREE_TYPE (arg1),
9221 REAL_VALUE_NEGATE (cst)));
9223 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9224 /* a CMP (-0) -> a CMP 0 */
9225 if (REAL_VALUE_MINUS_ZERO (cst))
9226 return fold_build2 (code, type, arg0,
9227 build_real (TREE_TYPE (arg1), dconst0));
9229 /* x != NaN is always true, other ops are always false. */
9230 if (REAL_VALUE_ISNAN (cst)
9231 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9233 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9234 return omit_one_operand (type, tem, arg0);
9237 /* Fold comparisons against infinity. */
9238 if (REAL_VALUE_ISINF (cst)
9239 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9241 tem = fold_inf_compare (code, type, arg0, arg1);
9242 if (tem != NULL_TREE)
9247 /* If this is a comparison of a real constant with a PLUS_EXPR
9248 or a MINUS_EXPR of a real constant, we can convert it into a
9249 comparison with a revised real constant as long as no overflow
9250 occurs when unsafe_math_optimizations are enabled. */
9251 if (flag_unsafe_math_optimizations
9252 && TREE_CODE (arg1) == REAL_CST
9253 && (TREE_CODE (arg0) == PLUS_EXPR
9254 || TREE_CODE (arg0) == MINUS_EXPR)
9255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9256 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9257 ? MINUS_EXPR : PLUS_EXPR,
9258 arg1, TREE_OPERAND (arg0, 1), 0))
9259 && !TREE_OVERFLOW (tem))
9260 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9262 /* Likewise, we can simplify a comparison of a real constant with
9263 a MINUS_EXPR whose first operand is also a real constant, i.e.
9264 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9265 floating-point types only if -fassociative-math is set. */
9266 if (flag_associative_math
9267 && TREE_CODE (arg1) == REAL_CST
9268 && TREE_CODE (arg0) == MINUS_EXPR
9269 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9270 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9272 && !TREE_OVERFLOW (tem))
9273 return fold_build2 (swap_tree_comparison (code), type,
9274 TREE_OPERAND (arg0, 1), tem);
9276 /* Fold comparisons against built-in math functions. */
9277 if (TREE_CODE (arg1) == REAL_CST
9278 && flag_unsafe_math_optimizations
9279 && ! flag_errno_math)
9281 enum built_in_function fcode = builtin_mathfn_code (arg0);
9283 if (fcode != END_BUILTINS)
9285 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9286 if (tem != NULL_TREE)
9292 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9293 && CONVERT_EXPR_P (arg0))
9295 /* If we are widening one operand of an integer comparison,
9296 see if the other operand is similarly being widened. Perhaps we
9297 can do the comparison in the narrower type. */
9298 tem = fold_widened_comparison (code, type, arg0, arg1);
9302 /* Or if we are changing signedness. */
9303 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9308 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9309 constant, we can simplify it. */
9310 if (TREE_CODE (arg1) == INTEGER_CST
9311 && (TREE_CODE (arg0) == MIN_EXPR
9312 || TREE_CODE (arg0) == MAX_EXPR)
9313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9315 tem = optimize_minmax_comparison (code, type, op0, op1);
9320 /* Simplify comparison of something with itself. (For IEEE
9321 floating-point, we can only do some of these simplifications.) */
9322 if (operand_equal_p (arg0, arg1, 0))
9327 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9328 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9329 return constant_boolean_node (1, type);
9334 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9335 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9336 return constant_boolean_node (1, type);
9337 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9340 /* For NE, we can only do this simplification if integer
9341 or we don't honor IEEE floating point NaNs. */
9342 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9343 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9345 /* ... fall through ... */
9348 return constant_boolean_node (0, type);
9354 /* If we are comparing an expression that just has comparisons
9355 of two integer values, arithmetic expressions of those comparisons,
9356 and constants, we can simplify it. There are only three cases
9357 to check: the two values can either be equal, the first can be
9358 greater, or the second can be greater. Fold the expression for
9359 those three values. Since each value must be 0 or 1, we have
9360 eight possibilities, each of which corresponds to the constant 0
9361 or 1 or one of the six possible comparisons.
9363 This handles common cases like (a > b) == 0 but also handles
9364 expressions like ((x > y) - (y > x)) > 0, which supposedly
9365 occur in macroized code. */
9367 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9369 tree cval1 = 0, cval2 = 0;
9372 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9373 /* Don't handle degenerate cases here; they should already
9374 have been handled anyway. */
9375 && cval1 != 0 && cval2 != 0
9376 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9377 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9378 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9379 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9380 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9381 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9382 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9384 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9385 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9387 /* We can't just pass T to eval_subst in case cval1 or cval2
9388 was the same as ARG1. */
9391 = fold_build2 (code, type,
9392 eval_subst (arg0, cval1, maxval,
9396 = fold_build2 (code, type,
9397 eval_subst (arg0, cval1, maxval,
9401 = fold_build2 (code, type,
9402 eval_subst (arg0, cval1, minval,
9406 /* All three of these results should be 0 or 1. Confirm they are.
9407 Then use those values to select the proper code to use. */
9409 if (TREE_CODE (high_result) == INTEGER_CST
9410 && TREE_CODE (equal_result) == INTEGER_CST
9411 && TREE_CODE (low_result) == INTEGER_CST)
9413 /* Make a 3-bit mask with the high-order bit being the
9414 value for `>', the next for '=', and the low for '<'. */
9415 switch ((integer_onep (high_result) * 4)
9416 + (integer_onep (equal_result) * 2)
9417 + integer_onep (low_result))
9421 return omit_one_operand (type, integer_zero_node, arg0);
9442 return omit_one_operand (type, integer_one_node, arg0);
9446 return save_expr (build2 (code, type, cval1, cval2));
9447 return fold_build2 (code, type, cval1, cval2);
9452 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9453 into a single range test. */
9454 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9455 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9456 && TREE_CODE (arg1) == INTEGER_CST
9457 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9458 && !integer_zerop (TREE_OPERAND (arg0, 1))
9459 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9460 && !TREE_OVERFLOW (arg1))
9462 tem = fold_div_compare (code, type, arg0, arg1);
9463 if (tem != NULL_TREE)
9467 /* Fold ~X op ~Y as Y op X. */
9468 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9469 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9471 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9472 return fold_build2 (code, type,
9473 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9474 TREE_OPERAND (arg0, 0));
9477 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9479 && TREE_CODE (arg1) == INTEGER_CST)
9481 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9482 return fold_build2 (swap_tree_comparison (code), type,
9483 TREE_OPERAND (arg0, 0),
9484 fold_build1 (BIT_NOT_EXPR, cmp_type,
9485 fold_convert (cmp_type, arg1)));
9492 /* Subroutine of fold_binary. Optimize complex multiplications of the
9493 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9494 argument EXPR represents the expression "z" of type TYPE. */
9497 fold_mult_zconjz (tree type, tree expr)
9499 tree itype = TREE_TYPE (type);
9500 tree rpart, ipart, tem;
9502 if (TREE_CODE (expr) == COMPLEX_EXPR)
9504 rpart = TREE_OPERAND (expr, 0);
9505 ipart = TREE_OPERAND (expr, 1);
9507 else if (TREE_CODE (expr) == COMPLEX_CST)
9509 rpart = TREE_REALPART (expr);
9510 ipart = TREE_IMAGPART (expr);
9514 expr = save_expr (expr);
9515 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9516 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9519 rpart = save_expr (rpart);
9520 ipart = save_expr (ipart);
9521 tem = fold_build2 (PLUS_EXPR, itype,
9522 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9523 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9524 return fold_build2 (COMPLEX_EXPR, type, tem,
9525 fold_convert (itype, integer_zero_node));
9529 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9530 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9531 guarantees that P and N have the same least significant log2(M) bits.
9532 N is not otherwise constrained. In particular, N is not normalized to
9533 0 <= N < M as is common. In general, the precise value of P is unknown.
9534 M is chosen as large as possible such that constant N can be determined.
9536 Returns M and sets *RESIDUE to N.
9538 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9539 account. This is not always possible due to PR 35705.
9542 static unsigned HOST_WIDE_INT
9543 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9544 bool allow_func_align)
9546 enum tree_code code;
9550 code = TREE_CODE (expr);
9551 if (code == ADDR_EXPR)
9553 expr = TREE_OPERAND (expr, 0);
9554 if (handled_component_p (expr))
9556 HOST_WIDE_INT bitsize, bitpos;
9558 enum machine_mode mode;
9559 int unsignedp, volatilep;
9561 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9562 &mode, &unsignedp, &volatilep, false);
9563 *residue = bitpos / BITS_PER_UNIT;
9566 if (TREE_CODE (offset) == INTEGER_CST)
9567 *residue += TREE_INT_CST_LOW (offset);
9569 /* We don't handle more complicated offset expressions. */
9575 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9576 return DECL_ALIGN_UNIT (expr);
9578 else if (code == POINTER_PLUS_EXPR)
9581 unsigned HOST_WIDE_INT modulus;
9582 enum tree_code inner_code;
9584 op0 = TREE_OPERAND (expr, 0);
9586 modulus = get_pointer_modulus_and_residue (op0, residue,
9589 op1 = TREE_OPERAND (expr, 1);
9591 inner_code = TREE_CODE (op1);
9592 if (inner_code == INTEGER_CST)
9594 *residue += TREE_INT_CST_LOW (op1);
9597 else if (inner_code == MULT_EXPR)
9599 op1 = TREE_OPERAND (op1, 1);
9600 if (TREE_CODE (op1) == INTEGER_CST)
9602 unsigned HOST_WIDE_INT align;
9604 /* Compute the greatest power-of-2 divisor of op1. */
9605 align = TREE_INT_CST_LOW (op1);
9608 /* If align is non-zero and less than *modulus, replace
9609 *modulus with align., If align is 0, then either op1 is 0
9610 or the greatest power-of-2 divisor of op1 doesn't fit in an
9611 unsigned HOST_WIDE_INT. In either case, no additional
9612 constraint is imposed. */
9614 modulus = MIN (modulus, align);
9621 /* If we get here, we were unable to determine anything useful about the
9627 /* Fold a binary expression of code CODE and type TYPE with operands
9628 OP0 and OP1. Return the folded expression if folding is
9629 successful. Otherwise, return NULL_TREE. */
9632 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9634 enum tree_code_class kind = TREE_CODE_CLASS (code);
9635 tree arg0, arg1, tem;
9636 tree t1 = NULL_TREE;
9637 bool strict_overflow_p;
9639 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9640 && TREE_CODE_LENGTH (code) == 2
9642 && op1 != NULL_TREE);
9647 /* Strip any conversions that don't change the mode. This is
9648 safe for every expression, except for a comparison expression
9649 because its signedness is derived from its operands. So, in
9650 the latter case, only strip conversions that don't change the
9651 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9654 Note that this is done as an internal manipulation within the
9655 constant folder, in order to find the simplest representation
9656 of the arguments so that their form can be studied. In any
9657 cases, the appropriate type conversions should be put back in
9658 the tree that will get out of the constant folder. */
9660 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9662 STRIP_SIGN_NOPS (arg0);
9663 STRIP_SIGN_NOPS (arg1);
9671 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9672 constant but we can't do arithmetic on them. */
9673 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9674 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9675 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9676 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9677 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9678 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9680 if (kind == tcc_binary)
9682 /* Make sure type and arg0 have the same saturating flag. */
9683 gcc_assert (TYPE_SATURATING (type)
9684 == TYPE_SATURATING (TREE_TYPE (arg0)));
9685 tem = const_binop (code, arg0, arg1, 0);
9687 else if (kind == tcc_comparison)
9688 tem = fold_relational_const (code, type, arg0, arg1);
9692 if (tem != NULL_TREE)
9694 if (TREE_TYPE (tem) != type)
9695 tem = fold_convert (type, tem);
9700 /* If this is a commutative operation, and ARG0 is a constant, move it
9701 to ARG1 to reduce the number of tests below. */
9702 if (commutative_tree_code (code)
9703 && tree_swap_operands_p (arg0, arg1, true))
9704 return fold_build2 (code, type, op1, op0);
9706 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9708 First check for cases where an arithmetic operation is applied to a
9709 compound, conditional, or comparison operation. Push the arithmetic
9710 operation inside the compound or conditional to see if any folding
9711 can then be done. Convert comparison to conditional for this purpose.
9712 The also optimizes non-constant cases that used to be done in
9715 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9716 one of the operands is a comparison and the other is a comparison, a
9717 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9718 code below would make the expression more complex. Change it to a
9719 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9720 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9722 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9723 || code == EQ_EXPR || code == NE_EXPR)
9724 && ((truth_value_p (TREE_CODE (arg0))
9725 && (truth_value_p (TREE_CODE (arg1))
9726 || (TREE_CODE (arg1) == BIT_AND_EXPR
9727 && integer_onep (TREE_OPERAND (arg1, 1)))))
9728 || (truth_value_p (TREE_CODE (arg1))
9729 && (truth_value_p (TREE_CODE (arg0))
9730 || (TREE_CODE (arg0) == BIT_AND_EXPR
9731 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9733 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9734 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9737 fold_convert (boolean_type_node, arg0),
9738 fold_convert (boolean_type_node, arg1));
9740 if (code == EQ_EXPR)
9741 tem = invert_truthvalue (tem);
9743 return fold_convert (type, tem);
9746 if (TREE_CODE_CLASS (code) == tcc_binary
9747 || TREE_CODE_CLASS (code) == tcc_comparison)
9749 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9750 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9751 fold_build2 (code, type,
9752 fold_convert (TREE_TYPE (op0),
9753 TREE_OPERAND (arg0, 1)),
9755 if (TREE_CODE (arg1) == COMPOUND_EXPR
9756 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9757 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9758 fold_build2 (code, type, op0,
9759 fold_convert (TREE_TYPE (op1),
9760 TREE_OPERAND (arg1, 1))));
9762 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9764 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9766 /*cond_first_p=*/1);
9767 if (tem != NULL_TREE)
9771 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9773 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9775 /*cond_first_p=*/0);
9776 if (tem != NULL_TREE)
9783 case POINTER_PLUS_EXPR:
9784 /* 0 +p index -> (type)index */
9785 if (integer_zerop (arg0))
9786 return non_lvalue (fold_convert (type, arg1));
9788 /* PTR +p 0 -> PTR */
9789 if (integer_zerop (arg1))
9790 return non_lvalue (fold_convert (type, arg0));
9792 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9793 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9794 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9795 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9796 fold_convert (sizetype, arg1),
9797 fold_convert (sizetype, arg0)));
9799 /* index +p PTR -> PTR +p index */
9800 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9801 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9802 return fold_build2 (POINTER_PLUS_EXPR, type,
9803 fold_convert (type, arg1),
9804 fold_convert (sizetype, arg0));
9806 /* (PTR +p B) +p A -> PTR +p (B + A) */
9807 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9810 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9811 tree arg00 = TREE_OPERAND (arg0, 0);
9812 inner = fold_build2 (PLUS_EXPR, sizetype,
9813 arg01, fold_convert (sizetype, arg1));
9814 return fold_convert (type,
9815 fold_build2 (POINTER_PLUS_EXPR,
9816 TREE_TYPE (arg00), arg00, inner));
9819 /* PTR_CST +p CST -> CST1 */
9820 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9821 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9823 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9824 of the array. Loop optimizer sometimes produce this type of
9826 if (TREE_CODE (arg0) == ADDR_EXPR)
9828 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9830 return fold_convert (type, tem);
9836 /* A + (-B) -> A - B */
9837 if (TREE_CODE (arg1) == NEGATE_EXPR)
9838 return fold_build2 (MINUS_EXPR, type,
9839 fold_convert (type, arg0),
9840 fold_convert (type, TREE_OPERAND (arg1, 0)));
9841 /* (-A) + B -> B - A */
9842 if (TREE_CODE (arg0) == NEGATE_EXPR
9843 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9844 return fold_build2 (MINUS_EXPR, type,
9845 fold_convert (type, arg1),
9846 fold_convert (type, TREE_OPERAND (arg0, 0)));
9848 if (INTEGRAL_TYPE_P (type))
9850 /* Convert ~A + 1 to -A. */
9851 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9852 && integer_onep (arg1))
9853 return fold_build1 (NEGATE_EXPR, type,
9854 fold_convert (type, TREE_OPERAND (arg0, 0)));
9857 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9858 && !TYPE_OVERFLOW_TRAPS (type))
9860 tree tem = TREE_OPERAND (arg0, 0);
9863 if (operand_equal_p (tem, arg1, 0))
9865 t1 = build_int_cst_type (type, -1);
9866 return omit_one_operand (type, t1, arg1);
9871 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9872 && !TYPE_OVERFLOW_TRAPS (type))
9874 tree tem = TREE_OPERAND (arg1, 0);
9877 if (operand_equal_p (arg0, tem, 0))
9879 t1 = build_int_cst_type (type, -1);
9880 return omit_one_operand (type, t1, arg0);
9884 /* X + (X / CST) * -CST is X % CST. */
9885 if (TREE_CODE (arg1) == MULT_EXPR
9886 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9887 && operand_equal_p (arg0,
9888 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9890 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9891 tree cst1 = TREE_OPERAND (arg1, 1);
9892 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9893 if (sum && integer_zerop (sum))
9894 return fold_convert (type,
9895 fold_build2 (TRUNC_MOD_EXPR,
9896 TREE_TYPE (arg0), arg0, cst0));
9900 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9901 same or one. Make sure type is not saturating.
9902 fold_plusminus_mult_expr will re-associate. */
9903 if ((TREE_CODE (arg0) == MULT_EXPR
9904 || TREE_CODE (arg1) == MULT_EXPR)
9905 && !TYPE_SATURATING (type)
9906 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9908 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9913 if (! FLOAT_TYPE_P (type))
9915 if (integer_zerop (arg1))
9916 return non_lvalue (fold_convert (type, arg0));
9918 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9919 with a constant, and the two constants have no bits in common,
9920 we should treat this as a BIT_IOR_EXPR since this may produce more
9922 if (TREE_CODE (arg0) == BIT_AND_EXPR
9923 && TREE_CODE (arg1) == BIT_AND_EXPR
9924 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9925 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9926 && integer_zerop (const_binop (BIT_AND_EXPR,
9927 TREE_OPERAND (arg0, 1),
9928 TREE_OPERAND (arg1, 1), 0)))
9930 code = BIT_IOR_EXPR;
9934 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9935 (plus (plus (mult) (mult)) (foo)) so that we can
9936 take advantage of the factoring cases below. */
9937 if (((TREE_CODE (arg0) == PLUS_EXPR
9938 || TREE_CODE (arg0) == MINUS_EXPR)
9939 && TREE_CODE (arg1) == MULT_EXPR)
9940 || ((TREE_CODE (arg1) == PLUS_EXPR
9941 || TREE_CODE (arg1) == MINUS_EXPR)
9942 && TREE_CODE (arg0) == MULT_EXPR))
9944 tree parg0, parg1, parg, marg;
9945 enum tree_code pcode;
9947 if (TREE_CODE (arg1) == MULT_EXPR)
9948 parg = arg0, marg = arg1;
9950 parg = arg1, marg = arg0;
9951 pcode = TREE_CODE (parg);
9952 parg0 = TREE_OPERAND (parg, 0);
9953 parg1 = TREE_OPERAND (parg, 1);
9957 if (TREE_CODE (parg0) == MULT_EXPR
9958 && TREE_CODE (parg1) != MULT_EXPR)
9959 return fold_build2 (pcode, type,
9960 fold_build2 (PLUS_EXPR, type,
9961 fold_convert (type, parg0),
9962 fold_convert (type, marg)),
9963 fold_convert (type, parg1));
9964 if (TREE_CODE (parg0) != MULT_EXPR
9965 && TREE_CODE (parg1) == MULT_EXPR)
9966 return fold_build2 (PLUS_EXPR, type,
9967 fold_convert (type, parg0),
9968 fold_build2 (pcode, type,
9969 fold_convert (type, marg),
9976 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9977 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9978 return non_lvalue (fold_convert (type, arg0));
9980 /* Likewise if the operands are reversed. */
9981 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9982 return non_lvalue (fold_convert (type, arg1));
9984 /* Convert X + -C into X - C. */
9985 if (TREE_CODE (arg1) == REAL_CST
9986 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9988 tem = fold_negate_const (arg1, type);
9989 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9990 return fold_build2 (MINUS_EXPR, type,
9991 fold_convert (type, arg0),
9992 fold_convert (type, tem));
9995 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9996 to __complex__ ( x, y ). This is not the same for SNaNs or
9997 if signed zeros are involved. */
9998 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9999 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10000 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10002 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10003 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10004 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10005 bool arg0rz = false, arg0iz = false;
10006 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10007 || (arg0i && (arg0iz = real_zerop (arg0i))))
10009 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10010 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10011 if (arg0rz && arg1i && real_zerop (arg1i))
10013 tree rp = arg1r ? arg1r
10014 : build1 (REALPART_EXPR, rtype, arg1);
10015 tree ip = arg0i ? arg0i
10016 : build1 (IMAGPART_EXPR, rtype, arg0);
10017 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10019 else if (arg0iz && arg1r && real_zerop (arg1r))
10021 tree rp = arg0r ? arg0r
10022 : build1 (REALPART_EXPR, rtype, arg0);
10023 tree ip = arg1i ? arg1i
10024 : build1 (IMAGPART_EXPR, rtype, arg1);
10025 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10030 if (flag_unsafe_math_optimizations
10031 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10032 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10033 && (tem = distribute_real_division (code, type, arg0, arg1)))
10036 /* Convert x+x into x*2.0. */
10037 if (operand_equal_p (arg0, arg1, 0)
10038 && SCALAR_FLOAT_TYPE_P (type))
10039 return fold_build2 (MULT_EXPR, type, arg0,
10040 build_real (type, dconst2));
10042 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10043 We associate floats only if the user has specified
10044 -fassociative-math. */
10045 if (flag_associative_math
10046 && TREE_CODE (arg1) == PLUS_EXPR
10047 && TREE_CODE (arg0) != MULT_EXPR)
10049 tree tree10 = TREE_OPERAND (arg1, 0);
10050 tree tree11 = TREE_OPERAND (arg1, 1);
10051 if (TREE_CODE (tree11) == MULT_EXPR
10052 && TREE_CODE (tree10) == MULT_EXPR)
10055 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10056 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10059 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10060 We associate floats only if the user has specified
10061 -fassociative-math. */
10062 if (flag_associative_math
10063 && TREE_CODE (arg0) == PLUS_EXPR
10064 && TREE_CODE (arg1) != MULT_EXPR)
10066 tree tree00 = TREE_OPERAND (arg0, 0);
10067 tree tree01 = TREE_OPERAND (arg0, 1);
10068 if (TREE_CODE (tree01) == MULT_EXPR
10069 && TREE_CODE (tree00) == MULT_EXPR)
10072 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10073 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10079 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10080 is a rotate of A by C1 bits. */
10081 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10082 is a rotate of A by B bits. */
10084 enum tree_code code0, code1;
10086 code0 = TREE_CODE (arg0);
10087 code1 = TREE_CODE (arg1);
10088 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10089 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10090 && operand_equal_p (TREE_OPERAND (arg0, 0),
10091 TREE_OPERAND (arg1, 0), 0)
10092 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10093 TYPE_UNSIGNED (rtype))
10094 /* Only create rotates in complete modes. Other cases are not
10095 expanded properly. */
10096 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10098 tree tree01, tree11;
10099 enum tree_code code01, code11;
10101 tree01 = TREE_OPERAND (arg0, 1);
10102 tree11 = TREE_OPERAND (arg1, 1);
10103 STRIP_NOPS (tree01);
10104 STRIP_NOPS (tree11);
10105 code01 = TREE_CODE (tree01);
10106 code11 = TREE_CODE (tree11);
10107 if (code01 == INTEGER_CST
10108 && code11 == INTEGER_CST
10109 && TREE_INT_CST_HIGH (tree01) == 0
10110 && TREE_INT_CST_HIGH (tree11) == 0
10111 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10112 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10113 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10114 code0 == LSHIFT_EXPR ? tree01 : tree11);
10115 else if (code11 == MINUS_EXPR)
10117 tree tree110, tree111;
10118 tree110 = TREE_OPERAND (tree11, 0);
10119 tree111 = TREE_OPERAND (tree11, 1);
10120 STRIP_NOPS (tree110);
10121 STRIP_NOPS (tree111);
10122 if (TREE_CODE (tree110) == INTEGER_CST
10123 && 0 == compare_tree_int (tree110,
10125 (TREE_TYPE (TREE_OPERAND
10127 && operand_equal_p (tree01, tree111, 0))
10128 return build2 ((code0 == LSHIFT_EXPR
10131 type, TREE_OPERAND (arg0, 0), tree01);
10133 else if (code01 == MINUS_EXPR)
10135 tree tree010, tree011;
10136 tree010 = TREE_OPERAND (tree01, 0);
10137 tree011 = TREE_OPERAND (tree01, 1);
10138 STRIP_NOPS (tree010);
10139 STRIP_NOPS (tree011);
10140 if (TREE_CODE (tree010) == INTEGER_CST
10141 && 0 == compare_tree_int (tree010,
10143 (TREE_TYPE (TREE_OPERAND
10145 && operand_equal_p (tree11, tree011, 0))
10146 return build2 ((code0 != LSHIFT_EXPR
10149 type, TREE_OPERAND (arg0, 0), tree11);
10155 /* In most languages, can't associate operations on floats through
10156 parentheses. Rather than remember where the parentheses were, we
10157 don't associate floats at all, unless the user has specified
10158 -fassociative-math.
10159 And, we need to make sure type is not saturating. */
10161 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10162 && !TYPE_SATURATING (type))
10164 tree var0, con0, lit0, minus_lit0;
10165 tree var1, con1, lit1, minus_lit1;
10168 /* Split both trees into variables, constants, and literals. Then
10169 associate each group together, the constants with literals,
10170 then the result with variables. This increases the chances of
10171 literals being recombined later and of generating relocatable
10172 expressions for the sum of a constant and literal. */
10173 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10174 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10175 code == MINUS_EXPR);
10177 /* With undefined overflow we can only associate constants
10178 with one variable. */
10179 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10180 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10186 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10187 tmp0 = TREE_OPERAND (tmp0, 0);
10188 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10189 tmp1 = TREE_OPERAND (tmp1, 0);
10190 /* The only case we can still associate with two variables
10191 is if they are the same, modulo negation. */
10192 if (!operand_equal_p (tmp0, tmp1, 0))
10196 /* Only do something if we found more than two objects. Otherwise,
10197 nothing has changed and we risk infinite recursion. */
10199 && (2 < ((var0 != 0) + (var1 != 0)
10200 + (con0 != 0) + (con1 != 0)
10201 + (lit0 != 0) + (lit1 != 0)
10202 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10204 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10205 if (code == MINUS_EXPR)
10208 var0 = associate_trees (var0, var1, code, type);
10209 con0 = associate_trees (con0, con1, code, type);
10210 lit0 = associate_trees (lit0, lit1, code, type);
10211 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10213 /* Preserve the MINUS_EXPR if the negative part of the literal is
10214 greater than the positive part. Otherwise, the multiplicative
10215 folding code (i.e extract_muldiv) may be fooled in case
10216 unsigned constants are subtracted, like in the following
10217 example: ((X*2 + 4) - 8U)/2. */
10218 if (minus_lit0 && lit0)
10220 if (TREE_CODE (lit0) == INTEGER_CST
10221 && TREE_CODE (minus_lit0) == INTEGER_CST
10222 && tree_int_cst_lt (lit0, minus_lit0))
10224 minus_lit0 = associate_trees (minus_lit0, lit0,
10230 lit0 = associate_trees (lit0, minus_lit0,
10238 return fold_convert (type,
10239 associate_trees (var0, minus_lit0,
10240 MINUS_EXPR, type));
10243 con0 = associate_trees (con0, minus_lit0,
10245 return fold_convert (type,
10246 associate_trees (var0, con0,
10251 con0 = associate_trees (con0, lit0, code, type);
10252 return fold_convert (type, associate_trees (var0, con0,
10260 /* Pointer simplifications for subtraction, simple reassociations. */
10261 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10263 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10264 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10265 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10267 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10268 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10269 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10270 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10271 return fold_build2 (PLUS_EXPR, type,
10272 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10273 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10275 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10276 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10278 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10279 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10280 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10282 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10285 /* A - (-B) -> A + B */
10286 if (TREE_CODE (arg1) == NEGATE_EXPR)
10287 return fold_build2 (PLUS_EXPR, type, op0,
10288 fold_convert (type, TREE_OPERAND (arg1, 0)));
10289 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10290 if (TREE_CODE (arg0) == NEGATE_EXPR
10291 && (FLOAT_TYPE_P (type)
10292 || INTEGRAL_TYPE_P (type))
10293 && negate_expr_p (arg1)
10294 && reorder_operands_p (arg0, arg1))
10295 return fold_build2 (MINUS_EXPR, type,
10296 fold_convert (type, negate_expr (arg1)),
10297 fold_convert (type, TREE_OPERAND (arg0, 0)));
10298 /* Convert -A - 1 to ~A. */
10299 if (INTEGRAL_TYPE_P (type)
10300 && TREE_CODE (arg0) == NEGATE_EXPR
10301 && integer_onep (arg1)
10302 && !TYPE_OVERFLOW_TRAPS (type))
10303 return fold_build1 (BIT_NOT_EXPR, type,
10304 fold_convert (type, TREE_OPERAND (arg0, 0)));
10306 /* Convert -1 - A to ~A. */
10307 if (INTEGRAL_TYPE_P (type)
10308 && integer_all_onesp (arg0))
10309 return fold_build1 (BIT_NOT_EXPR, type, op1);
10312 /* X - (X / CST) * CST is X % CST. */
10313 if (INTEGRAL_TYPE_P (type)
10314 && TREE_CODE (arg1) == MULT_EXPR
10315 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10316 && operand_equal_p (arg0,
10317 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10318 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10319 TREE_OPERAND (arg1, 1), 0))
10320 return fold_convert (type,
10321 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10322 arg0, TREE_OPERAND (arg1, 1)));
10324 if (! FLOAT_TYPE_P (type))
10326 if (integer_zerop (arg0))
10327 return negate_expr (fold_convert (type, arg1));
10328 if (integer_zerop (arg1))
10329 return non_lvalue (fold_convert (type, arg0));
10331 /* Fold A - (A & B) into ~B & A. */
10332 if (!TREE_SIDE_EFFECTS (arg0)
10333 && TREE_CODE (arg1) == BIT_AND_EXPR)
10335 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10337 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10338 return fold_build2 (BIT_AND_EXPR, type,
10339 fold_build1 (BIT_NOT_EXPR, type, arg10),
10340 fold_convert (type, arg0));
10342 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10344 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10345 return fold_build2 (BIT_AND_EXPR, type,
10346 fold_build1 (BIT_NOT_EXPR, type, arg11),
10347 fold_convert (type, arg0));
10351 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10352 any power of 2 minus 1. */
10353 if (TREE_CODE (arg0) == BIT_AND_EXPR
10354 && TREE_CODE (arg1) == BIT_AND_EXPR
10355 && operand_equal_p (TREE_OPERAND (arg0, 0),
10356 TREE_OPERAND (arg1, 0), 0))
10358 tree mask0 = TREE_OPERAND (arg0, 1);
10359 tree mask1 = TREE_OPERAND (arg1, 1);
10360 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10362 if (operand_equal_p (tem, mask1, 0))
10364 tem = fold_build2 (BIT_XOR_EXPR, type,
10365 TREE_OPERAND (arg0, 0), mask1);
10366 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10371 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10372 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10373 return non_lvalue (fold_convert (type, arg0));
10375 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10376 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10377 (-ARG1 + ARG0) reduces to -ARG1. */
10378 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10379 return negate_expr (fold_convert (type, arg1));
10381 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10382 __complex__ ( x, -y ). This is not the same for SNaNs or if
10383 signed zeros are involved. */
10384 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10385 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10386 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10388 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10389 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10390 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10391 bool arg0rz = false, arg0iz = false;
10392 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10393 || (arg0i && (arg0iz = real_zerop (arg0i))))
10395 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10396 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10397 if (arg0rz && arg1i && real_zerop (arg1i))
10399 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10401 : build1 (REALPART_EXPR, rtype, arg1));
10402 tree ip = arg0i ? arg0i
10403 : build1 (IMAGPART_EXPR, rtype, arg0);
10404 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10406 else if (arg0iz && arg1r && real_zerop (arg1r))
10408 tree rp = arg0r ? arg0r
10409 : build1 (REALPART_EXPR, rtype, arg0);
10410 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10412 : build1 (IMAGPART_EXPR, rtype, arg1));
10413 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10418 /* Fold &x - &x. This can happen from &x.foo - &x.
10419 This is unsafe for certain floats even in non-IEEE formats.
10420 In IEEE, it is unsafe because it does wrong for NaNs.
10421 Also note that operand_equal_p is always false if an operand
10424 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10425 && operand_equal_p (arg0, arg1, 0))
10426 return fold_convert (type, integer_zero_node);
10428 /* A - B -> A + (-B) if B is easily negatable. */
10429 if (negate_expr_p (arg1)
10430 && ((FLOAT_TYPE_P (type)
10431 /* Avoid this transformation if B is a positive REAL_CST. */
10432 && (TREE_CODE (arg1) != REAL_CST
10433 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10434 || INTEGRAL_TYPE_P (type)))
10435 return fold_build2 (PLUS_EXPR, type,
10436 fold_convert (type, arg0),
10437 fold_convert (type, negate_expr (arg1)));
10439 /* Try folding difference of addresses. */
10441 HOST_WIDE_INT diff;
10443 if ((TREE_CODE (arg0) == ADDR_EXPR
10444 || TREE_CODE (arg1) == ADDR_EXPR)
10445 && ptr_difference_const (arg0, arg1, &diff))
10446 return build_int_cst_type (type, diff);
10449 /* Fold &a[i] - &a[j] to i-j. */
10450 if (TREE_CODE (arg0) == ADDR_EXPR
10451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10452 && TREE_CODE (arg1) == ADDR_EXPR
10453 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10455 tree aref0 = TREE_OPERAND (arg0, 0);
10456 tree aref1 = TREE_OPERAND (arg1, 0);
10457 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10458 TREE_OPERAND (aref1, 0), 0))
10460 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10461 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10462 tree esz = array_ref_element_size (aref0);
10463 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10464 return fold_build2 (MULT_EXPR, type, diff,
10465 fold_convert (type, esz));
10470 if (flag_unsafe_math_optimizations
10471 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10472 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10473 && (tem = distribute_real_division (code, type, arg0, arg1)))
10476 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10477 same or one. Make sure type is not saturating.
10478 fold_plusminus_mult_expr will re-associate. */
10479 if ((TREE_CODE (arg0) == MULT_EXPR
10480 || TREE_CODE (arg1) == MULT_EXPR)
10481 && !TYPE_SATURATING (type)
10482 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10484 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10492 /* (-A) * (-B) -> A * B */
10493 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10494 return fold_build2 (MULT_EXPR, type,
10495 fold_convert (type, TREE_OPERAND (arg0, 0)),
10496 fold_convert (type, negate_expr (arg1)));
10497 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10498 return fold_build2 (MULT_EXPR, type,
10499 fold_convert (type, negate_expr (arg0)),
10500 fold_convert (type, TREE_OPERAND (arg1, 0)));
10502 if (! FLOAT_TYPE_P (type))
10504 if (integer_zerop (arg1))
10505 return omit_one_operand (type, arg1, arg0);
10506 if (integer_onep (arg1))
10507 return non_lvalue (fold_convert (type, arg0));
10508 /* Transform x * -1 into -x. Make sure to do the negation
10509 on the original operand with conversions not stripped
10510 because we can only strip non-sign-changing conversions. */
10511 if (integer_all_onesp (arg1))
10512 return fold_convert (type, negate_expr (op0));
10513 /* Transform x * -C into -x * C if x is easily negatable. */
10514 if (TREE_CODE (arg1) == INTEGER_CST
10515 && tree_int_cst_sgn (arg1) == -1
10516 && negate_expr_p (arg0)
10517 && (tem = negate_expr (arg1)) != arg1
10518 && !TREE_OVERFLOW (tem))
10519 return fold_build2 (MULT_EXPR, type,
10520 fold_convert (type, negate_expr (arg0)), tem);
10522 /* (a * (1 << b)) is (a << b) */
10523 if (TREE_CODE (arg1) == LSHIFT_EXPR
10524 && integer_onep (TREE_OPERAND (arg1, 0)))
10525 return fold_build2 (LSHIFT_EXPR, type, op0,
10526 TREE_OPERAND (arg1, 1));
10527 if (TREE_CODE (arg0) == LSHIFT_EXPR
10528 && integer_onep (TREE_OPERAND (arg0, 0)))
10529 return fold_build2 (LSHIFT_EXPR, type, op1,
10530 TREE_OPERAND (arg0, 1));
10532 /* (A + A) * C -> A * 2 * C */
10533 if (TREE_CODE (arg0) == PLUS_EXPR
10534 && TREE_CODE (arg1) == INTEGER_CST
10535 && operand_equal_p (TREE_OPERAND (arg0, 0),
10536 TREE_OPERAND (arg0, 1), 0))
10537 return fold_build2 (MULT_EXPR, type,
10538 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10539 TREE_OPERAND (arg0, 1)),
10540 fold_build2 (MULT_EXPR, type,
10541 build_int_cst (type, 2) , arg1));
10543 strict_overflow_p = false;
10544 if (TREE_CODE (arg1) == INTEGER_CST
10545 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10546 &strict_overflow_p)))
10548 if (strict_overflow_p)
10549 fold_overflow_warning (("assuming signed overflow does not "
10550 "occur when simplifying "
10552 WARN_STRICT_OVERFLOW_MISC);
10553 return fold_convert (type, tem);
10556 /* Optimize z * conj(z) for integer complex numbers. */
10557 if (TREE_CODE (arg0) == CONJ_EXPR
10558 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10559 return fold_mult_zconjz (type, arg1);
10560 if (TREE_CODE (arg1) == CONJ_EXPR
10561 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10562 return fold_mult_zconjz (type, arg0);
10566 /* Maybe fold x * 0 to 0. The expressions aren't the same
10567 when x is NaN, since x * 0 is also NaN. Nor are they the
10568 same in modes with signed zeros, since multiplying a
10569 negative value by 0 gives -0, not +0. */
10570 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10571 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10572 && real_zerop (arg1))
10573 return omit_one_operand (type, arg1, arg0);
10574 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10575 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10576 && real_onep (arg1))
10577 return non_lvalue (fold_convert (type, arg0));
10579 /* Transform x * -1.0 into -x. */
10580 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10581 && real_minus_onep (arg1))
10582 return fold_convert (type, negate_expr (arg0));
10584 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10585 the result for floating point types due to rounding so it is applied
10586 only if -fassociative-math was specify. */
10587 if (flag_associative_math
10588 && TREE_CODE (arg0) == RDIV_EXPR
10589 && TREE_CODE (arg1) == REAL_CST
10590 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10592 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10595 return fold_build2 (RDIV_EXPR, type, tem,
10596 TREE_OPERAND (arg0, 1));
10599 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10600 if (operand_equal_p (arg0, arg1, 0))
10602 tree tem = fold_strip_sign_ops (arg0);
10603 if (tem != NULL_TREE)
10605 tem = fold_convert (type, tem);
10606 return fold_build2 (MULT_EXPR, type, tem, tem);
10610 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10611 This is not the same for NaNs or if signed zeros are
10613 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10614 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10615 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10616 && TREE_CODE (arg1) == COMPLEX_CST
10617 && real_zerop (TREE_REALPART (arg1)))
10619 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10620 if (real_onep (TREE_IMAGPART (arg1)))
10621 return fold_build2 (COMPLEX_EXPR, type,
10622 negate_expr (fold_build1 (IMAGPART_EXPR,
10624 fold_build1 (REALPART_EXPR, rtype, arg0));
10625 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10626 return fold_build2 (COMPLEX_EXPR, type,
10627 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10628 negate_expr (fold_build1 (REALPART_EXPR,
10632 /* Optimize z * conj(z) for floating point complex numbers.
10633 Guarded by flag_unsafe_math_optimizations as non-finite
10634 imaginary components don't produce scalar results. */
10635 if (flag_unsafe_math_optimizations
10636 && TREE_CODE (arg0) == CONJ_EXPR
10637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10638 return fold_mult_zconjz (type, arg1);
10639 if (flag_unsafe_math_optimizations
10640 && TREE_CODE (arg1) == CONJ_EXPR
10641 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10642 return fold_mult_zconjz (type, arg0);
10644 if (flag_unsafe_math_optimizations)
10646 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10647 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10649 /* Optimizations of root(...)*root(...). */
10650 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10653 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10654 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10656 /* Optimize sqrt(x)*sqrt(x) as x. */
10657 if (BUILTIN_SQRT_P (fcode0)
10658 && operand_equal_p (arg00, arg10, 0)
10659 && ! HONOR_SNANS (TYPE_MODE (type)))
10662 /* Optimize root(x)*root(y) as root(x*y). */
10663 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10664 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10665 return build_call_expr (rootfn, 1, arg);
10668 /* Optimize expN(x)*expN(y) as expN(x+y). */
10669 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10671 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10672 tree arg = fold_build2 (PLUS_EXPR, type,
10673 CALL_EXPR_ARG (arg0, 0),
10674 CALL_EXPR_ARG (arg1, 0));
10675 return build_call_expr (expfn, 1, arg);
10678 /* Optimizations of pow(...)*pow(...). */
10679 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10680 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10681 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10683 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10684 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10685 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10686 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10688 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10689 if (operand_equal_p (arg01, arg11, 0))
10691 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10692 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10693 return build_call_expr (powfn, 2, arg, arg01);
10696 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10697 if (operand_equal_p (arg00, arg10, 0))
10699 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10700 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10701 return build_call_expr (powfn, 2, arg00, arg);
10705 /* Optimize tan(x)*cos(x) as sin(x). */
10706 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10707 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10708 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10709 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10710 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10711 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10712 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10713 CALL_EXPR_ARG (arg1, 0), 0))
10715 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10717 if (sinfn != NULL_TREE)
10718 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10721 /* Optimize x*pow(x,c) as pow(x,c+1). */
10722 if (fcode1 == BUILT_IN_POW
10723 || fcode1 == BUILT_IN_POWF
10724 || fcode1 == BUILT_IN_POWL)
10726 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10727 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10728 if (TREE_CODE (arg11) == REAL_CST
10729 && !TREE_OVERFLOW (arg11)
10730 && operand_equal_p (arg0, arg10, 0))
10732 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10736 c = TREE_REAL_CST (arg11);
10737 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10738 arg = build_real (type, c);
10739 return build_call_expr (powfn, 2, arg0, arg);
10743 /* Optimize pow(x,c)*x as pow(x,c+1). */
10744 if (fcode0 == BUILT_IN_POW
10745 || fcode0 == BUILT_IN_POWF
10746 || fcode0 == BUILT_IN_POWL)
10748 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10749 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10750 if (TREE_CODE (arg01) == REAL_CST
10751 && !TREE_OVERFLOW (arg01)
10752 && operand_equal_p (arg1, arg00, 0))
10754 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10758 c = TREE_REAL_CST (arg01);
10759 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10760 arg = build_real (type, c);
10761 return build_call_expr (powfn, 2, arg1, arg);
10765 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10766 if (optimize_function_for_speed_p (cfun)
10767 && operand_equal_p (arg0, arg1, 0))
10769 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10773 tree arg = build_real (type, dconst2);
10774 return build_call_expr (powfn, 2, arg0, arg);
10783 if (integer_all_onesp (arg1))
10784 return omit_one_operand (type, arg1, arg0);
10785 if (integer_zerop (arg1))
10786 return non_lvalue (fold_convert (type, arg0));
10787 if (operand_equal_p (arg0, arg1, 0))
10788 return non_lvalue (fold_convert (type, arg0));
10790 /* ~X | X is -1. */
10791 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10792 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10794 t1 = fold_convert (type, integer_zero_node);
10795 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10796 return omit_one_operand (type, t1, arg1);
10799 /* X | ~X is -1. */
10800 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10801 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10803 t1 = fold_convert (type, integer_zero_node);
10804 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10805 return omit_one_operand (type, t1, arg0);
10808 /* Canonicalize (X & C1) | C2. */
10809 if (TREE_CODE (arg0) == BIT_AND_EXPR
10810 && TREE_CODE (arg1) == INTEGER_CST
10811 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10813 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10814 int width = TYPE_PRECISION (type), w;
10815 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10816 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10817 hi2 = TREE_INT_CST_HIGH (arg1);
10818 lo2 = TREE_INT_CST_LOW (arg1);
10820 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10821 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10822 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10824 if (width > HOST_BITS_PER_WIDE_INT)
10826 mhi = (unsigned HOST_WIDE_INT) -1
10827 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10833 mlo = (unsigned HOST_WIDE_INT) -1
10834 >> (HOST_BITS_PER_WIDE_INT - width);
10837 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10838 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10839 return fold_build2 (BIT_IOR_EXPR, type,
10840 TREE_OPERAND (arg0, 0), arg1);
10842 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10843 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10844 mode which allows further optimizations. */
10851 for (w = BITS_PER_UNIT;
10852 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10855 unsigned HOST_WIDE_INT mask
10856 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10857 if (((lo1 | lo2) & mask) == mask
10858 && (lo1 & ~mask) == 0 && hi1 == 0)
10865 if (hi3 != hi1 || lo3 != lo1)
10866 return fold_build2 (BIT_IOR_EXPR, type,
10867 fold_build2 (BIT_AND_EXPR, type,
10868 TREE_OPERAND (arg0, 0),
10869 build_int_cst_wide (type,
10874 /* (X & Y) | Y is (X, Y). */
10875 if (TREE_CODE (arg0) == BIT_AND_EXPR
10876 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10877 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10878 /* (X & Y) | X is (Y, X). */
10879 if (TREE_CODE (arg0) == BIT_AND_EXPR
10880 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10881 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10882 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10883 /* X | (X & Y) is (Y, X). */
10884 if (TREE_CODE (arg1) == BIT_AND_EXPR
10885 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10886 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10887 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10888 /* X | (Y & X) is (Y, X). */
10889 if (TREE_CODE (arg1) == BIT_AND_EXPR
10890 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10891 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10892 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10894 t1 = distribute_bit_expr (code, type, arg0, arg1);
10895 if (t1 != NULL_TREE)
10898 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10900 This results in more efficient code for machines without a NAND
10901 instruction. Combine will canonicalize to the first form
10902 which will allow use of NAND instructions provided by the
10903 backend if they exist. */
10904 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10905 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10907 return fold_build1 (BIT_NOT_EXPR, type,
10908 build2 (BIT_AND_EXPR, type,
10909 fold_convert (type,
10910 TREE_OPERAND (arg0, 0)),
10911 fold_convert (type,
10912 TREE_OPERAND (arg1, 0))));
10915 /* See if this can be simplified into a rotate first. If that
10916 is unsuccessful continue in the association code. */
10920 if (integer_zerop (arg1))
10921 return non_lvalue (fold_convert (type, arg0));
10922 if (integer_all_onesp (arg1))
10923 return fold_build1 (BIT_NOT_EXPR, type, op0);
10924 if (operand_equal_p (arg0, arg1, 0))
10925 return omit_one_operand (type, integer_zero_node, arg0);
10927 /* ~X ^ X is -1. */
10928 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10929 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10931 t1 = fold_convert (type, integer_zero_node);
10932 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10933 return omit_one_operand (type, t1, arg1);
10936 /* X ^ ~X is -1. */
10937 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10938 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10940 t1 = fold_convert (type, integer_zero_node);
10941 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10942 return omit_one_operand (type, t1, arg0);
10945 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10946 with a constant, and the two constants have no bits in common,
10947 we should treat this as a BIT_IOR_EXPR since this may produce more
10948 simplifications. */
10949 if (TREE_CODE (arg0) == BIT_AND_EXPR
10950 && TREE_CODE (arg1) == BIT_AND_EXPR
10951 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10952 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10953 && integer_zerop (const_binop (BIT_AND_EXPR,
10954 TREE_OPERAND (arg0, 1),
10955 TREE_OPERAND (arg1, 1), 0)))
10957 code = BIT_IOR_EXPR;
10961 /* (X | Y) ^ X -> Y & ~ X*/
10962 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10963 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10965 tree t2 = TREE_OPERAND (arg0, 1);
10966 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10968 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10969 fold_convert (type, t1));
10973 /* (Y | X) ^ X -> Y & ~ X*/
10974 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10975 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10977 tree t2 = TREE_OPERAND (arg0, 0);
10978 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10980 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10981 fold_convert (type, t1));
10985 /* X ^ (X | Y) -> Y & ~ X*/
10986 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10987 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10989 tree t2 = TREE_OPERAND (arg1, 1);
10990 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10992 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10993 fold_convert (type, t1));
10997 /* X ^ (Y | X) -> Y & ~ X*/
10998 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10999 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11001 tree t2 = TREE_OPERAND (arg1, 0);
11002 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11004 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11005 fold_convert (type, t1));
11009 /* Convert ~X ^ ~Y to X ^ Y. */
11010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11011 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11012 return fold_build2 (code, type,
11013 fold_convert (type, TREE_OPERAND (arg0, 0)),
11014 fold_convert (type, TREE_OPERAND (arg1, 0)));
11016 /* Convert ~X ^ C to X ^ ~C. */
11017 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11018 && TREE_CODE (arg1) == INTEGER_CST)
11019 return fold_build2 (code, type,
11020 fold_convert (type, TREE_OPERAND (arg0, 0)),
11021 fold_build1 (BIT_NOT_EXPR, type, arg1));
11023 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11024 if (TREE_CODE (arg0) == BIT_AND_EXPR
11025 && integer_onep (TREE_OPERAND (arg0, 1))
11026 && integer_onep (arg1))
11027 return fold_build2 (EQ_EXPR, type, arg0,
11028 build_int_cst (TREE_TYPE (arg0), 0));
11030 /* Fold (X & Y) ^ Y as ~X & Y. */
11031 if (TREE_CODE (arg0) == BIT_AND_EXPR
11032 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11034 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11035 return fold_build2 (BIT_AND_EXPR, type,
11036 fold_build1 (BIT_NOT_EXPR, type, tem),
11037 fold_convert (type, arg1));
11039 /* Fold (X & Y) ^ X as ~Y & X. */
11040 if (TREE_CODE (arg0) == BIT_AND_EXPR
11041 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11042 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11044 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11045 return fold_build2 (BIT_AND_EXPR, type,
11046 fold_build1 (BIT_NOT_EXPR, type, tem),
11047 fold_convert (type, arg1));
11049 /* Fold X ^ (X & Y) as X & ~Y. */
11050 if (TREE_CODE (arg1) == BIT_AND_EXPR
11051 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11053 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11054 return fold_build2 (BIT_AND_EXPR, type,
11055 fold_convert (type, arg0),
11056 fold_build1 (BIT_NOT_EXPR, type, tem));
11058 /* Fold X ^ (Y & X) as ~Y & X. */
11059 if (TREE_CODE (arg1) == BIT_AND_EXPR
11060 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11061 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11063 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11064 return fold_build2 (BIT_AND_EXPR, type,
11065 fold_build1 (BIT_NOT_EXPR, type, tem),
11066 fold_convert (type, arg0));
11069 /* See if this can be simplified into a rotate first. If that
11070 is unsuccessful continue in the association code. */
11074 if (integer_all_onesp (arg1))
11075 return non_lvalue (fold_convert (type, arg0));
11076 if (integer_zerop (arg1))
11077 return omit_one_operand (type, arg1, arg0);
11078 if (operand_equal_p (arg0, arg1, 0))
11079 return non_lvalue (fold_convert (type, arg0));
11081 /* ~X & X is always zero. */
11082 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11083 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11084 return omit_one_operand (type, integer_zero_node, arg1);
11086 /* X & ~X is always zero. */
11087 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11088 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11089 return omit_one_operand (type, integer_zero_node, arg0);
11091 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11092 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11093 && TREE_CODE (arg1) == INTEGER_CST
11094 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11096 tree tmp1 = fold_convert (type, arg1);
11097 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11098 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11099 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11100 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11101 return fold_convert (type,
11102 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11105 /* (X | Y) & Y is (X, Y). */
11106 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11107 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11108 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11109 /* (X | Y) & X is (Y, X). */
11110 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11111 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11112 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11113 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11114 /* X & (X | Y) is (Y, X). */
11115 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11117 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11118 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11119 /* X & (Y | X) is (Y, X). */
11120 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11121 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11122 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11123 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11125 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11126 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11127 && integer_onep (TREE_OPERAND (arg0, 1))
11128 && integer_onep (arg1))
11130 tem = TREE_OPERAND (arg0, 0);
11131 return fold_build2 (EQ_EXPR, type,
11132 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11133 build_int_cst (TREE_TYPE (tem), 1)),
11134 build_int_cst (TREE_TYPE (tem), 0));
11136 /* Fold ~X & 1 as (X & 1) == 0. */
11137 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11138 && integer_onep (arg1))
11140 tem = TREE_OPERAND (arg0, 0);
11141 return fold_build2 (EQ_EXPR, type,
11142 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11143 build_int_cst (TREE_TYPE (tem), 1)),
11144 build_int_cst (TREE_TYPE (tem), 0));
11147 /* Fold (X ^ Y) & Y as ~X & Y. */
11148 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11149 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11151 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11152 return fold_build2 (BIT_AND_EXPR, type,
11153 fold_build1 (BIT_NOT_EXPR, type, tem),
11154 fold_convert (type, arg1));
11156 /* Fold (X ^ Y) & X as ~Y & X. */
11157 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11158 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11159 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11161 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11162 return fold_build2 (BIT_AND_EXPR, type,
11163 fold_build1 (BIT_NOT_EXPR, type, tem),
11164 fold_convert (type, arg1));
11166 /* Fold X & (X ^ Y) as X & ~Y. */
11167 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11168 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11170 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11171 return fold_build2 (BIT_AND_EXPR, type,
11172 fold_convert (type, arg0),
11173 fold_build1 (BIT_NOT_EXPR, type, tem));
11175 /* Fold X & (Y ^ X) as ~Y & X. */
11176 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11177 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11178 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11180 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11181 return fold_build2 (BIT_AND_EXPR, type,
11182 fold_build1 (BIT_NOT_EXPR, type, tem),
11183 fold_convert (type, arg0));
11186 t1 = distribute_bit_expr (code, type, arg0, arg1);
11187 if (t1 != NULL_TREE)
11189 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11190 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11191 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11194 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11196 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11197 && (~TREE_INT_CST_LOW (arg1)
11198 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11199 return fold_convert (type, TREE_OPERAND (arg0, 0));
11202 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11204 This results in more efficient code for machines without a NOR
11205 instruction. Combine will canonicalize to the first form
11206 which will allow use of NOR instructions provided by the
11207 backend if they exist. */
11208 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11209 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11211 return fold_build1 (BIT_NOT_EXPR, type,
11212 build2 (BIT_IOR_EXPR, type,
11213 fold_convert (type,
11214 TREE_OPERAND (arg0, 0)),
11215 fold_convert (type,
11216 TREE_OPERAND (arg1, 0))));
11219 /* If arg0 is derived from the address of an object or function, we may
11220 be able to fold this expression using the object or function's
11222 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11224 unsigned HOST_WIDE_INT modulus, residue;
11225 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11227 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11228 integer_onep (arg1));
11230 /* This works because modulus is a power of 2. If this weren't the
11231 case, we'd have to replace it by its greatest power-of-2
11232 divisor: modulus & -modulus. */
11234 return build_int_cst (type, residue & low);
11237 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11238 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11239 if the new mask might be further optimized. */
11240 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11241 || TREE_CODE (arg0) == RSHIFT_EXPR)
11242 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11243 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11244 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11245 < TYPE_PRECISION (TREE_TYPE (arg0))
11246 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11247 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11249 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11250 unsigned HOST_WIDE_INT mask
11251 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11252 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11253 tree shift_type = TREE_TYPE (arg0);
11255 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11256 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11257 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11258 && TYPE_PRECISION (TREE_TYPE (arg0))
11259 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11261 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11262 tree arg00 = TREE_OPERAND (arg0, 0);
11263 /* See if more bits can be proven as zero because of
11265 if (TREE_CODE (arg00) == NOP_EXPR
11266 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11268 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11269 if (TYPE_PRECISION (inner_type)
11270 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11271 && TYPE_PRECISION (inner_type) < prec)
11273 prec = TYPE_PRECISION (inner_type);
11274 /* See if we can shorten the right shift. */
11276 shift_type = inner_type;
11279 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11280 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11281 zerobits <<= prec - shiftc;
11282 /* For arithmetic shift if sign bit could be set, zerobits
11283 can contain actually sign bits, so no transformation is
11284 possible, unless MASK masks them all away. In that
11285 case the shift needs to be converted into logical shift. */
11286 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11287 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11289 if ((mask & zerobits) == 0)
11290 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11296 /* ((X << 16) & 0xff00) is (X, 0). */
11297 if ((mask & zerobits) == mask)
11298 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11300 newmask = mask | zerobits;
11301 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11305 /* Only do the transformation if NEWMASK is some integer
11307 for (prec = BITS_PER_UNIT;
11308 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11309 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11311 if (prec < HOST_BITS_PER_WIDE_INT
11312 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11314 if (shift_type != TREE_TYPE (arg0))
11316 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11317 fold_convert (shift_type,
11318 TREE_OPERAND (arg0, 0)),
11319 TREE_OPERAND (arg0, 1));
11320 tem = fold_convert (type, tem);
11324 return fold_build2 (BIT_AND_EXPR, type, tem,
11325 build_int_cst_type (TREE_TYPE (op1),
11334 /* Don't touch a floating-point divide by zero unless the mode
11335 of the constant can represent infinity. */
11336 if (TREE_CODE (arg1) == REAL_CST
11337 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11338 && real_zerop (arg1))
11341 /* Optimize A / A to 1.0 if we don't care about
11342 NaNs or Infinities. Skip the transformation
11343 for non-real operands. */
11344 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11345 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11346 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11347 && operand_equal_p (arg0, arg1, 0))
11349 tree r = build_real (TREE_TYPE (arg0), dconst1);
11351 return omit_two_operands (type, r, arg0, arg1);
11354 /* The complex version of the above A / A optimization. */
11355 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11356 && operand_equal_p (arg0, arg1, 0))
11358 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11359 if (! HONOR_NANS (TYPE_MODE (elem_type))
11360 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11362 tree r = build_real (elem_type, dconst1);
11363 /* omit_two_operands will call fold_convert for us. */
11364 return omit_two_operands (type, r, arg0, arg1);
11368 /* (-A) / (-B) -> A / B */
11369 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11370 return fold_build2 (RDIV_EXPR, type,
11371 TREE_OPERAND (arg0, 0),
11372 negate_expr (arg1));
11373 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11374 return fold_build2 (RDIV_EXPR, type,
11375 negate_expr (arg0),
11376 TREE_OPERAND (arg1, 0));
11378 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11379 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11380 && real_onep (arg1))
11381 return non_lvalue (fold_convert (type, arg0));
11383 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11384 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11385 && real_minus_onep (arg1))
11386 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11388 /* If ARG1 is a constant, we can convert this to a multiply by the
11389 reciprocal. This does not have the same rounding properties,
11390 so only do this if -freciprocal-math. We can actually
11391 always safely do it if ARG1 is a power of two, but it's hard to
11392 tell if it is or not in a portable manner. */
11393 if (TREE_CODE (arg1) == REAL_CST)
11395 if (flag_reciprocal_math
11396 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11398 return fold_build2 (MULT_EXPR, type, arg0, tem);
11399 /* Find the reciprocal if optimizing and the result is exact. */
11403 r = TREE_REAL_CST (arg1);
11404 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11406 tem = build_real (type, r);
11407 return fold_build2 (MULT_EXPR, type,
11408 fold_convert (type, arg0), tem);
11412 /* Convert A/B/C to A/(B*C). */
11413 if (flag_reciprocal_math
11414 && TREE_CODE (arg0) == RDIV_EXPR)
11415 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11416 fold_build2 (MULT_EXPR, type,
11417 TREE_OPERAND (arg0, 1), arg1));
11419 /* Convert A/(B/C) to (A/B)*C. */
11420 if (flag_reciprocal_math
11421 && TREE_CODE (arg1) == RDIV_EXPR)
11422 return fold_build2 (MULT_EXPR, type,
11423 fold_build2 (RDIV_EXPR, type, arg0,
11424 TREE_OPERAND (arg1, 0)),
11425 TREE_OPERAND (arg1, 1));
11427 /* Convert C1/(X*C2) into (C1/C2)/X. */
11428 if (flag_reciprocal_math
11429 && TREE_CODE (arg1) == MULT_EXPR
11430 && TREE_CODE (arg0) == REAL_CST
11431 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11433 tree tem = const_binop (RDIV_EXPR, arg0,
11434 TREE_OPERAND (arg1, 1), 0);
11436 return fold_build2 (RDIV_EXPR, type, tem,
11437 TREE_OPERAND (arg1, 0));
11440 if (flag_unsafe_math_optimizations)
11442 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11443 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11445 /* Optimize sin(x)/cos(x) as tan(x). */
11446 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11447 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11448 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11449 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11450 CALL_EXPR_ARG (arg1, 0), 0))
11452 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11454 if (tanfn != NULL_TREE)
11455 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11458 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11459 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11460 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11461 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11462 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11463 CALL_EXPR_ARG (arg1, 0), 0))
11465 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11467 if (tanfn != NULL_TREE)
11469 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11470 return fold_build2 (RDIV_EXPR, type,
11471 build_real (type, dconst1), tmp);
11475 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11476 NaNs or Infinities. */
11477 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11478 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11479 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11481 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11482 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11484 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11485 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11486 && operand_equal_p (arg00, arg01, 0))
11488 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11490 if (cosfn != NULL_TREE)
11491 return build_call_expr (cosfn, 1, arg00);
11495 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11496 NaNs or Infinities. */
11497 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11498 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11499 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11501 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11502 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11504 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11505 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11506 && operand_equal_p (arg00, arg01, 0))
11508 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11510 if (cosfn != NULL_TREE)
11512 tree tmp = build_call_expr (cosfn, 1, arg00);
11513 return fold_build2 (RDIV_EXPR, type,
11514 build_real (type, dconst1),
11520 /* Optimize pow(x,c)/x as pow(x,c-1). */
11521 if (fcode0 == BUILT_IN_POW
11522 || fcode0 == BUILT_IN_POWF
11523 || fcode0 == BUILT_IN_POWL)
11525 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11526 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11527 if (TREE_CODE (arg01) == REAL_CST
11528 && !TREE_OVERFLOW (arg01)
11529 && operand_equal_p (arg1, arg00, 0))
11531 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11535 c = TREE_REAL_CST (arg01);
11536 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11537 arg = build_real (type, c);
11538 return build_call_expr (powfn, 2, arg1, arg);
11542 /* Optimize a/root(b/c) into a*root(c/b). */
11543 if (BUILTIN_ROOT_P (fcode1))
11545 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11547 if (TREE_CODE (rootarg) == RDIV_EXPR)
11549 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11550 tree b = TREE_OPERAND (rootarg, 0);
11551 tree c = TREE_OPERAND (rootarg, 1);
11553 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11555 tmp = build_call_expr (rootfn, 1, tmp);
11556 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11560 /* Optimize x/expN(y) into x*expN(-y). */
11561 if (BUILTIN_EXPONENT_P (fcode1))
11563 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11564 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11565 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11566 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11569 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11570 if (fcode1 == BUILT_IN_POW
11571 || fcode1 == BUILT_IN_POWF
11572 || fcode1 == BUILT_IN_POWL)
11574 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11575 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11576 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11577 tree neg11 = fold_convert (type, negate_expr (arg11));
11578 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11579 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11584 case TRUNC_DIV_EXPR:
11585 case FLOOR_DIV_EXPR:
11586 /* Simplify A / (B << N) where A and B are positive and B is
11587 a power of 2, to A >> (N + log2(B)). */
11588 strict_overflow_p = false;
11589 if (TREE_CODE (arg1) == LSHIFT_EXPR
11590 && (TYPE_UNSIGNED (type)
11591 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11593 tree sval = TREE_OPERAND (arg1, 0);
11594 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11596 tree sh_cnt = TREE_OPERAND (arg1, 1);
11597 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11599 if (strict_overflow_p)
11600 fold_overflow_warning (("assuming signed overflow does not "
11601 "occur when simplifying A / (B << N)"),
11602 WARN_STRICT_OVERFLOW_MISC);
11604 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11605 sh_cnt, build_int_cst (NULL_TREE, pow2));
11606 return fold_build2 (RSHIFT_EXPR, type,
11607 fold_convert (type, arg0), sh_cnt);
11611 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11612 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11613 if (INTEGRAL_TYPE_P (type)
11614 && TYPE_UNSIGNED (type)
11615 && code == FLOOR_DIV_EXPR)
11616 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11620 case ROUND_DIV_EXPR:
11621 case CEIL_DIV_EXPR:
11622 case EXACT_DIV_EXPR:
11623 if (integer_onep (arg1))
11624 return non_lvalue (fold_convert (type, arg0));
11625 if (integer_zerop (arg1))
11627 /* X / -1 is -X. */
11628 if (!TYPE_UNSIGNED (type)
11629 && TREE_CODE (arg1) == INTEGER_CST
11630 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11631 && TREE_INT_CST_HIGH (arg1) == -1)
11632 return fold_convert (type, negate_expr (arg0));
11634 /* Convert -A / -B to A / B when the type is signed and overflow is
11636 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11637 && TREE_CODE (arg0) == NEGATE_EXPR
11638 && negate_expr_p (arg1))
11640 if (INTEGRAL_TYPE_P (type))
11641 fold_overflow_warning (("assuming signed overflow does not occur "
11642 "when distributing negation across "
11644 WARN_STRICT_OVERFLOW_MISC);
11645 return fold_build2 (code, type,
11646 fold_convert (type, TREE_OPERAND (arg0, 0)),
11647 fold_convert (type, negate_expr (arg1)));
11649 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11650 && TREE_CODE (arg1) == NEGATE_EXPR
11651 && negate_expr_p (arg0))
11653 if (INTEGRAL_TYPE_P (type))
11654 fold_overflow_warning (("assuming signed overflow does not occur "
11655 "when distributing negation across "
11657 WARN_STRICT_OVERFLOW_MISC);
11658 return fold_build2 (code, type,
11659 fold_convert (type, negate_expr (arg0)),
11660 fold_convert (type, TREE_OPERAND (arg1, 0)));
11663 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11664 operation, EXACT_DIV_EXPR.
11666 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11667 At one time others generated faster code, it's not clear if they do
11668 after the last round to changes to the DIV code in expmed.c. */
11669 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11670 && multiple_of_p (type, arg0, arg1))
11671 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11673 strict_overflow_p = false;
11674 if (TREE_CODE (arg1) == INTEGER_CST
11675 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11676 &strict_overflow_p)))
11678 if (strict_overflow_p)
11679 fold_overflow_warning (("assuming signed overflow does not occur "
11680 "when simplifying division"),
11681 WARN_STRICT_OVERFLOW_MISC);
11682 return fold_convert (type, tem);
11687 case CEIL_MOD_EXPR:
11688 case FLOOR_MOD_EXPR:
11689 case ROUND_MOD_EXPR:
11690 case TRUNC_MOD_EXPR:
11691 /* X % 1 is always zero, but be sure to preserve any side
11693 if (integer_onep (arg1))
11694 return omit_one_operand (type, integer_zero_node, arg0);
11696 /* X % 0, return X % 0 unchanged so that we can get the
11697 proper warnings and errors. */
11698 if (integer_zerop (arg1))
11701 /* 0 % X is always zero, but be sure to preserve any side
11702 effects in X. Place this after checking for X == 0. */
11703 if (integer_zerop (arg0))
11704 return omit_one_operand (type, integer_zero_node, arg1);
11706 /* X % -1 is zero. */
11707 if (!TYPE_UNSIGNED (type)
11708 && TREE_CODE (arg1) == INTEGER_CST
11709 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11710 && TREE_INT_CST_HIGH (arg1) == -1)
11711 return omit_one_operand (type, integer_zero_node, arg0);
11713 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11714 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11715 strict_overflow_p = false;
11716 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11717 && (TYPE_UNSIGNED (type)
11718 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11721 /* Also optimize A % (C << N) where C is a power of 2,
11722 to A & ((C << N) - 1). */
11723 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11724 c = TREE_OPERAND (arg1, 0);
11726 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11728 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11729 build_int_cst (TREE_TYPE (arg1), 1));
11730 if (strict_overflow_p)
11731 fold_overflow_warning (("assuming signed overflow does not "
11732 "occur when simplifying "
11733 "X % (power of two)"),
11734 WARN_STRICT_OVERFLOW_MISC);
11735 return fold_build2 (BIT_AND_EXPR, type,
11736 fold_convert (type, arg0),
11737 fold_convert (type, mask));
11741 /* X % -C is the same as X % C. */
11742 if (code == TRUNC_MOD_EXPR
11743 && !TYPE_UNSIGNED (type)
11744 && TREE_CODE (arg1) == INTEGER_CST
11745 && !TREE_OVERFLOW (arg1)
11746 && TREE_INT_CST_HIGH (arg1) < 0
11747 && !TYPE_OVERFLOW_TRAPS (type)
11748 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11749 && !sign_bit_p (arg1, arg1))
11750 return fold_build2 (code, type, fold_convert (type, arg0),
11751 fold_convert (type, negate_expr (arg1)));
11753 /* X % -Y is the same as X % Y. */
11754 if (code == TRUNC_MOD_EXPR
11755 && !TYPE_UNSIGNED (type)
11756 && TREE_CODE (arg1) == NEGATE_EXPR
11757 && !TYPE_OVERFLOW_TRAPS (type))
11758 return fold_build2 (code, type, fold_convert (type, arg0),
11759 fold_convert (type, TREE_OPERAND (arg1, 0)));
11761 if (TREE_CODE (arg1) == INTEGER_CST
11762 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11763 &strict_overflow_p)))
11765 if (strict_overflow_p)
11766 fold_overflow_warning (("assuming signed overflow does not occur "
11767 "when simplifying modulus"),
11768 WARN_STRICT_OVERFLOW_MISC);
11769 return fold_convert (type, tem);
11776 if (integer_all_onesp (arg0))
11777 return omit_one_operand (type, arg0, arg1);
11781 /* Optimize -1 >> x for arithmetic right shifts. */
11782 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11783 && tree_expr_nonnegative_p (arg1))
11784 return omit_one_operand (type, arg0, arg1);
11785 /* ... fall through ... */
11789 if (integer_zerop (arg1))
11790 return non_lvalue (fold_convert (type, arg0));
11791 if (integer_zerop (arg0))
11792 return omit_one_operand (type, arg0, arg1);
11794 /* Since negative shift count is not well-defined,
11795 don't try to compute it in the compiler. */
11796 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11799 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11800 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11801 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11802 && host_integerp (TREE_OPERAND (arg0, 1), false)
11803 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11805 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11806 + TREE_INT_CST_LOW (arg1));
11808 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11809 being well defined. */
11810 if (low >= TYPE_PRECISION (type))
11812 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11813 low = low % TYPE_PRECISION (type);
11814 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11815 return build_int_cst (type, 0);
11817 low = TYPE_PRECISION (type) - 1;
11820 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11821 build_int_cst (type, low));
11824 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11825 into x & ((unsigned)-1 >> c) for unsigned types. */
11826 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11827 || (TYPE_UNSIGNED (type)
11828 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11829 && host_integerp (arg1, false)
11830 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11831 && host_integerp (TREE_OPERAND (arg0, 1), false)
11832 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11834 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11835 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11841 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11843 lshift = build_int_cst (type, -1);
11844 lshift = int_const_binop (code, lshift, arg1, 0);
11846 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11850 /* Rewrite an LROTATE_EXPR by a constant into an
11851 RROTATE_EXPR by a new constant. */
11852 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11854 tree tem = build_int_cst (TREE_TYPE (arg1),
11855 TYPE_PRECISION (type));
11856 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11857 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11860 /* If we have a rotate of a bit operation with the rotate count and
11861 the second operand of the bit operation both constant,
11862 permute the two operations. */
11863 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11864 && (TREE_CODE (arg0) == BIT_AND_EXPR
11865 || TREE_CODE (arg0) == BIT_IOR_EXPR
11866 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11867 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11868 return fold_build2 (TREE_CODE (arg0), type,
11869 fold_build2 (code, type,
11870 TREE_OPERAND (arg0, 0), arg1),
11871 fold_build2 (code, type,
11872 TREE_OPERAND (arg0, 1), arg1));
11874 /* Two consecutive rotates adding up to the precision of the
11875 type can be ignored. */
11876 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11877 && TREE_CODE (arg0) == RROTATE_EXPR
11878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11879 && TREE_INT_CST_HIGH (arg1) == 0
11880 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11881 && ((TREE_INT_CST_LOW (arg1)
11882 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11883 == (unsigned int) TYPE_PRECISION (type)))
11884 return TREE_OPERAND (arg0, 0);
11886 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11887 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11888 if the latter can be further optimized. */
11889 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11890 && TREE_CODE (arg0) == BIT_AND_EXPR
11891 && TREE_CODE (arg1) == INTEGER_CST
11892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11894 tree mask = fold_build2 (code, type,
11895 fold_convert (type, TREE_OPERAND (arg0, 1)),
11897 tree shift = fold_build2 (code, type,
11898 fold_convert (type, TREE_OPERAND (arg0, 0)),
11900 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11908 if (operand_equal_p (arg0, arg1, 0))
11909 return omit_one_operand (type, arg0, arg1);
11910 if (INTEGRAL_TYPE_P (type)
11911 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11912 return omit_one_operand (type, arg1, arg0);
11913 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11919 if (operand_equal_p (arg0, arg1, 0))
11920 return omit_one_operand (type, arg0, arg1);
11921 if (INTEGRAL_TYPE_P (type)
11922 && TYPE_MAX_VALUE (type)
11923 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11924 return omit_one_operand (type, arg1, arg0);
11925 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11930 case TRUTH_ANDIF_EXPR:
11931 /* Note that the operands of this must be ints
11932 and their values must be 0 or 1.
11933 ("true" is a fixed value perhaps depending on the language.) */
11934 /* If first arg is constant zero, return it. */
11935 if (integer_zerop (arg0))
11936 return fold_convert (type, arg0);
11937 case TRUTH_AND_EXPR:
11938 /* If either arg is constant true, drop it. */
11939 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11940 return non_lvalue (fold_convert (type, arg1));
11941 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11942 /* Preserve sequence points. */
11943 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11944 return non_lvalue (fold_convert (type, arg0));
11945 /* If second arg is constant zero, result is zero, but first arg
11946 must be evaluated. */
11947 if (integer_zerop (arg1))
11948 return omit_one_operand (type, arg1, arg0);
11949 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11950 case will be handled here. */
11951 if (integer_zerop (arg0))
11952 return omit_one_operand (type, arg0, arg1);
11954 /* !X && X is always false. */
11955 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11956 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11957 return omit_one_operand (type, integer_zero_node, arg1);
11958 /* X && !X is always false. */
11959 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11960 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11961 return omit_one_operand (type, integer_zero_node, arg0);
11963 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11964 means A >= Y && A != MAX, but in this case we know that
11967 if (!TREE_SIDE_EFFECTS (arg0)
11968 && !TREE_SIDE_EFFECTS (arg1))
11970 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11971 if (tem && !operand_equal_p (tem, arg0, 0))
11972 return fold_build2 (code, type, tem, arg1);
11974 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11975 if (tem && !operand_equal_p (tem, arg1, 0))
11976 return fold_build2 (code, type, arg0, tem);
11980 /* We only do these simplifications if we are optimizing. */
11984 /* Check for things like (A || B) && (A || C). We can convert this
11985 to A || (B && C). Note that either operator can be any of the four
11986 truth and/or operations and the transformation will still be
11987 valid. Also note that we only care about order for the
11988 ANDIF and ORIF operators. If B contains side effects, this
11989 might change the truth-value of A. */
11990 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11991 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11992 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11993 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11994 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11995 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11997 tree a00 = TREE_OPERAND (arg0, 0);
11998 tree a01 = TREE_OPERAND (arg0, 1);
11999 tree a10 = TREE_OPERAND (arg1, 0);
12000 tree a11 = TREE_OPERAND (arg1, 1);
12001 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12002 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12003 && (code == TRUTH_AND_EXPR
12004 || code == TRUTH_OR_EXPR));
12006 if (operand_equal_p (a00, a10, 0))
12007 return fold_build2 (TREE_CODE (arg0), type, a00,
12008 fold_build2 (code, type, a01, a11));
12009 else if (commutative && operand_equal_p (a00, a11, 0))
12010 return fold_build2 (TREE_CODE (arg0), type, a00,
12011 fold_build2 (code, type, a01, a10));
12012 else if (commutative && operand_equal_p (a01, a10, 0))
12013 return fold_build2 (TREE_CODE (arg0), type, a01,
12014 fold_build2 (code, type, a00, a11));
12016 /* This case if tricky because we must either have commutative
12017 operators or else A10 must not have side-effects. */
12019 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12020 && operand_equal_p (a01, a11, 0))
12021 return fold_build2 (TREE_CODE (arg0), type,
12022 fold_build2 (code, type, a00, a10),
12026 /* See if we can build a range comparison. */
12027 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12030 /* Check for the possibility of merging component references. If our
12031 lhs is another similar operation, try to merge its rhs with our
12032 rhs. Then try to merge our lhs and rhs. */
12033 if (TREE_CODE (arg0) == code
12034 && 0 != (tem = fold_truthop (code, type,
12035 TREE_OPERAND (arg0, 1), arg1)))
12036 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12038 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12043 case TRUTH_ORIF_EXPR:
12044 /* Note that the operands of this must be ints
12045 and their values must be 0 or true.
12046 ("true" is a fixed value perhaps depending on the language.) */
12047 /* If first arg is constant true, return it. */
12048 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12049 return fold_convert (type, arg0);
12050 case TRUTH_OR_EXPR:
12051 /* If either arg is constant zero, drop it. */
12052 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12053 return non_lvalue (fold_convert (type, arg1));
12054 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12055 /* Preserve sequence points. */
12056 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12057 return non_lvalue (fold_convert (type, arg0));
12058 /* If second arg is constant true, result is true, but we must
12059 evaluate first arg. */
12060 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12061 return omit_one_operand (type, arg1, arg0);
12062 /* Likewise for first arg, but note this only occurs here for
12064 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12065 return omit_one_operand (type, arg0, arg1);
12067 /* !X || X is always true. */
12068 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12069 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12070 return omit_one_operand (type, integer_one_node, arg1);
12071 /* X || !X is always true. */
12072 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12074 return omit_one_operand (type, integer_one_node, arg0);
12078 case TRUTH_XOR_EXPR:
12079 /* If the second arg is constant zero, drop it. */
12080 if (integer_zerop (arg1))
12081 return non_lvalue (fold_convert (type, arg0));
12082 /* If the second arg is constant true, this is a logical inversion. */
12083 if (integer_onep (arg1))
12085 /* Only call invert_truthvalue if operand is a truth value. */
12086 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12087 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12089 tem = invert_truthvalue (arg0);
12090 return non_lvalue (fold_convert (type, tem));
12092 /* Identical arguments cancel to zero. */
12093 if (operand_equal_p (arg0, arg1, 0))
12094 return omit_one_operand (type, integer_zero_node, arg0);
12096 /* !X ^ X is always true. */
12097 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12098 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12099 return omit_one_operand (type, integer_one_node, arg1);
12101 /* X ^ !X is always true. */
12102 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12104 return omit_one_operand (type, integer_one_node, arg0);
12110 tem = fold_comparison (code, type, op0, op1);
12111 if (tem != NULL_TREE)
12114 /* bool_var != 0 becomes bool_var. */
12115 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12116 && code == NE_EXPR)
12117 return non_lvalue (fold_convert (type, arg0));
12119 /* bool_var == 1 becomes bool_var. */
12120 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12121 && code == EQ_EXPR)
12122 return non_lvalue (fold_convert (type, arg0));
12124 /* bool_var != 1 becomes !bool_var. */
12125 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12126 && code == NE_EXPR)
12127 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12129 /* bool_var == 0 becomes !bool_var. */
12130 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12131 && code == EQ_EXPR)
12132 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12134 /* If this is an equality comparison of the address of two non-weak,
12135 unaliased symbols neither of which are extern (since we do not
12136 have access to attributes for externs), then we know the result. */
12137 if (TREE_CODE (arg0) == ADDR_EXPR
12138 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12139 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12140 && ! lookup_attribute ("alias",
12141 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12142 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12143 && TREE_CODE (arg1) == ADDR_EXPR
12144 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12145 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12146 && ! lookup_attribute ("alias",
12147 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12148 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12150 /* We know that we're looking at the address of two
12151 non-weak, unaliased, static _DECL nodes.
12153 It is both wasteful and incorrect to call operand_equal_p
12154 to compare the two ADDR_EXPR nodes. It is wasteful in that
12155 all we need to do is test pointer equality for the arguments
12156 to the two ADDR_EXPR nodes. It is incorrect to use
12157 operand_equal_p as that function is NOT equivalent to a
12158 C equality test. It can in fact return false for two
12159 objects which would test as equal using the C equality
12161 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12162 return constant_boolean_node (equal
12163 ? code == EQ_EXPR : code != EQ_EXPR,
12167 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12168 a MINUS_EXPR of a constant, we can convert it into a comparison with
12169 a revised constant as long as no overflow occurs. */
12170 if (TREE_CODE (arg1) == INTEGER_CST
12171 && (TREE_CODE (arg0) == PLUS_EXPR
12172 || TREE_CODE (arg0) == MINUS_EXPR)
12173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12174 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12175 ? MINUS_EXPR : PLUS_EXPR,
12176 fold_convert (TREE_TYPE (arg0), arg1),
12177 TREE_OPERAND (arg0, 1), 0))
12178 && !TREE_OVERFLOW (tem))
12179 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12181 /* Similarly for a NEGATE_EXPR. */
12182 if (TREE_CODE (arg0) == NEGATE_EXPR
12183 && TREE_CODE (arg1) == INTEGER_CST
12184 && 0 != (tem = negate_expr (arg1))
12185 && TREE_CODE (tem) == INTEGER_CST
12186 && !TREE_OVERFLOW (tem))
12187 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12189 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12190 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12191 && TREE_CODE (arg1) == INTEGER_CST
12192 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12193 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12194 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12195 fold_convert (TREE_TYPE (arg0), arg1),
12196 TREE_OPERAND (arg0, 1)));
12198 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12199 if ((TREE_CODE (arg0) == PLUS_EXPR
12200 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12201 || TREE_CODE (arg0) == MINUS_EXPR)
12202 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12203 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12204 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12206 tree val = TREE_OPERAND (arg0, 1);
12207 return omit_two_operands (type,
12208 fold_build2 (code, type,
12210 build_int_cst (TREE_TYPE (val),
12212 TREE_OPERAND (arg0, 0), arg1);
12215 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12216 if (TREE_CODE (arg0) == MINUS_EXPR
12217 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12218 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12219 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12221 return omit_two_operands (type,
12223 ? boolean_true_node : boolean_false_node,
12224 TREE_OPERAND (arg0, 1), arg1);
12227 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12228 for !=. Don't do this for ordered comparisons due to overflow. */
12229 if (TREE_CODE (arg0) == MINUS_EXPR
12230 && integer_zerop (arg1))
12231 return fold_build2 (code, type,
12232 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12234 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12235 if (TREE_CODE (arg0) == ABS_EXPR
12236 && (integer_zerop (arg1) || real_zerop (arg1)))
12237 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12239 /* If this is an EQ or NE comparison with zero and ARG0 is
12240 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12241 two operations, but the latter can be done in one less insn
12242 on machines that have only two-operand insns or on which a
12243 constant cannot be the first operand. */
12244 if (TREE_CODE (arg0) == BIT_AND_EXPR
12245 && integer_zerop (arg1))
12247 tree arg00 = TREE_OPERAND (arg0, 0);
12248 tree arg01 = TREE_OPERAND (arg0, 1);
12249 if (TREE_CODE (arg00) == LSHIFT_EXPR
12250 && integer_onep (TREE_OPERAND (arg00, 0)))
12252 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12253 arg01, TREE_OPERAND (arg00, 1));
12254 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12255 build_int_cst (TREE_TYPE (arg0), 1));
12256 return fold_build2 (code, type,
12257 fold_convert (TREE_TYPE (arg1), tem), arg1);
12259 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12260 && integer_onep (TREE_OPERAND (arg01, 0)))
12262 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12263 arg00, TREE_OPERAND (arg01, 1));
12264 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12265 build_int_cst (TREE_TYPE (arg0), 1));
12266 return fold_build2 (code, type,
12267 fold_convert (TREE_TYPE (arg1), tem), arg1);
12271 /* If this is an NE or EQ comparison of zero against the result of a
12272 signed MOD operation whose second operand is a power of 2, make
12273 the MOD operation unsigned since it is simpler and equivalent. */
12274 if (integer_zerop (arg1)
12275 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12276 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12277 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12278 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12279 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12280 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12282 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12283 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12284 fold_convert (newtype,
12285 TREE_OPERAND (arg0, 0)),
12286 fold_convert (newtype,
12287 TREE_OPERAND (arg0, 1)));
12289 return fold_build2 (code, type, newmod,
12290 fold_convert (newtype, arg1));
12293 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12294 C1 is a valid shift constant, and C2 is a power of two, i.e.
12296 if (TREE_CODE (arg0) == BIT_AND_EXPR
12297 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12298 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12300 && integer_pow2p (TREE_OPERAND (arg0, 1))
12301 && integer_zerop (arg1))
12303 tree itype = TREE_TYPE (arg0);
12304 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12305 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12307 /* Check for a valid shift count. */
12308 if (TREE_INT_CST_HIGH (arg001) == 0
12309 && TREE_INT_CST_LOW (arg001) < prec)
12311 tree arg01 = TREE_OPERAND (arg0, 1);
12312 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12313 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12314 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12315 can be rewritten as (X & (C2 << C1)) != 0. */
12316 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12318 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12319 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12320 return fold_build2 (code, type, tem, arg1);
12322 /* Otherwise, for signed (arithmetic) shifts,
12323 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12324 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12325 else if (!TYPE_UNSIGNED (itype))
12326 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12327 arg000, build_int_cst (itype, 0));
12328 /* Otherwise, of unsigned (logical) shifts,
12329 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12330 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12332 return omit_one_operand (type,
12333 code == EQ_EXPR ? integer_one_node
12334 : integer_zero_node,
12339 /* If this is an NE comparison of zero with an AND of one, remove the
12340 comparison since the AND will give the correct value. */
12341 if (code == NE_EXPR
12342 && integer_zerop (arg1)
12343 && TREE_CODE (arg0) == BIT_AND_EXPR
12344 && integer_onep (TREE_OPERAND (arg0, 1)))
12345 return fold_convert (type, arg0);
12347 /* If we have (A & C) == C where C is a power of 2, convert this into
12348 (A & C) != 0. Similarly for NE_EXPR. */
12349 if (TREE_CODE (arg0) == BIT_AND_EXPR
12350 && integer_pow2p (TREE_OPERAND (arg0, 1))
12351 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12352 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12353 arg0, fold_convert (TREE_TYPE (arg0),
12354 integer_zero_node));
12356 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12357 bit, then fold the expression into A < 0 or A >= 0. */
12358 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12362 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12363 Similarly for NE_EXPR. */
12364 if (TREE_CODE (arg0) == BIT_AND_EXPR
12365 && TREE_CODE (arg1) == INTEGER_CST
12366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12368 tree notc = fold_build1 (BIT_NOT_EXPR,
12369 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12370 TREE_OPERAND (arg0, 1));
12371 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12373 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12374 if (integer_nonzerop (dandnotc))
12375 return omit_one_operand (type, rslt, arg0);
12378 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12379 Similarly for NE_EXPR. */
12380 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12381 && TREE_CODE (arg1) == INTEGER_CST
12382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12384 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12385 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12386 TREE_OPERAND (arg0, 1), notd);
12387 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12388 if (integer_nonzerop (candnotd))
12389 return omit_one_operand (type, rslt, arg0);
12392 /* If this is a comparison of a field, we may be able to simplify it. */
12393 if ((TREE_CODE (arg0) == COMPONENT_REF
12394 || TREE_CODE (arg0) == BIT_FIELD_REF)
12395 /* Handle the constant case even without -O
12396 to make sure the warnings are given. */
12397 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12399 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12404 /* Optimize comparisons of strlen vs zero to a compare of the
12405 first character of the string vs zero. To wit,
12406 strlen(ptr) == 0 => *ptr == 0
12407 strlen(ptr) != 0 => *ptr != 0
12408 Other cases should reduce to one of these two (or a constant)
12409 due to the return value of strlen being unsigned. */
12410 if (TREE_CODE (arg0) == CALL_EXPR
12411 && integer_zerop (arg1))
12413 tree fndecl = get_callee_fndecl (arg0);
12416 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12417 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12418 && call_expr_nargs (arg0) == 1
12419 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12421 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12422 return fold_build2 (code, type, iref,
12423 build_int_cst (TREE_TYPE (iref), 0));
12427 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12428 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12429 if (TREE_CODE (arg0) == RSHIFT_EXPR
12430 && integer_zerop (arg1)
12431 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12433 tree arg00 = TREE_OPERAND (arg0, 0);
12434 tree arg01 = TREE_OPERAND (arg0, 1);
12435 tree itype = TREE_TYPE (arg00);
12436 if (TREE_INT_CST_HIGH (arg01) == 0
12437 && TREE_INT_CST_LOW (arg01)
12438 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12440 if (TYPE_UNSIGNED (itype))
12442 itype = signed_type_for (itype);
12443 arg00 = fold_convert (itype, arg00);
12445 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12446 type, arg00, build_int_cst (itype, 0));
12450 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12451 if (integer_zerop (arg1)
12452 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12453 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12454 TREE_OPERAND (arg0, 1));
12456 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12457 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12458 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12459 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12460 build_int_cst (TREE_TYPE (arg1), 0));
12461 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12462 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12464 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12465 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12466 build_int_cst (TREE_TYPE (arg1), 0));
12468 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12469 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12470 && TREE_CODE (arg1) == INTEGER_CST
12471 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12472 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12473 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12474 TREE_OPERAND (arg0, 1), arg1));
12476 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12477 (X & C) == 0 when C is a single bit. */
12478 if (TREE_CODE (arg0) == BIT_AND_EXPR
12479 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12480 && integer_zerop (arg1)
12481 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12483 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12484 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12485 TREE_OPERAND (arg0, 1));
12486 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12490 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12491 constant C is a power of two, i.e. a single bit. */
12492 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12493 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12494 && integer_zerop (arg1)
12495 && integer_pow2p (TREE_OPERAND (arg0, 1))
12496 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12497 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12499 tree arg00 = TREE_OPERAND (arg0, 0);
12500 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12501 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12504 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12505 when is C is a power of two, i.e. a single bit. */
12506 if (TREE_CODE (arg0) == BIT_AND_EXPR
12507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12508 && integer_zerop (arg1)
12509 && integer_pow2p (TREE_OPERAND (arg0, 1))
12510 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12511 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12513 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12514 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12515 arg000, TREE_OPERAND (arg0, 1));
12516 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12517 tem, build_int_cst (TREE_TYPE (tem), 0));
12520 if (integer_zerop (arg1)
12521 && tree_expr_nonzero_p (arg0))
12523 tree res = constant_boolean_node (code==NE_EXPR, type);
12524 return omit_one_operand (type, res, arg0);
12527 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12528 if (TREE_CODE (arg0) == NEGATE_EXPR
12529 && TREE_CODE (arg1) == NEGATE_EXPR)
12530 return fold_build2 (code, type,
12531 TREE_OPERAND (arg0, 0),
12532 TREE_OPERAND (arg1, 0));
12534 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12535 if (TREE_CODE (arg0) == BIT_AND_EXPR
12536 && TREE_CODE (arg1) == BIT_AND_EXPR)
12538 tree arg00 = TREE_OPERAND (arg0, 0);
12539 tree arg01 = TREE_OPERAND (arg0, 1);
12540 tree arg10 = TREE_OPERAND (arg1, 0);
12541 tree arg11 = TREE_OPERAND (arg1, 1);
12542 tree itype = TREE_TYPE (arg0);
12544 if (operand_equal_p (arg01, arg11, 0))
12545 return fold_build2 (code, type,
12546 fold_build2 (BIT_AND_EXPR, itype,
12547 fold_build2 (BIT_XOR_EXPR, itype,
12550 build_int_cst (itype, 0));
12552 if (operand_equal_p (arg01, arg10, 0))
12553 return fold_build2 (code, type,
12554 fold_build2 (BIT_AND_EXPR, itype,
12555 fold_build2 (BIT_XOR_EXPR, itype,
12558 build_int_cst (itype, 0));
12560 if (operand_equal_p (arg00, arg11, 0))
12561 return fold_build2 (code, type,
12562 fold_build2 (BIT_AND_EXPR, itype,
12563 fold_build2 (BIT_XOR_EXPR, itype,
12566 build_int_cst (itype, 0));
12568 if (operand_equal_p (arg00, arg10, 0))
12569 return fold_build2 (code, type,
12570 fold_build2 (BIT_AND_EXPR, itype,
12571 fold_build2 (BIT_XOR_EXPR, itype,
12574 build_int_cst (itype, 0));
12577 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12578 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12580 tree arg00 = TREE_OPERAND (arg0, 0);
12581 tree arg01 = TREE_OPERAND (arg0, 1);
12582 tree arg10 = TREE_OPERAND (arg1, 0);
12583 tree arg11 = TREE_OPERAND (arg1, 1);
12584 tree itype = TREE_TYPE (arg0);
12586 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12587 operand_equal_p guarantees no side-effects so we don't need
12588 to use omit_one_operand on Z. */
12589 if (operand_equal_p (arg01, arg11, 0))
12590 return fold_build2 (code, type, arg00, arg10);
12591 if (operand_equal_p (arg01, arg10, 0))
12592 return fold_build2 (code, type, arg00, arg11);
12593 if (operand_equal_p (arg00, arg11, 0))
12594 return fold_build2 (code, type, arg01, arg10);
12595 if (operand_equal_p (arg00, arg10, 0))
12596 return fold_build2 (code, type, arg01, arg11);
12598 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12599 if (TREE_CODE (arg01) == INTEGER_CST
12600 && TREE_CODE (arg11) == INTEGER_CST)
12601 return fold_build2 (code, type,
12602 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12603 fold_build2 (BIT_XOR_EXPR, itype,
12608 /* Attempt to simplify equality/inequality comparisons of complex
12609 values. Only lower the comparison if the result is known or
12610 can be simplified to a single scalar comparison. */
12611 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12612 || TREE_CODE (arg0) == COMPLEX_CST)
12613 && (TREE_CODE (arg1) == COMPLEX_EXPR
12614 || TREE_CODE (arg1) == COMPLEX_CST))
12616 tree real0, imag0, real1, imag1;
12619 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12621 real0 = TREE_OPERAND (arg0, 0);
12622 imag0 = TREE_OPERAND (arg0, 1);
12626 real0 = TREE_REALPART (arg0);
12627 imag0 = TREE_IMAGPART (arg0);
12630 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12632 real1 = TREE_OPERAND (arg1, 0);
12633 imag1 = TREE_OPERAND (arg1, 1);
12637 real1 = TREE_REALPART (arg1);
12638 imag1 = TREE_IMAGPART (arg1);
12641 rcond = fold_binary (code, type, real0, real1);
12642 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12644 if (integer_zerop (rcond))
12646 if (code == EQ_EXPR)
12647 return omit_two_operands (type, boolean_false_node,
12649 return fold_build2 (NE_EXPR, type, imag0, imag1);
12653 if (code == NE_EXPR)
12654 return omit_two_operands (type, boolean_true_node,
12656 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12660 icond = fold_binary (code, type, imag0, imag1);
12661 if (icond && TREE_CODE (icond) == INTEGER_CST)
12663 if (integer_zerop (icond))
12665 if (code == EQ_EXPR)
12666 return omit_two_operands (type, boolean_false_node,
12668 return fold_build2 (NE_EXPR, type, real0, real1);
12672 if (code == NE_EXPR)
12673 return omit_two_operands (type, boolean_true_node,
12675 return fold_build2 (EQ_EXPR, type, real0, real1);
12686 tem = fold_comparison (code, type, op0, op1);
12687 if (tem != NULL_TREE)
12690 /* Transform comparisons of the form X +- C CMP X. */
12691 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12692 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12693 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12694 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12695 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12696 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12698 tree arg01 = TREE_OPERAND (arg0, 1);
12699 enum tree_code code0 = TREE_CODE (arg0);
12702 if (TREE_CODE (arg01) == REAL_CST)
12703 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12705 is_positive = tree_int_cst_sgn (arg01);
12707 /* (X - c) > X becomes false. */
12708 if (code == GT_EXPR
12709 && ((code0 == MINUS_EXPR && is_positive >= 0)
12710 || (code0 == PLUS_EXPR && is_positive <= 0)))
12712 if (TREE_CODE (arg01) == INTEGER_CST
12713 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12714 fold_overflow_warning (("assuming signed overflow does not "
12715 "occur when assuming that (X - c) > X "
12716 "is always false"),
12717 WARN_STRICT_OVERFLOW_ALL);
12718 return constant_boolean_node (0, type);
12721 /* Likewise (X + c) < X becomes false. */
12722 if (code == LT_EXPR
12723 && ((code0 == PLUS_EXPR && is_positive >= 0)
12724 || (code0 == MINUS_EXPR && is_positive <= 0)))
12726 if (TREE_CODE (arg01) == INTEGER_CST
12727 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12728 fold_overflow_warning (("assuming signed overflow does not "
12729 "occur when assuming that "
12730 "(X + c) < X is always false"),
12731 WARN_STRICT_OVERFLOW_ALL);
12732 return constant_boolean_node (0, type);
12735 /* Convert (X - c) <= X to true. */
12736 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12738 && ((code0 == MINUS_EXPR && is_positive >= 0)
12739 || (code0 == PLUS_EXPR && is_positive <= 0)))
12741 if (TREE_CODE (arg01) == INTEGER_CST
12742 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12743 fold_overflow_warning (("assuming signed overflow does not "
12744 "occur when assuming that "
12745 "(X - c) <= X is always true"),
12746 WARN_STRICT_OVERFLOW_ALL);
12747 return constant_boolean_node (1, type);
12750 /* Convert (X + c) >= X to true. */
12751 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12753 && ((code0 == PLUS_EXPR && is_positive >= 0)
12754 || (code0 == MINUS_EXPR && is_positive <= 0)))
12756 if (TREE_CODE (arg01) == INTEGER_CST
12757 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12758 fold_overflow_warning (("assuming signed overflow does not "
12759 "occur when assuming that "
12760 "(X + c) >= X is always true"),
12761 WARN_STRICT_OVERFLOW_ALL);
12762 return constant_boolean_node (1, type);
12765 if (TREE_CODE (arg01) == INTEGER_CST)
12767 /* Convert X + c > X and X - c < X to true for integers. */
12768 if (code == GT_EXPR
12769 && ((code0 == PLUS_EXPR && is_positive > 0)
12770 || (code0 == MINUS_EXPR && is_positive < 0)))
12772 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12773 fold_overflow_warning (("assuming signed overflow does "
12774 "not occur when assuming that "
12775 "(X + c) > X is always true"),
12776 WARN_STRICT_OVERFLOW_ALL);
12777 return constant_boolean_node (1, type);
12780 if (code == LT_EXPR
12781 && ((code0 == MINUS_EXPR && is_positive > 0)
12782 || (code0 == PLUS_EXPR && is_positive < 0)))
12784 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12785 fold_overflow_warning (("assuming signed overflow does "
12786 "not occur when assuming that "
12787 "(X - c) < X is always true"),
12788 WARN_STRICT_OVERFLOW_ALL);
12789 return constant_boolean_node (1, type);
12792 /* Convert X + c <= X and X - c >= X to false for integers. */
12793 if (code == LE_EXPR
12794 && ((code0 == PLUS_EXPR && is_positive > 0)
12795 || (code0 == MINUS_EXPR && is_positive < 0)))
12797 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12798 fold_overflow_warning (("assuming signed overflow does "
12799 "not occur when assuming that "
12800 "(X + c) <= X is always false"),
12801 WARN_STRICT_OVERFLOW_ALL);
12802 return constant_boolean_node (0, type);
12805 if (code == GE_EXPR
12806 && ((code0 == MINUS_EXPR && is_positive > 0)
12807 || (code0 == PLUS_EXPR && is_positive < 0)))
12809 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12810 fold_overflow_warning (("assuming signed overflow does "
12811 "not occur when assuming that "
12812 "(X - c) >= X is always false"),
12813 WARN_STRICT_OVERFLOW_ALL);
12814 return constant_boolean_node (0, type);
12819 /* Comparisons with the highest or lowest possible integer of
12820 the specified precision will have known values. */
12822 tree arg1_type = TREE_TYPE (arg1);
12823 unsigned int width = TYPE_PRECISION (arg1_type);
12825 if (TREE_CODE (arg1) == INTEGER_CST
12826 && width <= 2 * HOST_BITS_PER_WIDE_INT
12827 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12829 HOST_WIDE_INT signed_max_hi;
12830 unsigned HOST_WIDE_INT signed_max_lo;
12831 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12833 if (width <= HOST_BITS_PER_WIDE_INT)
12835 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12840 if (TYPE_UNSIGNED (arg1_type))
12842 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12848 max_lo = signed_max_lo;
12849 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12855 width -= HOST_BITS_PER_WIDE_INT;
12856 signed_max_lo = -1;
12857 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12862 if (TYPE_UNSIGNED (arg1_type))
12864 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12869 max_hi = signed_max_hi;
12870 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12874 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12875 && TREE_INT_CST_LOW (arg1) == max_lo)
12879 return omit_one_operand (type, integer_zero_node, arg0);
12882 return fold_build2 (EQ_EXPR, type, op0, op1);
12885 return omit_one_operand (type, integer_one_node, arg0);
12888 return fold_build2 (NE_EXPR, type, op0, op1);
12890 /* The GE_EXPR and LT_EXPR cases above are not normally
12891 reached because of previous transformations. */
12896 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12898 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12902 arg1 = const_binop (PLUS_EXPR, arg1,
12903 build_int_cst (TREE_TYPE (arg1), 1), 0);
12904 return fold_build2 (EQ_EXPR, type,
12905 fold_convert (TREE_TYPE (arg1), arg0),
12908 arg1 = const_binop (PLUS_EXPR, arg1,
12909 build_int_cst (TREE_TYPE (arg1), 1), 0);
12910 return fold_build2 (NE_EXPR, type,
12911 fold_convert (TREE_TYPE (arg1), arg0),
12916 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12918 && TREE_INT_CST_LOW (arg1) == min_lo)
12922 return omit_one_operand (type, integer_zero_node, arg0);
12925 return fold_build2 (EQ_EXPR, type, op0, op1);
12928 return omit_one_operand (type, integer_one_node, arg0);
12931 return fold_build2 (NE_EXPR, type, op0, op1);
12936 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12938 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12942 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12943 return fold_build2 (NE_EXPR, type,
12944 fold_convert (TREE_TYPE (arg1), arg0),
12947 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12948 return fold_build2 (EQ_EXPR, type,
12949 fold_convert (TREE_TYPE (arg1), arg0),
12955 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12956 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12957 && TYPE_UNSIGNED (arg1_type)
12958 /* We will flip the signedness of the comparison operator
12959 associated with the mode of arg1, so the sign bit is
12960 specified by this mode. Check that arg1 is the signed
12961 max associated with this sign bit. */
12962 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12963 /* signed_type does not work on pointer types. */
12964 && INTEGRAL_TYPE_P (arg1_type))
12966 /* The following case also applies to X < signed_max+1
12967 and X >= signed_max+1 because previous transformations. */
12968 if (code == LE_EXPR || code == GT_EXPR)
12971 st = signed_type_for (TREE_TYPE (arg1));
12972 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12973 type, fold_convert (st, arg0),
12974 build_int_cst (st, 0));
12980 /* If we are comparing an ABS_EXPR with a constant, we can
12981 convert all the cases into explicit comparisons, but they may
12982 well not be faster than doing the ABS and one comparison.
12983 But ABS (X) <= C is a range comparison, which becomes a subtraction
12984 and a comparison, and is probably faster. */
12985 if (code == LE_EXPR
12986 && TREE_CODE (arg1) == INTEGER_CST
12987 && TREE_CODE (arg0) == ABS_EXPR
12988 && ! TREE_SIDE_EFFECTS (arg0)
12989 && (0 != (tem = negate_expr (arg1)))
12990 && TREE_CODE (tem) == INTEGER_CST
12991 && !TREE_OVERFLOW (tem))
12992 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12993 build2 (GE_EXPR, type,
12994 TREE_OPERAND (arg0, 0), tem),
12995 build2 (LE_EXPR, type,
12996 TREE_OPERAND (arg0, 0), arg1));
12998 /* Convert ABS_EXPR<x> >= 0 to true. */
12999 strict_overflow_p = false;
13000 if (code == GE_EXPR
13001 && (integer_zerop (arg1)
13002 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13003 && real_zerop (arg1)))
13004 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13006 if (strict_overflow_p)
13007 fold_overflow_warning (("assuming signed overflow does not occur "
13008 "when simplifying comparison of "
13009 "absolute value and zero"),
13010 WARN_STRICT_OVERFLOW_CONDITIONAL);
13011 return omit_one_operand (type, integer_one_node, arg0);
13014 /* Convert ABS_EXPR<x> < 0 to false. */
13015 strict_overflow_p = false;
13016 if (code == LT_EXPR
13017 && (integer_zerop (arg1) || real_zerop (arg1))
13018 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13020 if (strict_overflow_p)
13021 fold_overflow_warning (("assuming signed overflow does not occur "
13022 "when simplifying comparison of "
13023 "absolute value and zero"),
13024 WARN_STRICT_OVERFLOW_CONDITIONAL);
13025 return omit_one_operand (type, integer_zero_node, arg0);
13028 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13029 and similarly for >= into !=. */
13030 if ((code == LT_EXPR || code == GE_EXPR)
13031 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13032 && TREE_CODE (arg1) == LSHIFT_EXPR
13033 && integer_onep (TREE_OPERAND (arg1, 0)))
13034 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13035 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13036 TREE_OPERAND (arg1, 1)),
13037 build_int_cst (TREE_TYPE (arg0), 0));
13039 if ((code == LT_EXPR || code == GE_EXPR)
13040 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13041 && CONVERT_EXPR_P (arg1)
13042 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13043 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13045 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13046 fold_convert (TREE_TYPE (arg0),
13047 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13048 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13050 build_int_cst (TREE_TYPE (arg0), 0));
13054 case UNORDERED_EXPR:
13062 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13064 t1 = fold_relational_const (code, type, arg0, arg1);
13065 if (t1 != NULL_TREE)
13069 /* If the first operand is NaN, the result is constant. */
13070 if (TREE_CODE (arg0) == REAL_CST
13071 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13072 && (code != LTGT_EXPR || ! flag_trapping_math))
13074 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13075 ? integer_zero_node
13076 : integer_one_node;
13077 return omit_one_operand (type, t1, arg1);
13080 /* If the second operand is NaN, the result is constant. */
13081 if (TREE_CODE (arg1) == REAL_CST
13082 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13083 && (code != LTGT_EXPR || ! flag_trapping_math))
13085 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13086 ? integer_zero_node
13087 : integer_one_node;
13088 return omit_one_operand (type, t1, arg0);
13091 /* Simplify unordered comparison of something with itself. */
13092 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13093 && operand_equal_p (arg0, arg1, 0))
13094 return constant_boolean_node (1, type);
13096 if (code == LTGT_EXPR
13097 && !flag_trapping_math
13098 && operand_equal_p (arg0, arg1, 0))
13099 return constant_boolean_node (0, type);
13101 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13103 tree targ0 = strip_float_extensions (arg0);
13104 tree targ1 = strip_float_extensions (arg1);
13105 tree newtype = TREE_TYPE (targ0);
13107 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13108 newtype = TREE_TYPE (targ1);
13110 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13111 return fold_build2 (code, type, fold_convert (newtype, targ0),
13112 fold_convert (newtype, targ1));
13117 case COMPOUND_EXPR:
13118 /* When pedantic, a compound expression can be neither an lvalue
13119 nor an integer constant expression. */
13120 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13122 /* Don't let (0, 0) be null pointer constant. */
13123 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13124 : fold_convert (type, arg1);
13125 return pedantic_non_lvalue (tem);
13128 if ((TREE_CODE (arg0) == REAL_CST
13129 && TREE_CODE (arg1) == REAL_CST)
13130 || (TREE_CODE (arg0) == INTEGER_CST
13131 && TREE_CODE (arg1) == INTEGER_CST))
13132 return build_complex (type, arg0, arg1);
13136 /* An ASSERT_EXPR should never be passed to fold_binary. */
13137 gcc_unreachable ();
13141 } /* switch (code) */
13144 /* Callback for walk_tree, looking for LABEL_EXPR.
13145 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
13146 Do not check the sub-tree of GOTO_EXPR. */
13149 contains_label_1 (tree *tp,
13150 int *walk_subtrees,
13151 void *data ATTRIBUTE_UNUSED)
13153 switch (TREE_CODE (*tp))
13158 *walk_subtrees = 0;
13165 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13166 accessible from outside the sub-tree. Returns NULL_TREE if no
13167 addressable label is found. */
13170 contains_label_p (tree st)
13172 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13175 /* Fold a ternary expression of code CODE and type TYPE with operands
13176 OP0, OP1, and OP2. Return the folded expression if folding is
13177 successful. Otherwise, return NULL_TREE. */
13180 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13183 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13184 enum tree_code_class kind = TREE_CODE_CLASS (code);
13186 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13187 && TREE_CODE_LENGTH (code) == 3);
13189 /* Strip any conversions that don't change the mode. This is safe
13190 for every expression, except for a comparison expression because
13191 its signedness is derived from its operands. So, in the latter
13192 case, only strip conversions that don't change the signedness.
13194 Note that this is done as an internal manipulation within the
13195 constant folder, in order to find the simplest representation of
13196 the arguments so that their form can be studied. In any cases,
13197 the appropriate type conversions should be put back in the tree
13198 that will get out of the constant folder. */
13213 case COMPONENT_REF:
13214 if (TREE_CODE (arg0) == CONSTRUCTOR
13215 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13217 unsigned HOST_WIDE_INT idx;
13219 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13226 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13227 so all simple results must be passed through pedantic_non_lvalue. */
13228 if (TREE_CODE (arg0) == INTEGER_CST)
13230 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13231 tem = integer_zerop (arg0) ? op2 : op1;
13232 /* Only optimize constant conditions when the selected branch
13233 has the same type as the COND_EXPR. This avoids optimizing
13234 away "c ? x : throw", where the throw has a void type.
13235 Avoid throwing away that operand which contains label. */
13236 if ((!TREE_SIDE_EFFECTS (unused_op)
13237 || !contains_label_p (unused_op))
13238 && (! VOID_TYPE_P (TREE_TYPE (tem))
13239 || VOID_TYPE_P (type)))
13240 return pedantic_non_lvalue (tem);
13243 if (operand_equal_p (arg1, op2, 0))
13244 return pedantic_omit_one_operand (type, arg1, arg0);
13246 /* If we have A op B ? A : C, we may be able to convert this to a
13247 simpler expression, depending on the operation and the values
13248 of B and C. Signed zeros prevent all of these transformations,
13249 for reasons given above each one.
13251 Also try swapping the arguments and inverting the conditional. */
13252 if (COMPARISON_CLASS_P (arg0)
13253 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13254 arg1, TREE_OPERAND (arg0, 1))
13255 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13257 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13262 if (COMPARISON_CLASS_P (arg0)
13263 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13265 TREE_OPERAND (arg0, 1))
13266 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13268 tem = fold_truth_not_expr (arg0);
13269 if (tem && COMPARISON_CLASS_P (tem))
13271 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13277 /* If the second operand is simpler than the third, swap them
13278 since that produces better jump optimization results. */
13279 if (truth_value_p (TREE_CODE (arg0))
13280 && tree_swap_operands_p (op1, op2, false))
13282 /* See if this can be inverted. If it can't, possibly because
13283 it was a floating-point inequality comparison, don't do
13285 tem = fold_truth_not_expr (arg0);
13287 return fold_build3 (code, type, tem, op2, op1);
13290 /* Convert A ? 1 : 0 to simply A. */
13291 if (integer_onep (op1)
13292 && integer_zerop (op2)
13293 /* If we try to convert OP0 to our type, the
13294 call to fold will try to move the conversion inside
13295 a COND, which will recurse. In that case, the COND_EXPR
13296 is probably the best choice, so leave it alone. */
13297 && type == TREE_TYPE (arg0))
13298 return pedantic_non_lvalue (arg0);
13300 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13301 over COND_EXPR in cases such as floating point comparisons. */
13302 if (integer_zerop (op1)
13303 && integer_onep (op2)
13304 && truth_value_p (TREE_CODE (arg0)))
13305 return pedantic_non_lvalue (fold_convert (type,
13306 invert_truthvalue (arg0)));
13308 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13309 if (TREE_CODE (arg0) == LT_EXPR
13310 && integer_zerop (TREE_OPERAND (arg0, 1))
13311 && integer_zerop (op2)
13312 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13314 /* sign_bit_p only checks ARG1 bits within A's precision.
13315 If <sign bit of A> has wider type than A, bits outside
13316 of A's precision in <sign bit of A> need to be checked.
13317 If they are all 0, this optimization needs to be done
13318 in unsigned A's type, if they are all 1 in signed A's type,
13319 otherwise this can't be done. */
13320 if (TYPE_PRECISION (TREE_TYPE (tem))
13321 < TYPE_PRECISION (TREE_TYPE (arg1))
13322 && TYPE_PRECISION (TREE_TYPE (tem))
13323 < TYPE_PRECISION (type))
13325 unsigned HOST_WIDE_INT mask_lo;
13326 HOST_WIDE_INT mask_hi;
13327 int inner_width, outer_width;
13330 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13331 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13332 if (outer_width > TYPE_PRECISION (type))
13333 outer_width = TYPE_PRECISION (type);
13335 if (outer_width > HOST_BITS_PER_WIDE_INT)
13337 mask_hi = ((unsigned HOST_WIDE_INT) -1
13338 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13344 mask_lo = ((unsigned HOST_WIDE_INT) -1
13345 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13347 if (inner_width > HOST_BITS_PER_WIDE_INT)
13349 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13350 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13354 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13355 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13357 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13358 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13360 tem_type = signed_type_for (TREE_TYPE (tem));
13361 tem = fold_convert (tem_type, tem);
13363 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13364 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13366 tem_type = unsigned_type_for (TREE_TYPE (tem));
13367 tem = fold_convert (tem_type, tem);
13374 return fold_convert (type,
13375 fold_build2 (BIT_AND_EXPR,
13376 TREE_TYPE (tem), tem,
13377 fold_convert (TREE_TYPE (tem),
13381 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13382 already handled above. */
13383 if (TREE_CODE (arg0) == BIT_AND_EXPR
13384 && integer_onep (TREE_OPERAND (arg0, 1))
13385 && integer_zerop (op2)
13386 && integer_pow2p (arg1))
13388 tree tem = TREE_OPERAND (arg0, 0);
13390 if (TREE_CODE (tem) == RSHIFT_EXPR
13391 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13392 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13393 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13394 return fold_build2 (BIT_AND_EXPR, type,
13395 TREE_OPERAND (tem, 0), arg1);
13398 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13399 is probably obsolete because the first operand should be a
13400 truth value (that's why we have the two cases above), but let's
13401 leave it in until we can confirm this for all front-ends. */
13402 if (integer_zerop (op2)
13403 && TREE_CODE (arg0) == NE_EXPR
13404 && integer_zerop (TREE_OPERAND (arg0, 1))
13405 && integer_pow2p (arg1)
13406 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13407 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13408 arg1, OEP_ONLY_CONST))
13409 return pedantic_non_lvalue (fold_convert (type,
13410 TREE_OPERAND (arg0, 0)));
13412 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13413 if (integer_zerop (op2)
13414 && truth_value_p (TREE_CODE (arg0))
13415 && truth_value_p (TREE_CODE (arg1)))
13416 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13417 fold_convert (type, arg0),
13420 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13421 if (integer_onep (op2)
13422 && truth_value_p (TREE_CODE (arg0))
13423 && truth_value_p (TREE_CODE (arg1)))
13425 /* Only perform transformation if ARG0 is easily inverted. */
13426 tem = fold_truth_not_expr (arg0);
13428 return fold_build2 (TRUTH_ORIF_EXPR, type,
13429 fold_convert (type, tem),
13433 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13434 if (integer_zerop (arg1)
13435 && truth_value_p (TREE_CODE (arg0))
13436 && truth_value_p (TREE_CODE (op2)))
13438 /* Only perform transformation if ARG0 is easily inverted. */
13439 tem = fold_truth_not_expr (arg0);
13441 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13442 fold_convert (type, tem),
13446 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13447 if (integer_onep (arg1)
13448 && truth_value_p (TREE_CODE (arg0))
13449 && truth_value_p (TREE_CODE (op2)))
13450 return fold_build2 (TRUTH_ORIF_EXPR, type,
13451 fold_convert (type, arg0),
13457 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13458 of fold_ternary on them. */
13459 gcc_unreachable ();
13461 case BIT_FIELD_REF:
13462 if ((TREE_CODE (arg0) == VECTOR_CST
13463 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13464 && type == TREE_TYPE (TREE_TYPE (arg0)))
13466 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13467 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13470 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13471 && (idx % width) == 0
13472 && (idx = idx / width)
13473 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13475 tree elements = NULL_TREE;
13477 if (TREE_CODE (arg0) == VECTOR_CST)
13478 elements = TREE_VECTOR_CST_ELTS (arg0);
13481 unsigned HOST_WIDE_INT idx;
13484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13485 elements = tree_cons (NULL_TREE, value, elements);
13487 while (idx-- > 0 && elements)
13488 elements = TREE_CHAIN (elements);
13490 return TREE_VALUE (elements);
13492 return fold_convert (type, integer_zero_node);
13496 /* A bit-field-ref that referenced the full argument can be stripped. */
13497 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13498 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13499 && integer_zerop (op2))
13500 return fold_convert (type, arg0);
13506 } /* switch (code) */
13509 /* Perform constant folding and related simplification of EXPR.
13510 The related simplifications include x*1 => x, x*0 => 0, etc.,
13511 and application of the associative law.
13512 NOP_EXPR conversions may be removed freely (as long as we
13513 are careful not to change the type of the overall expression).
13514 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13515 but we can constant-fold them if they have constant operands. */
13517 #ifdef ENABLE_FOLD_CHECKING
13518 # define fold(x) fold_1 (x)
13519 static tree fold_1 (tree);
13525 const tree t = expr;
13526 enum tree_code code = TREE_CODE (t);
13527 enum tree_code_class kind = TREE_CODE_CLASS (code);
13530 /* Return right away if a constant. */
13531 if (kind == tcc_constant)
13534 /* CALL_EXPR-like objects with variable numbers of operands are
13535 treated specially. */
13536 if (kind == tcc_vl_exp)
13538 if (code == CALL_EXPR)
13540 tem = fold_call_expr (expr, false);
13541 return tem ? tem : expr;
13546 if (IS_EXPR_CODE_CLASS (kind))
13548 tree type = TREE_TYPE (t);
13549 tree op0, op1, op2;
13551 switch (TREE_CODE_LENGTH (code))
13554 op0 = TREE_OPERAND (t, 0);
13555 tem = fold_unary (code, type, op0);
13556 return tem ? tem : expr;
13558 op0 = TREE_OPERAND (t, 0);
13559 op1 = TREE_OPERAND (t, 1);
13560 tem = fold_binary (code, type, op0, op1);
13561 return tem ? tem : expr;
13563 op0 = TREE_OPERAND (t, 0);
13564 op1 = TREE_OPERAND (t, 1);
13565 op2 = TREE_OPERAND (t, 2);
13566 tem = fold_ternary (code, type, op0, op1, op2);
13567 return tem ? tem : expr;
13577 tree op0 = TREE_OPERAND (t, 0);
13578 tree op1 = TREE_OPERAND (t, 1);
13580 if (TREE_CODE (op1) == INTEGER_CST
13581 && TREE_CODE (op0) == CONSTRUCTOR
13582 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13584 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13585 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13586 unsigned HOST_WIDE_INT begin = 0;
13588 /* Find a matching index by means of a binary search. */
13589 while (begin != end)
13591 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13592 tree index = VEC_index (constructor_elt, elts, middle)->index;
13594 if (TREE_CODE (index) == INTEGER_CST
13595 && tree_int_cst_lt (index, op1))
13596 begin = middle + 1;
13597 else if (TREE_CODE (index) == INTEGER_CST
13598 && tree_int_cst_lt (op1, index))
13600 else if (TREE_CODE (index) == RANGE_EXPR
13601 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13602 begin = middle + 1;
13603 else if (TREE_CODE (index) == RANGE_EXPR
13604 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13607 return VEC_index (constructor_elt, elts, middle)->value;
13615 return fold (DECL_INITIAL (t));
13619 } /* switch (code) */
13622 #ifdef ENABLE_FOLD_CHECKING
13625 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13626 static void fold_check_failed (const_tree, const_tree);
13627 void print_fold_checksum (const_tree);
13629 /* When --enable-checking=fold, compute a digest of expr before
13630 and after actual fold call to see if fold did not accidentally
13631 change original expr. */
13637 struct md5_ctx ctx;
13638 unsigned char checksum_before[16], checksum_after[16];
13641 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13642 md5_init_ctx (&ctx);
13643 fold_checksum_tree (expr, &ctx, ht);
13644 md5_finish_ctx (&ctx, checksum_before);
13647 ret = fold_1 (expr);
13649 md5_init_ctx (&ctx);
13650 fold_checksum_tree (expr, &ctx, ht);
13651 md5_finish_ctx (&ctx, checksum_after);
13654 if (memcmp (checksum_before, checksum_after, 16))
13655 fold_check_failed (expr, ret);
13661 print_fold_checksum (const_tree expr)
13663 struct md5_ctx ctx;
13664 unsigned char checksum[16], cnt;
13667 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13668 md5_init_ctx (&ctx);
13669 fold_checksum_tree (expr, &ctx, ht);
13670 md5_finish_ctx (&ctx, checksum);
13672 for (cnt = 0; cnt < 16; ++cnt)
13673 fprintf (stderr, "%02x", checksum[cnt]);
13674 putc ('\n', stderr);
13678 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13680 internal_error ("fold check: original tree changed by fold");
13684 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13687 enum tree_code code;
13688 union tree_node buf;
13693 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13694 <= sizeof (struct tree_function_decl))
13695 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13698 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13702 code = TREE_CODE (expr);
13703 if (TREE_CODE_CLASS (code) == tcc_declaration
13704 && DECL_ASSEMBLER_NAME_SET_P (expr))
13706 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13707 memcpy ((char *) &buf, expr, tree_size (expr));
13708 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13709 expr = (tree) &buf;
13711 else if (TREE_CODE_CLASS (code) == tcc_type
13712 && (TYPE_POINTER_TO (expr)
13713 || TYPE_REFERENCE_TO (expr)
13714 || TYPE_CACHED_VALUES_P (expr)
13715 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13716 || TYPE_NEXT_VARIANT (expr)))
13718 /* Allow these fields to be modified. */
13720 memcpy ((char *) &buf, expr, tree_size (expr));
13721 expr = tmp = (tree) &buf;
13722 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13723 TYPE_POINTER_TO (tmp) = NULL;
13724 TYPE_REFERENCE_TO (tmp) = NULL;
13725 TYPE_NEXT_VARIANT (tmp) = NULL;
13726 if (TYPE_CACHED_VALUES_P (tmp))
13728 TYPE_CACHED_VALUES_P (tmp) = 0;
13729 TYPE_CACHED_VALUES (tmp) = NULL;
13732 md5_process_bytes (expr, tree_size (expr), ctx);
13733 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13734 if (TREE_CODE_CLASS (code) != tcc_type
13735 && TREE_CODE_CLASS (code) != tcc_declaration
13736 && code != TREE_LIST
13737 && code != SSA_NAME)
13738 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13739 switch (TREE_CODE_CLASS (code))
13745 md5_process_bytes (TREE_STRING_POINTER (expr),
13746 TREE_STRING_LENGTH (expr), ctx);
13749 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13750 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13753 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13759 case tcc_exceptional:
13763 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13764 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13765 expr = TREE_CHAIN (expr);
13766 goto recursive_label;
13769 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13770 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13776 case tcc_expression:
13777 case tcc_reference:
13778 case tcc_comparison:
13781 case tcc_statement:
13783 len = TREE_OPERAND_LENGTH (expr);
13784 for (i = 0; i < len; ++i)
13785 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13787 case tcc_declaration:
13788 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13789 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13790 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13792 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13793 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13794 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13795 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13796 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13798 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13799 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13801 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13803 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13804 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13805 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13809 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13810 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13811 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13812 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13813 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13814 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13815 if (INTEGRAL_TYPE_P (expr)
13816 || SCALAR_FLOAT_TYPE_P (expr))
13818 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13819 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13821 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13822 if (TREE_CODE (expr) == RECORD_TYPE
13823 || TREE_CODE (expr) == UNION_TYPE
13824 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13825 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13826 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13833 /* Helper function for outputting the checksum of a tree T. When
13834 debugging with gdb, you can "define mynext" to be "next" followed
13835 by "call debug_fold_checksum (op0)", then just trace down till the
13839 debug_fold_checksum (const_tree t)
13842 unsigned char checksum[16];
13843 struct md5_ctx ctx;
13844 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13846 md5_init_ctx (&ctx);
13847 fold_checksum_tree (t, &ctx, ht);
13848 md5_finish_ctx (&ctx, checksum);
13851 for (i = 0; i < 16; i++)
13852 fprintf (stderr, "%d ", checksum[i]);
13854 fprintf (stderr, "\n");
13859 /* Fold a unary tree expression with code CODE of type TYPE with an
13860 operand OP0. Return a folded expression if successful. Otherwise,
13861 return a tree expression with code CODE of type TYPE with an
13865 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13868 #ifdef ENABLE_FOLD_CHECKING
13869 unsigned char checksum_before[16], checksum_after[16];
13870 struct md5_ctx ctx;
13873 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13874 md5_init_ctx (&ctx);
13875 fold_checksum_tree (op0, &ctx, ht);
13876 md5_finish_ctx (&ctx, checksum_before);
13880 tem = fold_unary (code, type, op0);
13882 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13884 #ifdef ENABLE_FOLD_CHECKING
13885 md5_init_ctx (&ctx);
13886 fold_checksum_tree (op0, &ctx, ht);
13887 md5_finish_ctx (&ctx, checksum_after);
13890 if (memcmp (checksum_before, checksum_after, 16))
13891 fold_check_failed (op0, tem);
13896 /* Fold a binary tree expression with code CODE of type TYPE with
13897 operands OP0 and OP1. Return a folded expression if successful.
13898 Otherwise, return a tree expression with code CODE of type TYPE
13899 with operands OP0 and OP1. */
13902 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13906 #ifdef ENABLE_FOLD_CHECKING
13907 unsigned char checksum_before_op0[16],
13908 checksum_before_op1[16],
13909 checksum_after_op0[16],
13910 checksum_after_op1[16];
13911 struct md5_ctx ctx;
13914 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13915 md5_init_ctx (&ctx);
13916 fold_checksum_tree (op0, &ctx, ht);
13917 md5_finish_ctx (&ctx, checksum_before_op0);
13920 md5_init_ctx (&ctx);
13921 fold_checksum_tree (op1, &ctx, ht);
13922 md5_finish_ctx (&ctx, checksum_before_op1);
13926 tem = fold_binary (code, type, op0, op1);
13928 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13930 #ifdef ENABLE_FOLD_CHECKING
13931 md5_init_ctx (&ctx);
13932 fold_checksum_tree (op0, &ctx, ht);
13933 md5_finish_ctx (&ctx, checksum_after_op0);
13936 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13937 fold_check_failed (op0, tem);
13939 md5_init_ctx (&ctx);
13940 fold_checksum_tree (op1, &ctx, ht);
13941 md5_finish_ctx (&ctx, checksum_after_op1);
13944 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13945 fold_check_failed (op1, tem);
13950 /* Fold a ternary tree expression with code CODE of type TYPE with
13951 operands OP0, OP1, and OP2. Return a folded expression if
13952 successful. Otherwise, return a tree expression with code CODE of
13953 type TYPE with operands OP0, OP1, and OP2. */
13956 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13960 #ifdef ENABLE_FOLD_CHECKING
13961 unsigned char checksum_before_op0[16],
13962 checksum_before_op1[16],
13963 checksum_before_op2[16],
13964 checksum_after_op0[16],
13965 checksum_after_op1[16],
13966 checksum_after_op2[16];
13967 struct md5_ctx ctx;
13970 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13971 md5_init_ctx (&ctx);
13972 fold_checksum_tree (op0, &ctx, ht);
13973 md5_finish_ctx (&ctx, checksum_before_op0);
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (op1, &ctx, ht);
13978 md5_finish_ctx (&ctx, checksum_before_op1);
13981 md5_init_ctx (&ctx);
13982 fold_checksum_tree (op2, &ctx, ht);
13983 md5_finish_ctx (&ctx, checksum_before_op2);
13987 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13988 tem = fold_ternary (code, type, op0, op1, op2);
13990 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13992 #ifdef ENABLE_FOLD_CHECKING
13993 md5_init_ctx (&ctx);
13994 fold_checksum_tree (op0, &ctx, ht);
13995 md5_finish_ctx (&ctx, checksum_after_op0);
13998 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13999 fold_check_failed (op0, tem);
14001 md5_init_ctx (&ctx);
14002 fold_checksum_tree (op1, &ctx, ht);
14003 md5_finish_ctx (&ctx, checksum_after_op1);
14006 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14007 fold_check_failed (op1, tem);
14009 md5_init_ctx (&ctx);
14010 fold_checksum_tree (op2, &ctx, ht);
14011 md5_finish_ctx (&ctx, checksum_after_op2);
14014 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14015 fold_check_failed (op2, tem);
14020 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14021 arguments in ARGARRAY, and a null static chain.
14022 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14023 of type TYPE from the given operands as constructed by build_call_array. */
14026 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14029 #ifdef ENABLE_FOLD_CHECKING
14030 unsigned char checksum_before_fn[16],
14031 checksum_before_arglist[16],
14032 checksum_after_fn[16],
14033 checksum_after_arglist[16];
14034 struct md5_ctx ctx;
14038 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14039 md5_init_ctx (&ctx);
14040 fold_checksum_tree (fn, &ctx, ht);
14041 md5_finish_ctx (&ctx, checksum_before_fn);
14044 md5_init_ctx (&ctx);
14045 for (i = 0; i < nargs; i++)
14046 fold_checksum_tree (argarray[i], &ctx, ht);
14047 md5_finish_ctx (&ctx, checksum_before_arglist);
14051 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14053 #ifdef ENABLE_FOLD_CHECKING
14054 md5_init_ctx (&ctx);
14055 fold_checksum_tree (fn, &ctx, ht);
14056 md5_finish_ctx (&ctx, checksum_after_fn);
14059 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14060 fold_check_failed (fn, tem);
14062 md5_init_ctx (&ctx);
14063 for (i = 0; i < nargs; i++)
14064 fold_checksum_tree (argarray[i], &ctx, ht);
14065 md5_finish_ctx (&ctx, checksum_after_arglist);
14068 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14069 fold_check_failed (NULL_TREE, tem);
14074 /* Perform constant folding and related simplification of initializer
14075 expression EXPR. These behave identically to "fold_buildN" but ignore
14076 potential run-time traps and exceptions that fold must preserve. */
14078 #define START_FOLD_INIT \
14079 int saved_signaling_nans = flag_signaling_nans;\
14080 int saved_trapping_math = flag_trapping_math;\
14081 int saved_rounding_math = flag_rounding_math;\
14082 int saved_trapv = flag_trapv;\
14083 int saved_folding_initializer = folding_initializer;\
14084 flag_signaling_nans = 0;\
14085 flag_trapping_math = 0;\
14086 flag_rounding_math = 0;\
14088 folding_initializer = 1;
14090 #define END_FOLD_INIT \
14091 flag_signaling_nans = saved_signaling_nans;\
14092 flag_trapping_math = saved_trapping_math;\
14093 flag_rounding_math = saved_rounding_math;\
14094 flag_trapv = saved_trapv;\
14095 folding_initializer = saved_folding_initializer;
14098 fold_build1_initializer (enum tree_code code, tree type, tree op)
14103 result = fold_build1 (code, type, op);
14110 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14115 result = fold_build2 (code, type, op0, op1);
14122 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14128 result = fold_build3 (code, type, op0, op1, op2);
14135 fold_build_call_array_initializer (tree type, tree fn,
14136 int nargs, tree *argarray)
14141 result = fold_build_call_array (type, fn, nargs, argarray);
14147 #undef START_FOLD_INIT
14148 #undef END_FOLD_INIT
14150 /* Determine if first argument is a multiple of second argument. Return 0 if
14151 it is not, or we cannot easily determined it to be.
14153 An example of the sort of thing we care about (at this point; this routine
14154 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14155 fold cases do now) is discovering that
14157 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14163 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14165 This code also handles discovering that
14167 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14169 is a multiple of 8 so we don't have to worry about dealing with a
14170 possible remainder.
14172 Note that we *look* inside a SAVE_EXPR only to determine how it was
14173 calculated; it is not safe for fold to do much of anything else with the
14174 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14175 at run time. For example, the latter example above *cannot* be implemented
14176 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14177 evaluation time of the original SAVE_EXPR is not necessarily the same at
14178 the time the new expression is evaluated. The only optimization of this
14179 sort that would be valid is changing
14181 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14185 SAVE_EXPR (I) * SAVE_EXPR (J)
14187 (where the same SAVE_EXPR (J) is used in the original and the
14188 transformed version). */
14191 multiple_of_p (tree type, const_tree top, const_tree bottom)
14193 if (operand_equal_p (top, bottom, 0))
14196 if (TREE_CODE (type) != INTEGER_TYPE)
14199 switch (TREE_CODE (top))
14202 /* Bitwise and provides a power of two multiple. If the mask is
14203 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14204 if (!integer_pow2p (bottom))
14209 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14210 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14214 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14215 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14218 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14222 op1 = TREE_OPERAND (top, 1);
14223 /* const_binop may not detect overflow correctly,
14224 so check for it explicitly here. */
14225 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14226 > TREE_INT_CST_LOW (op1)
14227 && TREE_INT_CST_HIGH (op1) == 0
14228 && 0 != (t1 = fold_convert (type,
14229 const_binop (LSHIFT_EXPR,
14232 && !TREE_OVERFLOW (t1))
14233 return multiple_of_p (type, t1, bottom);
14238 /* Can't handle conversions from non-integral or wider integral type. */
14239 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14240 || (TYPE_PRECISION (type)
14241 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14244 /* .. fall through ... */
14247 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14250 if (TREE_CODE (bottom) != INTEGER_CST
14251 || integer_zerop (bottom)
14252 || (TYPE_UNSIGNED (type)
14253 && (tree_int_cst_sgn (top) < 0
14254 || tree_int_cst_sgn (bottom) < 0)))
14256 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14264 /* Return true if CODE or TYPE is known to be non-negative. */
14267 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14269 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14270 && truth_value_p (code))
14271 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14272 have a signed:1 type (where the value is -1 and 0). */
14277 /* Return true if (CODE OP0) is known to be non-negative. If the return
14278 value is based on the assumption that signed overflow is undefined,
14279 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14280 *STRICT_OVERFLOW_P. */
14283 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14284 bool *strict_overflow_p)
14286 if (TYPE_UNSIGNED (type))
14292 /* We can't return 1 if flag_wrapv is set because
14293 ABS_EXPR<INT_MIN> = INT_MIN. */
14294 if (!INTEGRAL_TYPE_P (type))
14296 if (TYPE_OVERFLOW_UNDEFINED (type))
14298 *strict_overflow_p = true;
14303 case NON_LVALUE_EXPR:
14305 case FIX_TRUNC_EXPR:
14306 return tree_expr_nonnegative_warnv_p (op0,
14307 strict_overflow_p);
14311 tree inner_type = TREE_TYPE (op0);
14312 tree outer_type = type;
14314 if (TREE_CODE (outer_type) == REAL_TYPE)
14316 if (TREE_CODE (inner_type) == REAL_TYPE)
14317 return tree_expr_nonnegative_warnv_p (op0,
14318 strict_overflow_p);
14319 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14321 if (TYPE_UNSIGNED (inner_type))
14323 return tree_expr_nonnegative_warnv_p (op0,
14324 strict_overflow_p);
14327 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14329 if (TREE_CODE (inner_type) == REAL_TYPE)
14330 return tree_expr_nonnegative_warnv_p (op0,
14331 strict_overflow_p);
14332 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14333 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14334 && TYPE_UNSIGNED (inner_type);
14340 return tree_simple_nonnegative_warnv_p (code, type);
14343 /* We don't know sign of `t', so be conservative and return false. */
14347 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14348 value is based on the assumption that signed overflow is undefined,
14349 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14350 *STRICT_OVERFLOW_P. */
14353 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14354 tree op1, bool *strict_overflow_p)
14356 if (TYPE_UNSIGNED (type))
14361 case POINTER_PLUS_EXPR:
14363 if (FLOAT_TYPE_P (type))
14364 return (tree_expr_nonnegative_warnv_p (op0,
14366 && tree_expr_nonnegative_warnv_p (op1,
14367 strict_overflow_p));
14369 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14370 both unsigned and at least 2 bits shorter than the result. */
14371 if (TREE_CODE (type) == INTEGER_TYPE
14372 && TREE_CODE (op0) == NOP_EXPR
14373 && TREE_CODE (op1) == NOP_EXPR)
14375 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14376 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14377 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14378 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14380 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14381 TYPE_PRECISION (inner2)) + 1;
14382 return prec < TYPE_PRECISION (type);
14388 if (FLOAT_TYPE_P (type))
14390 /* x * x for floating point x is always non-negative. */
14391 if (operand_equal_p (op0, op1, 0))
14393 return (tree_expr_nonnegative_warnv_p (op0,
14395 && tree_expr_nonnegative_warnv_p (op1,
14396 strict_overflow_p));
14399 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14400 both unsigned and their total bits is shorter than the result. */
14401 if (TREE_CODE (type) == INTEGER_TYPE
14402 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14403 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14405 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14406 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14408 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14409 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14412 bool unsigned0 = TYPE_UNSIGNED (inner0);
14413 bool unsigned1 = TYPE_UNSIGNED (inner1);
14415 if (TREE_CODE (op0) == INTEGER_CST)
14416 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14418 if (TREE_CODE (op1) == INTEGER_CST)
14419 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14421 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14422 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14424 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14425 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14426 : TYPE_PRECISION (inner0);
14428 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14429 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14430 : TYPE_PRECISION (inner1);
14432 return precision0 + precision1 < TYPE_PRECISION (type);
14439 return (tree_expr_nonnegative_warnv_p (op0,
14441 || tree_expr_nonnegative_warnv_p (op1,
14442 strict_overflow_p));
14448 case TRUNC_DIV_EXPR:
14449 case CEIL_DIV_EXPR:
14450 case FLOOR_DIV_EXPR:
14451 case ROUND_DIV_EXPR:
14452 return (tree_expr_nonnegative_warnv_p (op0,
14454 && tree_expr_nonnegative_warnv_p (op1,
14455 strict_overflow_p));
14457 case TRUNC_MOD_EXPR:
14458 case CEIL_MOD_EXPR:
14459 case FLOOR_MOD_EXPR:
14460 case ROUND_MOD_EXPR:
14461 return tree_expr_nonnegative_warnv_p (op0,
14462 strict_overflow_p);
14464 return tree_simple_nonnegative_warnv_p (code, type);
14467 /* We don't know sign of `t', so be conservative and return false. */
14471 /* Return true if T is known to be non-negative. If the return
14472 value is based on the assumption that signed overflow is undefined,
14473 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14474 *STRICT_OVERFLOW_P. */
14477 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14479 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14482 switch (TREE_CODE (t))
14485 return tree_int_cst_sgn (t) >= 0;
14488 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14491 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14494 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14496 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14497 strict_overflow_p));
14499 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14502 /* We don't know sign of `t', so be conservative and return false. */
14506 /* Return true if T is known to be non-negative. If the return
14507 value is based on the assumption that signed overflow is undefined,
14508 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14509 *STRICT_OVERFLOW_P. */
14512 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14513 tree arg0, tree arg1, bool *strict_overflow_p)
14515 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14516 switch (DECL_FUNCTION_CODE (fndecl))
14518 CASE_FLT_FN (BUILT_IN_ACOS):
14519 CASE_FLT_FN (BUILT_IN_ACOSH):
14520 CASE_FLT_FN (BUILT_IN_CABS):
14521 CASE_FLT_FN (BUILT_IN_COSH):
14522 CASE_FLT_FN (BUILT_IN_ERFC):
14523 CASE_FLT_FN (BUILT_IN_EXP):
14524 CASE_FLT_FN (BUILT_IN_EXP10):
14525 CASE_FLT_FN (BUILT_IN_EXP2):
14526 CASE_FLT_FN (BUILT_IN_FABS):
14527 CASE_FLT_FN (BUILT_IN_FDIM):
14528 CASE_FLT_FN (BUILT_IN_HYPOT):
14529 CASE_FLT_FN (BUILT_IN_POW10):
14530 CASE_INT_FN (BUILT_IN_FFS):
14531 CASE_INT_FN (BUILT_IN_PARITY):
14532 CASE_INT_FN (BUILT_IN_POPCOUNT):
14533 case BUILT_IN_BSWAP32:
14534 case BUILT_IN_BSWAP64:
14538 CASE_FLT_FN (BUILT_IN_SQRT):
14539 /* sqrt(-0.0) is -0.0. */
14540 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14542 return tree_expr_nonnegative_warnv_p (arg0,
14543 strict_overflow_p);
14545 CASE_FLT_FN (BUILT_IN_ASINH):
14546 CASE_FLT_FN (BUILT_IN_ATAN):
14547 CASE_FLT_FN (BUILT_IN_ATANH):
14548 CASE_FLT_FN (BUILT_IN_CBRT):
14549 CASE_FLT_FN (BUILT_IN_CEIL):
14550 CASE_FLT_FN (BUILT_IN_ERF):
14551 CASE_FLT_FN (BUILT_IN_EXPM1):
14552 CASE_FLT_FN (BUILT_IN_FLOOR):
14553 CASE_FLT_FN (BUILT_IN_FMOD):
14554 CASE_FLT_FN (BUILT_IN_FREXP):
14555 CASE_FLT_FN (BUILT_IN_LCEIL):
14556 CASE_FLT_FN (BUILT_IN_LDEXP):
14557 CASE_FLT_FN (BUILT_IN_LFLOOR):
14558 CASE_FLT_FN (BUILT_IN_LLCEIL):
14559 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14560 CASE_FLT_FN (BUILT_IN_LLRINT):
14561 CASE_FLT_FN (BUILT_IN_LLROUND):
14562 CASE_FLT_FN (BUILT_IN_LRINT):
14563 CASE_FLT_FN (BUILT_IN_LROUND):
14564 CASE_FLT_FN (BUILT_IN_MODF):
14565 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14566 CASE_FLT_FN (BUILT_IN_RINT):
14567 CASE_FLT_FN (BUILT_IN_ROUND):
14568 CASE_FLT_FN (BUILT_IN_SCALB):
14569 CASE_FLT_FN (BUILT_IN_SCALBLN):
14570 CASE_FLT_FN (BUILT_IN_SCALBN):
14571 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14572 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14573 CASE_FLT_FN (BUILT_IN_SINH):
14574 CASE_FLT_FN (BUILT_IN_TANH):
14575 CASE_FLT_FN (BUILT_IN_TRUNC):
14576 /* True if the 1st argument is nonnegative. */
14577 return tree_expr_nonnegative_warnv_p (arg0,
14578 strict_overflow_p);
14580 CASE_FLT_FN (BUILT_IN_FMAX):
14581 /* True if the 1st OR 2nd arguments are nonnegative. */
14582 return (tree_expr_nonnegative_warnv_p (arg0,
14584 || (tree_expr_nonnegative_warnv_p (arg1,
14585 strict_overflow_p)));
14587 CASE_FLT_FN (BUILT_IN_FMIN):
14588 /* True if the 1st AND 2nd arguments are nonnegative. */
14589 return (tree_expr_nonnegative_warnv_p (arg0,
14591 && (tree_expr_nonnegative_warnv_p (arg1,
14592 strict_overflow_p)));
14594 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14595 /* True if the 2nd argument is nonnegative. */
14596 return tree_expr_nonnegative_warnv_p (arg1,
14597 strict_overflow_p);
14599 CASE_FLT_FN (BUILT_IN_POWI):
14600 /* True if the 1st argument is nonnegative or the second
14601 argument is an even integer. */
14602 if (TREE_CODE (arg1) == INTEGER_CST
14603 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14605 return tree_expr_nonnegative_warnv_p (arg0,
14606 strict_overflow_p);
14608 CASE_FLT_FN (BUILT_IN_POW):
14609 /* True if the 1st argument is nonnegative or the second
14610 argument is an even integer valued real. */
14611 if (TREE_CODE (arg1) == REAL_CST)
14616 c = TREE_REAL_CST (arg1);
14617 n = real_to_integer (&c);
14620 REAL_VALUE_TYPE cint;
14621 real_from_integer (&cint, VOIDmode, n,
14622 n < 0 ? -1 : 0, 0);
14623 if (real_identical (&c, &cint))
14627 return tree_expr_nonnegative_warnv_p (arg0,
14628 strict_overflow_p);
14633 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14637 /* Return true if T is known to be non-negative. If the return
14638 value is based on the assumption that signed overflow is undefined,
14639 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14640 *STRICT_OVERFLOW_P. */
14643 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14645 enum tree_code code = TREE_CODE (t);
14646 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14653 tree temp = TARGET_EXPR_SLOT (t);
14654 t = TARGET_EXPR_INITIAL (t);
14656 /* If the initializer is non-void, then it's a normal expression
14657 that will be assigned to the slot. */
14658 if (!VOID_TYPE_P (t))
14659 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14661 /* Otherwise, the initializer sets the slot in some way. One common
14662 way is an assignment statement at the end of the initializer. */
14665 if (TREE_CODE (t) == BIND_EXPR)
14666 t = expr_last (BIND_EXPR_BODY (t));
14667 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14668 || TREE_CODE (t) == TRY_CATCH_EXPR)
14669 t = expr_last (TREE_OPERAND (t, 0));
14670 else if (TREE_CODE (t) == STATEMENT_LIST)
14675 if (TREE_CODE (t) == MODIFY_EXPR
14676 && TREE_OPERAND (t, 0) == temp)
14677 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14678 strict_overflow_p);
14685 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14686 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14688 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14689 get_callee_fndecl (t),
14692 strict_overflow_p);
14694 case COMPOUND_EXPR:
14696 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14697 strict_overflow_p);
14699 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14700 strict_overflow_p);
14702 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14703 strict_overflow_p);
14706 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14710 /* We don't know sign of `t', so be conservative and return false. */
14714 /* Return true if T is known to be non-negative. If the return
14715 value is based on the assumption that signed overflow is undefined,
14716 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14717 *STRICT_OVERFLOW_P. */
14720 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14722 enum tree_code code;
14723 if (t == error_mark_node)
14726 code = TREE_CODE (t);
14727 switch (TREE_CODE_CLASS (code))
14730 case tcc_comparison:
14731 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14733 TREE_OPERAND (t, 0),
14734 TREE_OPERAND (t, 1),
14735 strict_overflow_p);
14738 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14740 TREE_OPERAND (t, 0),
14741 strict_overflow_p);
14744 case tcc_declaration:
14745 case tcc_reference:
14746 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14754 case TRUTH_AND_EXPR:
14755 case TRUTH_OR_EXPR:
14756 case TRUTH_XOR_EXPR:
14757 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14759 TREE_OPERAND (t, 0),
14760 TREE_OPERAND (t, 1),
14761 strict_overflow_p);
14762 case TRUTH_NOT_EXPR:
14763 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14765 TREE_OPERAND (t, 0),
14766 strict_overflow_p);
14773 case WITH_SIZE_EXPR:
14777 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14780 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14784 /* Return true if `t' is known to be non-negative. Handle warnings
14785 about undefined signed overflow. */
14788 tree_expr_nonnegative_p (tree t)
14790 bool ret, strict_overflow_p;
14792 strict_overflow_p = false;
14793 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14794 if (strict_overflow_p)
14795 fold_overflow_warning (("assuming signed overflow does not occur when "
14796 "determining that expression is always "
14798 WARN_STRICT_OVERFLOW_MISC);
14803 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14804 For floating point we further ensure that T is not denormal.
14805 Similar logic is present in nonzero_address in rtlanal.h.
14807 If the return value is based on the assumption that signed overflow
14808 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14809 change *STRICT_OVERFLOW_P. */
14812 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14813 bool *strict_overflow_p)
14818 return tree_expr_nonzero_warnv_p (op0,
14819 strict_overflow_p);
14823 tree inner_type = TREE_TYPE (op0);
14824 tree outer_type = type;
14826 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14827 && tree_expr_nonzero_warnv_p (op0,
14828 strict_overflow_p));
14832 case NON_LVALUE_EXPR:
14833 return tree_expr_nonzero_warnv_p (op0,
14834 strict_overflow_p);
14843 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14844 For floating point we further ensure that T is not denormal.
14845 Similar logic is present in nonzero_address in rtlanal.h.
14847 If the return value is based on the assumption that signed overflow
14848 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14849 change *STRICT_OVERFLOW_P. */
14852 tree_binary_nonzero_warnv_p (enum tree_code code,
14855 tree op1, bool *strict_overflow_p)
14857 bool sub_strict_overflow_p;
14860 case POINTER_PLUS_EXPR:
14862 if (TYPE_OVERFLOW_UNDEFINED (type))
14864 /* With the presence of negative values it is hard
14865 to say something. */
14866 sub_strict_overflow_p = false;
14867 if (!tree_expr_nonnegative_warnv_p (op0,
14868 &sub_strict_overflow_p)
14869 || !tree_expr_nonnegative_warnv_p (op1,
14870 &sub_strict_overflow_p))
14872 /* One of operands must be positive and the other non-negative. */
14873 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14874 overflows, on a twos-complement machine the sum of two
14875 nonnegative numbers can never be zero. */
14876 return (tree_expr_nonzero_warnv_p (op0,
14878 || tree_expr_nonzero_warnv_p (op1,
14879 strict_overflow_p));
14884 if (TYPE_OVERFLOW_UNDEFINED (type))
14886 if (tree_expr_nonzero_warnv_p (op0,
14888 && tree_expr_nonzero_warnv_p (op1,
14889 strict_overflow_p))
14891 *strict_overflow_p = true;
14898 sub_strict_overflow_p = false;
14899 if (tree_expr_nonzero_warnv_p (op0,
14900 &sub_strict_overflow_p)
14901 && tree_expr_nonzero_warnv_p (op1,
14902 &sub_strict_overflow_p))
14904 if (sub_strict_overflow_p)
14905 *strict_overflow_p = true;
14910 sub_strict_overflow_p = false;
14911 if (tree_expr_nonzero_warnv_p (op0,
14912 &sub_strict_overflow_p))
14914 if (sub_strict_overflow_p)
14915 *strict_overflow_p = true;
14917 /* When both operands are nonzero, then MAX must be too. */
14918 if (tree_expr_nonzero_warnv_p (op1,
14919 strict_overflow_p))
14922 /* MAX where operand 0 is positive is positive. */
14923 return tree_expr_nonnegative_warnv_p (op0,
14924 strict_overflow_p);
14926 /* MAX where operand 1 is positive is positive. */
14927 else if (tree_expr_nonzero_warnv_p (op1,
14928 &sub_strict_overflow_p)
14929 && tree_expr_nonnegative_warnv_p (op1,
14930 &sub_strict_overflow_p))
14932 if (sub_strict_overflow_p)
14933 *strict_overflow_p = true;
14939 return (tree_expr_nonzero_warnv_p (op1,
14941 || tree_expr_nonzero_warnv_p (op0,
14942 strict_overflow_p));
14951 /* Return true when T is an address and is known to be nonzero.
14952 For floating point we further ensure that T is not denormal.
14953 Similar logic is present in nonzero_address in rtlanal.h.
14955 If the return value is based on the assumption that signed overflow
14956 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14957 change *STRICT_OVERFLOW_P. */
14960 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14962 bool sub_strict_overflow_p;
14963 switch (TREE_CODE (t))
14966 return !integer_zerop (t);
14970 tree base = get_base_address (TREE_OPERAND (t, 0));
14975 /* Weak declarations may link to NULL. */
14976 if (VAR_OR_FUNCTION_DECL_P (base))
14977 return !DECL_WEAK (base);
14979 /* Constants are never weak. */
14980 if (CONSTANT_CLASS_P (base))
14987 sub_strict_overflow_p = false;
14988 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14989 &sub_strict_overflow_p)
14990 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14991 &sub_strict_overflow_p))
14993 if (sub_strict_overflow_p)
14994 *strict_overflow_p = true;
15005 /* Return true when T is an address and is known to be nonzero.
15006 For floating point we further ensure that T is not denormal.
15007 Similar logic is present in nonzero_address in rtlanal.h.
15009 If the return value is based on the assumption that signed overflow
15010 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15011 change *STRICT_OVERFLOW_P. */
15014 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15016 tree type = TREE_TYPE (t);
15017 enum tree_code code;
15019 /* Doing something useful for floating point would need more work. */
15020 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15023 code = TREE_CODE (t);
15024 switch (TREE_CODE_CLASS (code))
15027 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15028 strict_overflow_p);
15030 case tcc_comparison:
15031 return tree_binary_nonzero_warnv_p (code, type,
15032 TREE_OPERAND (t, 0),
15033 TREE_OPERAND (t, 1),
15034 strict_overflow_p);
15036 case tcc_declaration:
15037 case tcc_reference:
15038 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15046 case TRUTH_NOT_EXPR:
15047 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15048 strict_overflow_p);
15050 case TRUTH_AND_EXPR:
15051 case TRUTH_OR_EXPR:
15052 case TRUTH_XOR_EXPR:
15053 return tree_binary_nonzero_warnv_p (code, type,
15054 TREE_OPERAND (t, 0),
15055 TREE_OPERAND (t, 1),
15056 strict_overflow_p);
15063 case WITH_SIZE_EXPR:
15067 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15069 case COMPOUND_EXPR:
15072 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15073 strict_overflow_p);
15076 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15077 strict_overflow_p);
15080 return alloca_call_p (t);
15088 /* Return true when T is an address and is known to be nonzero.
15089 Handle warnings about undefined signed overflow. */
15092 tree_expr_nonzero_p (tree t)
15094 bool ret, strict_overflow_p;
15096 strict_overflow_p = false;
15097 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15098 if (strict_overflow_p)
15099 fold_overflow_warning (("assuming signed overflow does not occur when "
15100 "determining that expression is always "
15102 WARN_STRICT_OVERFLOW_MISC);
15106 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15107 attempt to fold the expression to a constant without modifying TYPE,
15110 If the expression could be simplified to a constant, then return
15111 the constant. If the expression would not be simplified to a
15112 constant, then return NULL_TREE. */
15115 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15117 tree tem = fold_binary (code, type, op0, op1);
15118 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15121 /* Given the components of a unary expression CODE, TYPE and OP0,
15122 attempt to fold the expression to a constant without modifying
15125 If the expression could be simplified to a constant, then return
15126 the constant. If the expression would not be simplified to a
15127 constant, then return NULL_TREE. */
15130 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15132 tree tem = fold_unary (code, type, op0);
15133 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15136 /* If EXP represents referencing an element in a constant string
15137 (either via pointer arithmetic or array indexing), return the
15138 tree representing the value accessed, otherwise return NULL. */
15141 fold_read_from_constant_string (tree exp)
15143 if ((TREE_CODE (exp) == INDIRECT_REF
15144 || TREE_CODE (exp) == ARRAY_REF)
15145 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15147 tree exp1 = TREE_OPERAND (exp, 0);
15151 if (TREE_CODE (exp) == INDIRECT_REF)
15152 string = string_constant (exp1, &index);
15155 tree low_bound = array_ref_low_bound (exp);
15156 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15158 /* Optimize the special-case of a zero lower bound.
15160 We convert the low_bound to sizetype to avoid some problems
15161 with constant folding. (E.g. suppose the lower bound is 1,
15162 and its mode is QI. Without the conversion,l (ARRAY
15163 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15164 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15165 if (! integer_zerop (low_bound))
15166 index = size_diffop (index, fold_convert (sizetype, low_bound));
15172 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15173 && TREE_CODE (string) == STRING_CST
15174 && TREE_CODE (index) == INTEGER_CST
15175 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15176 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15178 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15179 return build_int_cst_type (TREE_TYPE (exp),
15180 (TREE_STRING_POINTER (string)
15181 [TREE_INT_CST_LOW (index)]));
15186 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15187 an integer constant, real, or fixed-point constant.
15189 TYPE is the type of the result. */
15192 fold_negate_const (tree arg0, tree type)
15194 tree t = NULL_TREE;
15196 switch (TREE_CODE (arg0))
15200 unsigned HOST_WIDE_INT low;
15201 HOST_WIDE_INT high;
15202 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15203 TREE_INT_CST_HIGH (arg0),
15205 t = force_fit_type_double (type, low, high, 1,
15206 (overflow | TREE_OVERFLOW (arg0))
15207 && !TYPE_UNSIGNED (type));
15212 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15217 FIXED_VALUE_TYPE f;
15218 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15219 &(TREE_FIXED_CST (arg0)), NULL,
15220 TYPE_SATURATING (type));
15221 t = build_fixed (type, f);
15222 /* Propagate overflow flags. */
15223 if (overflow_p | TREE_OVERFLOW (arg0))
15224 TREE_OVERFLOW (t) = 1;
15229 gcc_unreachable ();
15235 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15236 an integer constant or real constant.
15238 TYPE is the type of the result. */
15241 fold_abs_const (tree arg0, tree type)
15243 tree t = NULL_TREE;
15245 switch (TREE_CODE (arg0))
15248 /* If the value is unsigned, then the absolute value is
15249 the same as the ordinary value. */
15250 if (TYPE_UNSIGNED (type))
15252 /* Similarly, if the value is non-negative. */
15253 else if (INT_CST_LT (integer_minus_one_node, arg0))
15255 /* If the value is negative, then the absolute value is
15259 unsigned HOST_WIDE_INT low;
15260 HOST_WIDE_INT high;
15261 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15262 TREE_INT_CST_HIGH (arg0),
15264 t = force_fit_type_double (type, low, high, -1,
15265 overflow | TREE_OVERFLOW (arg0));
15270 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15271 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15277 gcc_unreachable ();
15283 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15284 constant. TYPE is the type of the result. */
15287 fold_not_const (tree arg0, tree type)
15289 tree t = NULL_TREE;
15291 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15293 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15294 ~TREE_INT_CST_HIGH (arg0), 0,
15295 TREE_OVERFLOW (arg0));
15300 /* Given CODE, a relational operator, the target type, TYPE and two
15301 constant operands OP0 and OP1, return the result of the
15302 relational operation. If the result is not a compile time
15303 constant, then return NULL_TREE. */
15306 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15308 int result, invert;
15310 /* From here on, the only cases we handle are when the result is
15311 known to be a constant. */
15313 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15315 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15316 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15318 /* Handle the cases where either operand is a NaN. */
15319 if (real_isnan (c0) || real_isnan (c1))
15329 case UNORDERED_EXPR:
15343 if (flag_trapping_math)
15349 gcc_unreachable ();
15352 return constant_boolean_node (result, type);
15355 return constant_boolean_node (real_compare (code, c0, c1), type);
15358 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15360 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15361 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15362 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15365 /* Handle equality/inequality of complex constants. */
15366 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15368 tree rcond = fold_relational_const (code, type,
15369 TREE_REALPART (op0),
15370 TREE_REALPART (op1));
15371 tree icond = fold_relational_const (code, type,
15372 TREE_IMAGPART (op0),
15373 TREE_IMAGPART (op1));
15374 if (code == EQ_EXPR)
15375 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15376 else if (code == NE_EXPR)
15377 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15382 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15384 To compute GT, swap the arguments and do LT.
15385 To compute GE, do LT and invert the result.
15386 To compute LE, swap the arguments, do LT and invert the result.
15387 To compute NE, do EQ and invert the result.
15389 Therefore, the code below must handle only EQ and LT. */
15391 if (code == LE_EXPR || code == GT_EXPR)
15396 code = swap_tree_comparison (code);
15399 /* Note that it is safe to invert for real values here because we
15400 have already handled the one case that it matters. */
15403 if (code == NE_EXPR || code == GE_EXPR)
15406 code = invert_tree_comparison (code, false);
15409 /* Compute a result for LT or EQ if args permit;
15410 Otherwise return T. */
15411 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15413 if (code == EQ_EXPR)
15414 result = tree_int_cst_equal (op0, op1);
15415 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15416 result = INT_CST_LT_UNSIGNED (op0, op1);
15418 result = INT_CST_LT (op0, op1);
15425 return constant_boolean_node (result, type);
15428 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15429 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15433 fold_build_cleanup_point_expr (tree type, tree expr)
15435 /* If the expression does not have side effects then we don't have to wrap
15436 it with a cleanup point expression. */
15437 if (!TREE_SIDE_EFFECTS (expr))
15440 /* If the expression is a return, check to see if the expression inside the
15441 return has no side effects or the right hand side of the modify expression
15442 inside the return. If either don't have side effects set we don't need to
15443 wrap the expression in a cleanup point expression. Note we don't check the
15444 left hand side of the modify because it should always be a return decl. */
15445 if (TREE_CODE (expr) == RETURN_EXPR)
15447 tree op = TREE_OPERAND (expr, 0);
15448 if (!op || !TREE_SIDE_EFFECTS (op))
15450 op = TREE_OPERAND (op, 1);
15451 if (!TREE_SIDE_EFFECTS (op))
15455 return build1 (CLEANUP_POINT_EXPR, type, expr);
15458 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15459 of an indirection through OP0, or NULL_TREE if no simplification is
15463 fold_indirect_ref_1 (tree type, tree op0)
15469 subtype = TREE_TYPE (sub);
15470 if (!POINTER_TYPE_P (subtype))
15473 if (TREE_CODE (sub) == ADDR_EXPR)
15475 tree op = TREE_OPERAND (sub, 0);
15476 tree optype = TREE_TYPE (op);
15477 /* *&CONST_DECL -> to the value of the const decl. */
15478 if (TREE_CODE (op) == CONST_DECL)
15479 return DECL_INITIAL (op);
15480 /* *&p => p; make sure to handle *&"str"[cst] here. */
15481 if (type == optype)
15483 tree fop = fold_read_from_constant_string (op);
15489 /* *(foo *)&fooarray => fooarray[0] */
15490 else if (TREE_CODE (optype) == ARRAY_TYPE
15491 && type == TREE_TYPE (optype))
15493 tree type_domain = TYPE_DOMAIN (optype);
15494 tree min_val = size_zero_node;
15495 if (type_domain && TYPE_MIN_VALUE (type_domain))
15496 min_val = TYPE_MIN_VALUE (type_domain);
15497 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15499 /* *(foo *)&complexfoo => __real__ complexfoo */
15500 else if (TREE_CODE (optype) == COMPLEX_TYPE
15501 && type == TREE_TYPE (optype))
15502 return fold_build1 (REALPART_EXPR, type, op);
15503 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15504 else if (TREE_CODE (optype) == VECTOR_TYPE
15505 && type == TREE_TYPE (optype))
15507 tree part_width = TYPE_SIZE (type);
15508 tree index = bitsize_int (0);
15509 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15513 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15514 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15515 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15517 tree op00 = TREE_OPERAND (sub, 0);
15518 tree op01 = TREE_OPERAND (sub, 1);
15522 op00type = TREE_TYPE (op00);
15523 if (TREE_CODE (op00) == ADDR_EXPR
15524 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15525 && type == TREE_TYPE (TREE_TYPE (op00type)))
15527 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15528 tree part_width = TYPE_SIZE (type);
15529 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15530 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15531 tree index = bitsize_int (indexi);
15533 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15534 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15535 part_width, index);
15541 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15542 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15543 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15545 tree op00 = TREE_OPERAND (sub, 0);
15546 tree op01 = TREE_OPERAND (sub, 1);
15550 op00type = TREE_TYPE (op00);
15551 if (TREE_CODE (op00) == ADDR_EXPR
15552 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15553 && type == TREE_TYPE (TREE_TYPE (op00type)))
15555 tree size = TYPE_SIZE_UNIT (type);
15556 if (tree_int_cst_equal (size, op01))
15557 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15561 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15562 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15563 && type == TREE_TYPE (TREE_TYPE (subtype)))
15566 tree min_val = size_zero_node;
15567 sub = build_fold_indirect_ref (sub);
15568 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15569 if (type_domain && TYPE_MIN_VALUE (type_domain))
15570 min_val = TYPE_MIN_VALUE (type_domain);
15571 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15577 /* Builds an expression for an indirection through T, simplifying some
15581 build_fold_indirect_ref (tree t)
15583 tree type = TREE_TYPE (TREE_TYPE (t));
15584 tree sub = fold_indirect_ref_1 (type, t);
15589 return build1 (INDIRECT_REF, type, t);
15592 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15595 fold_indirect_ref (tree t)
15597 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15605 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15606 whose result is ignored. The type of the returned tree need not be
15607 the same as the original expression. */
15610 fold_ignored_result (tree t)
15612 if (!TREE_SIDE_EFFECTS (t))
15613 return integer_zero_node;
15616 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15619 t = TREE_OPERAND (t, 0);
15623 case tcc_comparison:
15624 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15625 t = TREE_OPERAND (t, 0);
15626 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15627 t = TREE_OPERAND (t, 1);
15632 case tcc_expression:
15633 switch (TREE_CODE (t))
15635 case COMPOUND_EXPR:
15636 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15638 t = TREE_OPERAND (t, 0);
15642 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15643 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15645 t = TREE_OPERAND (t, 0);
15658 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15659 This can only be applied to objects of a sizetype. */
15662 round_up (tree value, int divisor)
15664 tree div = NULL_TREE;
15666 gcc_assert (divisor > 0);
15670 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15671 have to do anything. Only do this when we are not given a const,
15672 because in that case, this check is more expensive than just
15674 if (TREE_CODE (value) != INTEGER_CST)
15676 div = build_int_cst (TREE_TYPE (value), divisor);
15678 if (multiple_of_p (TREE_TYPE (value), value, div))
15682 /* If divisor is a power of two, simplify this to bit manipulation. */
15683 if (divisor == (divisor & -divisor))
15685 if (TREE_CODE (value) == INTEGER_CST)
15687 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15688 unsigned HOST_WIDE_INT high;
15691 if ((low & (divisor - 1)) == 0)
15694 overflow_p = TREE_OVERFLOW (value);
15695 high = TREE_INT_CST_HIGH (value);
15696 low &= ~(divisor - 1);
15705 return force_fit_type_double (TREE_TYPE (value), low, high,
15712 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15713 value = size_binop (PLUS_EXPR, value, t);
15714 t = build_int_cst (TREE_TYPE (value), -divisor);
15715 value = size_binop (BIT_AND_EXPR, value, t);
15721 div = build_int_cst (TREE_TYPE (value), divisor);
15722 value = size_binop (CEIL_DIV_EXPR, value, div);
15723 value = size_binop (MULT_EXPR, value, div);
15729 /* Likewise, but round down. */
15732 round_down (tree value, int divisor)
15734 tree div = NULL_TREE;
15736 gcc_assert (divisor > 0);
15740 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15741 have to do anything. Only do this when we are not given a const,
15742 because in that case, this check is more expensive than just
15744 if (TREE_CODE (value) != INTEGER_CST)
15746 div = build_int_cst (TREE_TYPE (value), divisor);
15748 if (multiple_of_p (TREE_TYPE (value), value, div))
15752 /* If divisor is a power of two, simplify this to bit manipulation. */
15753 if (divisor == (divisor & -divisor))
15757 t = build_int_cst (TREE_TYPE (value), -divisor);
15758 value = size_binop (BIT_AND_EXPR, value, t);
15763 div = build_int_cst (TREE_TYPE (value), divisor);
15764 value = size_binop (FLOOR_DIV_EXPR, value, div);
15765 value = size_binop (MULT_EXPR, value, div);
15771 /* Returns the pointer to the base of the object addressed by EXP and
15772 extracts the information about the offset of the access, storing it
15773 to PBITPOS and POFFSET. */
15776 split_address_to_core_and_offset (tree exp,
15777 HOST_WIDE_INT *pbitpos, tree *poffset)
15780 enum machine_mode mode;
15781 int unsignedp, volatilep;
15782 HOST_WIDE_INT bitsize;
15784 if (TREE_CODE (exp) == ADDR_EXPR)
15786 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15787 poffset, &mode, &unsignedp, &volatilep,
15789 core = build_fold_addr_expr (core);
15795 *poffset = NULL_TREE;
15801 /* Returns true if addresses of E1 and E2 differ by a constant, false
15802 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15805 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15808 HOST_WIDE_INT bitpos1, bitpos2;
15809 tree toffset1, toffset2, tdiff, type;
15811 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15812 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15814 if (bitpos1 % BITS_PER_UNIT != 0
15815 || bitpos2 % BITS_PER_UNIT != 0
15816 || !operand_equal_p (core1, core2, 0))
15819 if (toffset1 && toffset2)
15821 type = TREE_TYPE (toffset1);
15822 if (type != TREE_TYPE (toffset2))
15823 toffset2 = fold_convert (type, toffset2);
15825 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15826 if (!cst_and_fits_in_hwi (tdiff))
15829 *diff = int_cst_value (tdiff);
15831 else if (toffset1 || toffset2)
15833 /* If only one of the offsets is non-constant, the difference cannot
15840 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15844 /* Simplify the floating point expression EXP when the sign of the
15845 result is not significant. Return NULL_TREE if no simplification
15849 fold_strip_sign_ops (tree exp)
15853 switch (TREE_CODE (exp))
15857 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15858 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15862 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15864 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15865 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15866 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15867 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15868 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15869 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15872 case COMPOUND_EXPR:
15873 arg0 = TREE_OPERAND (exp, 0);
15874 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15876 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15880 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15881 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15883 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15884 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15885 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15890 const enum built_in_function fcode = builtin_mathfn_code (exp);
15893 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15894 /* Strip copysign function call, return the 1st argument. */
15895 arg0 = CALL_EXPR_ARG (exp, 0);
15896 arg1 = CALL_EXPR_ARG (exp, 1);
15897 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15900 /* Strip sign ops from the argument of "odd" math functions. */
15901 if (negate_mathfn_p (fcode))
15903 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15905 return build_call_expr (get_callee_fndecl (exp), 1, arg0);