1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and sets TREE_OVERFLOW.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
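/* For example, a caller building a size expression might combine the
   entry points above; a minimal, hypothetical sketch (COUNT is assumed
   to be an existing tree of type `sizetype'):

     tree bytes = size_binop (MULT_EXPR, size_int (4), count);

   If COUNT is an INTEGER_CST, the result is folded to a constant.  */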
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
69 /* Nonzero if we are folding constants inside an initializer; zero otherwise. */
71 int folding_initializer = 0;
73 /* The following constants represent a bit based encoding of GCC's
74 comparison operators. This encoding simplifies transformations
75 on relational comparison operators, such as AND and OR. */
76 enum comparison_code {
95 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
96 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
97 static bool negate_mathfn_p (enum built_in_function);
98 static bool negate_expr_p (tree);
99 static tree negate_expr (tree);
100 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
101 static tree associate_trees (tree, tree, enum tree_code, tree);
102 static tree const_binop (enum tree_code, tree, tree, int);
103 static enum comparison_code comparison_to_compcode (enum tree_code);
104 static enum tree_code compcode_to_comparison (enum comparison_code);
105 static tree combine_comparisons (enum tree_code, enum tree_code,
106 enum tree_code, tree, tree, tree);
107 static int truth_value_p (enum tree_code);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static tree sign_bit_p (tree, const_tree);
117 static int simple_operand_p (const_tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *, bool *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (const_tree, const_tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
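/* For illustration, pretend the values are 8 bits wide: 0x7f + 0x01 gives
   SUM 0x80; A and B agree in sign while A and SUM differ, so
   ~(a ^ b) & (a ^ sum) has its sign bit set and the macro yields nonzero.
   For 0x7f + 0xff (127 + -1 = 126) the operands differ in sign, ~(a ^ b)
   clears the sign bit, and no overflow is reported.  */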
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
160 #define LOWPART(x) \
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
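/* For example, with a 64-bit HOST_WIDE_INT, BASE is 2^32 and
   LOWPART (0x1234567890abcdef) == 0x90abcdef,
   HIGHPART (0x1234567890abcdef) == 0x12345678,
   so the original value is recovered as LOWPART + HIGHPART * BASE.  */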
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
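/* For example, with a 64-bit HOST_WIDE_INT, encoding LOW = 0x1122334455667788,
   HI = 0 yields WORDS = { 0x55667788, 0x11223344, 0, 0 }; decode below is
   the exact inverse of this transformation.  */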
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
191 /* Force the double-word integer L1, H1 to be within the range of the
192 integer type TYPE. Stores the properly truncated and sign-extended
193 double-word integer in *LV, *HV. Returns true if the operation
194 overflows, that is, argument and result are different. */
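/* For example, fitting the value 511 (L1 = 0x1ff, H1 = 0) to a signed
   8-bit type first masks off everything above bit 7, leaving 0xff, then
   sign extends, giving -1; because the result differs from the original
   value, the function reports overflow.  */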
197 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
198 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
200 unsigned HOST_WIDE_INT low0 = l1;
201 HOST_WIDE_INT high0 = h1;
203 int sign_extended_type;
205 if (POINTER_TYPE_P (type)
206 || TREE_CODE (type) == OFFSET_TYPE)
209 prec = TYPE_PRECISION (type);
211 /* Size types *are* sign extended. */
212 sign_extended_type = (!TYPE_UNSIGNED (type)
213 || (TREE_CODE (type) == INTEGER_TYPE
214 && TYPE_IS_SIZETYPE (type)));
216 /* First clear all bits that are beyond the type's precision. */
217 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219 else if (prec > HOST_BITS_PER_WIDE_INT)
220 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 if (prec < HOST_BITS_PER_WIDE_INT)
225 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
228 /* Then do sign extension if necessary. */
229 if (!sign_extended_type)
230 /* No sign extension */;
231 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
232 /* Correct width already. */;
233 else if (prec > HOST_BITS_PER_WIDE_INT)
235 /* Sign extend top half? */
236 if (h1 & ((unsigned HOST_WIDE_INT)1
237 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
238 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240 else if (prec == HOST_BITS_PER_WIDE_INT)
242 if ((HOST_WIDE_INT)l1 < 0)
247 /* Sign extend bottom half? */
248 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
251 l1 |= (HOST_WIDE_INT)(-1) << prec;
258 /* If the value didn't fit, signal overflow. */
259 return l1 != low0 || h1 != high0;
262 /* We force the double-int HIGH:LOW to the range of the type TYPE by
263 sign or zero extending it.
264 OVERFLOWABLE indicates if we are interested
265 in overflow of the value, when >0 we are only interested in signed
266 overflow, for <0 we are interested in any overflow. OVERFLOWED
267 indicates whether overflow has already occurred. CONST_OVERFLOWED
268 indicates whether constant overflow has already occurred. We force
269 T's value to be within range of T's type (by setting to 0 or 1 all
270 the bits outside the type's range). We set TREE_OVERFLOW if,
271 OVERFLOWED is nonzero,
272 or OVERFLOWABLE is >0 and signed overflow occurs
273 or OVERFLOWABLE is <0 and any overflow occurs
274 We return a new tree node for the extended double-int. The node
275 is shared if no overflow flags are set. */
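/* Continuing the example above: forcing the value 511 into a signed
   8-bit type yields an INTEGER_CST of value -1; whether TREE_OVERFLOW is
   set on the (then unshared) node depends on OVERFLOWABLE, OVERFLOWED
   and the signedness of the type, per the rules listed above.  */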
278 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
279 HOST_WIDE_INT high, int overflowable,
282 int sign_extended_type;
285 /* Size types *are* sign extended. */
286 sign_extended_type = (!TYPE_UNSIGNED (type)
287 || (TREE_CODE (type) == INTEGER_TYPE
288 && TYPE_IS_SIZETYPE (type)));
290 overflow = fit_double_type (low, high, &low, &high, type);
292 /* If we need to set overflow flags, return a new unshared node. */
293 if (overflowed || overflow)
297 || (overflowable > 0 && sign_extended_type))
299 tree t = make_node (INTEGER_CST);
300 TREE_INT_CST_LOW (t) = low;
301 TREE_INT_CST_HIGH (t) = high;
302 TREE_TYPE (t) = type;
303 TREE_OVERFLOW (t) = 1;
308 /* Else build a shared node. */
309 return build_int_cst_wide (type, low, high);
312 /* Add two doubleword integers with doubleword result.
313 Return nonzero if the operation overflows according to UNSIGNED_P.
314 Each argument is given as two `HOST_WIDE_INT' pieces.
315 One argument is L1 and H1; the other, L2 and H2.
316 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
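/* The low words are added first; a carry out of the low word is detected
   portably by checking whether the unsigned low-word sum wrapped around,
   i.e. (l1 + l2) < l1.  For example, with 8-bit words, 0xf0 + 0x20
   produces 0x10, which is less than 0xf0, so one carry propagates into
   the high word.  */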
319 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
320 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
324 unsigned HOST_WIDE_INT l;
328 h = h1 + h2 + (l < l1);
334 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
336 return OVERFLOW_SUM_SIGN (h1, h2, h);
339 /* Negate a doubleword integer with doubleword result.
340 Return nonzero if the operation overflows, assuming it's signed.
341 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
342 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
345 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
352 return (*hv & h1) < 0;
362 /* Multiply two doubleword integers with doubleword result.
363 Return nonzero if the operation overflows according to UNSIGNED_P.
364 Each argument is given as two `HOST_WIDE_INT' pieces.
365 One argument is L1 and H1; the other, L2 and H2.
366 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
369 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
370 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
371 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
374 HOST_WIDE_INT arg1[4];
375 HOST_WIDE_INT arg2[4];
376 HOST_WIDE_INT prod[4 * 2];
377 unsigned HOST_WIDE_INT carry;
379 unsigned HOST_WIDE_INT toplow, neglow;
380 HOST_WIDE_INT tophigh, neghigh;
382 encode (arg1, l1, h1);
383 encode (arg2, l2, h2);
385 memset (prod, 0, sizeof prod);
387 for (i = 0; i < 4; i++)
390 for (j = 0; j < 4; j++)
393 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
394 carry += arg1[i] * arg2[j];
395 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
397 prod[k] = LOWPART (carry);
398 carry = HIGHPART (carry);
403 decode (prod, lv, hv);
404 decode (prod + 4, &toplow, &tophigh);
406 /* Unsigned overflow is immediate. */
408 return (toplow | tophigh) != 0;
410 /* Check for signed overflow by calculating the signed representation of the
411 top half of the result; it should agree with the low half's sign bit. */
414 neg_double (l2, h2, &neglow, &neghigh);
415 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
419 neg_double (l1, h1, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
425 /* Shift the doubleword integer in L1, H1 left by COUNT places
426 keeping only PREC bits of result.
427 Shift right if COUNT is negative.
428 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
429 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
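/* For example, with a 64-bit HOST_WIDE_INT and assuming
   SHIFT_COUNT_TRUNCATED does not first reduce the count, shifting
   L1 = 1, H1 = 0 left by 68 places falls into the
   COUNT >= HOST_BITS_PER_WIDE_INT case below: the low word becomes 0 and
   the high word becomes 1 << 4, i.e. the double-word value 2^68; the
   code at the end of the function then truncates or sign extends the
   result to PREC bits.  */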
432 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
433 HOST_WIDE_INT count, unsigned int prec,
434 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
436 unsigned HOST_WIDE_INT signmask;
440 rshift_double (l1, h1, -count, prec, lv, hv, arith);
444 if (SHIFT_COUNT_TRUNCATED)
447 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
449 /* Shifting by the host word size is undefined according to the
450 ANSI standard, so we must handle this as a special case. */
454 else if (count >= HOST_BITS_PER_WIDE_INT)
456 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
461 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
462 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
466 /* Sign extend all bits that are beyond the precision. */
468 signmask = -((prec > HOST_BITS_PER_WIDE_INT
469 ? ((unsigned HOST_WIDE_INT) *hv
470 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
471 : (*lv >> (prec - 1))) & 1);
473 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
475 else if (prec >= HOST_BITS_PER_WIDE_INT)
477 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
478 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
483 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
484 *lv |= signmask << prec;
488 /* Shift the doubleword integer in L1, H1 right by COUNT places
489 keeping only PREC bits of result. COUNT must be positive.
490 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
491 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
494 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
495 HOST_WIDE_INT count, unsigned int prec,
496 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
499 unsigned HOST_WIDE_INT signmask;
502 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
505 if (SHIFT_COUNT_TRUNCATED)
508 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
510 /* Shifting by the host word size is undefined according to the
511 ANSI standard, so we must handle this as a special case. */
515 else if (count >= HOST_BITS_PER_WIDE_INT)
518 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
522 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
524 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
527 /* Zero / sign extend all bits that are beyond the precision. */
529 if (count >= (HOST_WIDE_INT)prec)
534 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
536 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
538 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
539 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
544 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
545 *lv |= signmask << (prec - count);
549 /* Rotate the doubleword integer in L1, H1 left by COUNT places
550 keeping only PREC bits of result.
551 Rotate right if COUNT is negative.
552 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
555 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
556 HOST_WIDE_INT count, unsigned int prec,
557 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
559 unsigned HOST_WIDE_INT s1l, s2l;
560 HOST_WIDE_INT s1h, s2h;
566 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
567 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
572 /* Rotate the doubleword integer in L1, H1 right by COUNT places
573 keeping only PREC bits of result. COUNT must be positive.
574 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
577 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
578 HOST_WIDE_INT count, unsigned int prec,
579 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
581 unsigned HOST_WIDE_INT s1l, s2l;
582 HOST_WIDE_INT s1h, s2h;
588 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
589 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
594 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
595 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
596 CODE is a tree code for a kind of division, one of
597 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR or EXACT_DIV_EXPR.
599 It controls how the quotient is rounded to an integer.
600 Return nonzero if the operation overflows.
601 UNS nonzero says do unsigned division. */
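/* For example, dividing -7 by 2:
     TRUNC_DIV_EXPR  -> quotient -3, remainder -1  (round toward zero)
     FLOOR_DIV_EXPR  -> quotient -4, remainder  1  (round toward -infinity)
     CEIL_DIV_EXPR   -> quotient -3, remainder -1  (round toward +infinity)
     ROUND_DIV_EXPR  -> quotient -4, remainder  1  (round to nearest,
                                                    ties away from zero)
   The *_MOD_EXPR codes round the quotient the same way; callers then use
   the remainder that is stored alongside it.  */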
604 div_and_round_double (enum tree_code code, int uns,
605 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
606 HOST_WIDE_INT hnum_orig,
607 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
608 HOST_WIDE_INT hden_orig,
609 unsigned HOST_WIDE_INT *lquo,
610 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
614 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
615 HOST_WIDE_INT den[4], quo[4];
617 unsigned HOST_WIDE_INT work;
618 unsigned HOST_WIDE_INT carry = 0;
619 unsigned HOST_WIDE_INT lnum = lnum_orig;
620 HOST_WIDE_INT hnum = hnum_orig;
621 unsigned HOST_WIDE_INT lden = lden_orig;
622 HOST_WIDE_INT hden = hden_orig;
625 if (hden == 0 && lden == 0)
626 overflow = 1, lden = 1;
628 /* Calculate quotient sign and convert operands to unsigned. */
634 /* (minimum integer) / (-1) is the only overflow case. */
635 if (neg_double (lnum, hnum, &lnum, &hnum)
636 && ((HOST_WIDE_INT) lden & hden) == -1)
642 neg_double (lden, hden, &lden, &hden);
646 if (hnum == 0 && hden == 0)
647 { /* single precision */
649 /* This unsigned division rounds toward zero. */
655 { /* trivial case: dividend < divisor */
656 /* hden != 0 already checked. */
663 memset (quo, 0, sizeof quo);
665 memset (num, 0, sizeof num); /* to zero the extra element used for scaling */
666 memset (den, 0, sizeof den);
668 encode (num, lnum, hnum);
669 encode (den, lden, hden);
671 /* Special code for when the divisor < BASE. */
672 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
674 /* hnum != 0 already checked. */
675 for (i = 4 - 1; i >= 0; i--)
677 work = num[i] + carry * BASE;
678 quo[i] = work / lden;
684 /* Full double precision division,
685 with thanks to Don Knuth's "Seminumerical Algorithms". */
686 int num_hi_sig, den_hi_sig;
687 unsigned HOST_WIDE_INT quo_est, scale;
689 /* Find the highest nonzero divisor digit. */
690 for (i = 4 - 1;; i--)
697 /* Ensure that the first digit of the divisor is at least BASE/2.
698 This is required by the quotient digit estimation algorithm. */
700 scale = BASE / (den[den_hi_sig] + 1);
702 { /* scale divisor and dividend */
704 for (i = 0; i <= 4 - 1; i++)
706 work = (num[i] * scale) + carry;
707 num[i] = LOWPART (work);
708 carry = HIGHPART (work);
713 for (i = 0; i <= 4 - 1; i++)
715 work = (den[i] * scale) + carry;
716 den[i] = LOWPART (work);
717 carry = HIGHPART (work);
718 if (den[i] != 0) den_hi_sig = i;
725 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
727 /* Guess the next quotient digit, quo_est, by dividing the first
728 two remaining dividend digits by the high order divisor digit.
729 quo_est is never low and is at most 2 high. */
730 unsigned HOST_WIDE_INT tmp;
732 num_hi_sig = i + den_hi_sig + 1;
733 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
734 if (num[num_hi_sig] != den[den_hi_sig])
735 quo_est = work / den[den_hi_sig];
739 /* Refine quo_est so it's usually correct, and at most one high. */
740 tmp = work - quo_est * den[den_hi_sig];
742 && (den[den_hi_sig - 1] * quo_est
743 > (tmp * BASE + num[num_hi_sig - 2])))
746 /* Try QUO_EST as the quotient digit, by multiplying the
747 divisor by QUO_EST and subtracting from the remaining dividend.
748 Keep in mind that QUO_EST is the I - 1st digit. */
751 for (j = 0; j <= den_hi_sig; j++)
753 work = quo_est * den[j] + carry;
754 carry = HIGHPART (work);
755 work = num[i + j] - LOWPART (work);
756 num[i + j] = LOWPART (work);
757 carry += HIGHPART (work) != 0;
760 /* If quo_est was high by one, then num[i] went negative and
761 we need to correct things. */
762 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
765 carry = 0; /* add divisor back in */
766 for (j = 0; j <= den_hi_sig; j++)
768 work = num[i + j] + den[j] + carry;
769 carry = HIGHPART (work);
770 num[i + j] = LOWPART (work);
773 num [num_hi_sig] += carry;
776 /* Store the quotient digit. */
781 decode (quo, lquo, hquo);
784 /* If the result should be negative, negate it. */
786 neg_double (*lquo, *hquo, lquo, hquo);
788 /* Compute trial remainder: rem = num - (quo * den) */
789 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
790 neg_double (*lrem, *hrem, lrem, hrem);
791 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
796 case TRUNC_MOD_EXPR: /* round toward zero */
797 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
801 case FLOOR_MOD_EXPR: /* round toward negative infinity */
802 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
805 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
813 case CEIL_MOD_EXPR: /* round toward positive infinity */
814 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
816 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
824 case ROUND_MOD_EXPR: /* round to closest integer */
826 unsigned HOST_WIDE_INT labs_rem = *lrem;
827 HOST_WIDE_INT habs_rem = *hrem;
828 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
829 HOST_WIDE_INT habs_den = hden, htwice;
831 /* Get absolute values. */
833 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
835 neg_double (lden, hden, &labs_den, &habs_den);
837 /* If (2 * abs (lrem) >= abs (lden)) */
838 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
839 labs_rem, habs_rem, <wice, &htwice);
841 if (((unsigned HOST_WIDE_INT) habs_den
842 < (unsigned HOST_WIDE_INT) htwice)
843 || (((unsigned HOST_WIDE_INT) habs_den
844 == (unsigned HOST_WIDE_INT) htwice)
845 && (labs_den < ltwice)))
849 add_double (*lquo, *hquo,
850 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
853 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
865 /* Compute true remainder: rem = num - (quo * den) */
866 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
867 neg_double (*lrem, *hrem, lrem, hrem);
868 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
872 /* If ARG2 divides ARG1 with zero remainder, carries out the division
873 of type CODE and returns the quotient.
874 Otherwise returns NULL_TREE. */
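/* For example, with EXACT_DIV_EXPR, dividing the constant 12 by 4 yields
   the constant 3, while dividing 13 by 4 yields NULL_TREE because the
   remainder is nonzero.  */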
877 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
879 unsigned HOST_WIDE_INT int1l, int2l;
880 HOST_WIDE_INT int1h, int2h;
881 unsigned HOST_WIDE_INT quol, reml;
882 HOST_WIDE_INT quoh, remh;
883 tree type = TREE_TYPE (arg1);
884 int uns = TYPE_UNSIGNED (type);
886 int1l = TREE_INT_CST_LOW (arg1);
887 int1h = TREE_INT_CST_HIGH (arg1);
888 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
889 &obj[some_exotic_number]. */
890 if (POINTER_TYPE_P (type))
893 type = signed_type_for (type);
894 fit_double_type (int1l, int1h, &int1l, &int1h,
898 fit_double_type (int1l, int1h, &int1l, &int1h, type);
899 int2l = TREE_INT_CST_LOW (arg2);
900 int2h = TREE_INT_CST_HIGH (arg2);
902 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
903 &quol, &quoh, &reml, &remh);
904 if (remh != 0 || reml != 0)
907 return build_int_cst_wide (type, quol, quoh);
910 /* This is nonzero if we should defer warnings about undefined
911 overflow. This facility exists because these warnings are a
912 special case. The code to estimate loop iterations does not want
913 to issue any warnings, since it works with expressions which do not
914 occur in user code. Various bits of cleanup code call fold(), but
915 only use the result if it has certain characteristics (e.g., is a
916 constant); that code only wants to issue a warning if the result is used. */
919 static int fold_deferring_overflow_warnings;
921 /* If a warning about undefined overflow is deferred, this is the
922 warning. Note that this may cause us to turn two warnings into
923 one, but that is fine since it is sufficient to only give one
924 warning per expression. */
926 static const char* fold_deferred_overflow_warning;
928 /* If a warning about undefined overflow is deferred, this is the
929 level at which the warning should be emitted. */
931 static enum warn_strict_overflow_code fold_deferred_overflow_code;
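/* A typical (hypothetical) caller brackets a folding attempt like this,
   where result_is_interesting_p stands for whatever caller-specific test
   decides that the folded result will actually be used:

     fold_defer_overflow_warnings ();
     t = fold (expr);
     fold_undefer_overflow_warnings (result_is_interesting_p (t), stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   Deferred warnings are then only emitted when the result is used.  */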
933 /* Start deferring overflow warnings. We could use a stack here to
934 permit nested calls, but at present it is not necessary. */
937 fold_defer_overflow_warnings (void)
939 ++fold_deferring_overflow_warnings;
942 /* Stop deferring overflow warnings. If there is a pending warning,
943 and ISSUE is true, then issue the warning if appropriate. STMT is
944 the statement with which the warning should be associated (used for
945 location information); STMT may be NULL. CODE is the level of the
946 warning--a warn_strict_overflow_code value. This function will use
947 the smaller of CODE and the deferred code when deciding whether to
948 issue the warning. CODE may be zero to mean to always use the deferred code. */
952 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
957 gcc_assert (fold_deferring_overflow_warnings > 0);
958 --fold_deferring_overflow_warnings;
959 if (fold_deferring_overflow_warnings > 0)
961 if (fold_deferred_overflow_warning != NULL
963 && code < (int) fold_deferred_overflow_code)
964 fold_deferred_overflow_code = code;
968 warnmsg = fold_deferred_overflow_warning;
969 fold_deferred_overflow_warning = NULL;
971 if (!issue || warnmsg == NULL)
974 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
977 /* Use the smallest code level when deciding to issue the warning. */
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
982 if (!issue_strict_overflow_warning (code))
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 /* Stop deferring overflow warnings, ignoring any deferred warnings. */
996 fold_undefer_and_ignore_overflow_warnings (void)
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1001 /* Whether we are deferring overflow warnings. */
1004 fold_deferring_overflow_warnings_p (void)
1006 return fold_deferring_overflow_warnings > 0;
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 gcc_assert (!flag_wrapv && !flag_trapv);
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing overflow. */
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1137 return negate_expr_p (TREE_REALPART (t))
1138 && negate_expr_p (TREE_IMAGPART (t));
1141 return negate_expr_p (TREE_OPERAND (t, 0))
1142 && negate_expr_p (TREE_OPERAND (t, 1));
1145 return negate_expr_p (TREE_OPERAND (t, 0));
1148 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1156 /* -(A + B) -> (-A) - B. */
1157 return negate_expr_p (TREE_OPERAND (t, 0));
1160 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1161 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1163 && reorder_operands_p (TREE_OPERAND (t, 0),
1164 TREE_OPERAND (t, 1));
1167 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1174 return negate_expr_p (TREE_OPERAND (t, 1))
1175 || negate_expr_p (TREE_OPERAND (t, 0));
1178 case TRUNC_DIV_EXPR:
1179 case ROUND_DIV_EXPR:
1180 case FLOOR_DIV_EXPR:
1182 case EXACT_DIV_EXPR:
1183 /* In general we can't negate A / B, because if A is INT_MIN and
1184 B is 1, we may turn this into INT_MIN / -1 which is undefined
1185 and actually traps on some architectures. But if overflow is
1186 undefined, we can negate, because - (INT_MIN / 1) is an overflow. */
1188 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1189 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1191 return negate_expr_p (TREE_OPERAND (t, 1))
1192 || negate_expr_p (TREE_OPERAND (t, 0));
1195 /* Negate -((double)float) as (double)(-float). */
1196 if (TREE_CODE (type) == REAL_TYPE)
1198 tree tem = strip_float_extensions (t);
1200 return negate_expr_p (tem);
1205 /* Negate -f(x) as f(-x). */
1206 if (negate_mathfn_p (builtin_mathfn_code (t)))
1207 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1211 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1212 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1214 tree op1 = TREE_OPERAND (t, 1);
1215 if (TREE_INT_CST_HIGH (op1) == 0
1216 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1217 == TREE_INT_CST_LOW (op1))
1228 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1229 simplification is possible.
1230 If negate_expr_p would return true for T, NULL_TREE will never be returned. */
1234 fold_negate_expr (tree t)
1236 tree type = TREE_TYPE (t);
1239 switch (TREE_CODE (t))
1241 /* Convert - (~A) to A + 1. */
1243 if (INTEGRAL_TYPE_P (type))
1244 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1245 build_int_cst (type, 1));
1249 tem = fold_negate_const (t, type);
1250 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1251 || !TYPE_OVERFLOW_TRAPS (type))
1256 tem = fold_negate_const (t, type);
1257 /* Two's complement FP formats, such as c4x, may overflow. */
1258 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1263 tem = fold_negate_const (t, type);
1268 tree rpart = negate_expr (TREE_REALPART (t));
1269 tree ipart = negate_expr (TREE_IMAGPART (t));
1271 if ((TREE_CODE (rpart) == REAL_CST
1272 && TREE_CODE (ipart) == REAL_CST)
1273 || (TREE_CODE (rpart) == INTEGER_CST
1274 && TREE_CODE (ipart) == INTEGER_CST))
1275 return build_complex (type, rpart, ipart);
1280 if (negate_expr_p (t))
1281 return fold_build2 (COMPLEX_EXPR, type,
1282 fold_negate_expr (TREE_OPERAND (t, 0)),
1283 fold_negate_expr (TREE_OPERAND (t, 1)));
1287 if (negate_expr_p (t))
1288 return fold_build1 (CONJ_EXPR, type,
1289 fold_negate_expr (TREE_OPERAND (t, 0)));
1293 return TREE_OPERAND (t, 0);
1296 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1299 /* -(A + B) -> (-B) - A. */
1300 if (negate_expr_p (TREE_OPERAND (t, 1))
1301 && reorder_operands_p (TREE_OPERAND (t, 0),
1302 TREE_OPERAND (t, 1)))
1304 tem = negate_expr (TREE_OPERAND (t, 1));
1305 return fold_build2 (MINUS_EXPR, type,
1306 tem, TREE_OPERAND (t, 0));
1309 /* -(A + B) -> (-A) - B. */
1310 if (negate_expr_p (TREE_OPERAND (t, 0)))
1312 tem = negate_expr (TREE_OPERAND (t, 0));
1313 return fold_build2 (MINUS_EXPR, type,
1314 tem, TREE_OPERAND (t, 1));
1320 /* - (A - B) -> B - A */
1321 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1323 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1324 return fold_build2 (MINUS_EXPR, type,
1325 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1329 if (TYPE_UNSIGNED (type))
1335 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1337 tem = TREE_OPERAND (t, 1);
1338 if (negate_expr_p (tem))
1339 return fold_build2 (TREE_CODE (t), type,
1340 TREE_OPERAND (t, 0), negate_expr (tem));
1341 tem = TREE_OPERAND (t, 0);
1342 if (negate_expr_p (tem))
1343 return fold_build2 (TREE_CODE (t), type,
1344 negate_expr (tem), TREE_OPERAND (t, 1));
1348 case TRUNC_DIV_EXPR:
1349 case ROUND_DIV_EXPR:
1350 case FLOOR_DIV_EXPR:
1352 case EXACT_DIV_EXPR:
1353 /* In general we can't negate A / B, because if A is INT_MIN and
1354 B is 1, we may turn this into INT_MIN / -1 which is undefined
1355 and actually traps on some architectures. But if overflow is
1356 undefined, we can negate, because - (INT_MIN / 1) is an overflow. */
1358 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1360 const char * const warnmsg = G_("assuming signed overflow does not "
1361 "occur when negating a division");
1362 tem = TREE_OPERAND (t, 1);
1363 if (negate_expr_p (tem))
1365 if (INTEGRAL_TYPE_P (type)
1366 && (TREE_CODE (tem) != INTEGER_CST
1367 || integer_onep (tem)))
1368 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1369 return fold_build2 (TREE_CODE (t), type,
1370 TREE_OPERAND (t, 0), negate_expr (tem));
1372 tem = TREE_OPERAND (t, 0);
1373 if (negate_expr_p (tem))
1375 if (INTEGRAL_TYPE_P (type)
1376 && (TREE_CODE (tem) != INTEGER_CST
1377 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1379 return fold_build2 (TREE_CODE (t), type,
1380 negate_expr (tem), TREE_OPERAND (t, 1));
1386 /* Convert -((double)float) into (double)(-float). */
1387 if (TREE_CODE (type) == REAL_TYPE)
1389 tem = strip_float_extensions (t);
1390 if (tem != t && negate_expr_p (tem))
1391 return fold_convert (type, negate_expr (tem));
1396 /* Negate -f(x) as f(-x). */
1397 if (negate_mathfn_p (builtin_mathfn_code (t))
1398 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1402 fndecl = get_callee_fndecl (t);
1403 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1404 return build_call_expr (fndecl, 1, arg);
1409 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1410 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1412 tree op1 = TREE_OPERAND (t, 1);
1413 if (TREE_INT_CST_HIGH (op1) == 0
1414 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1415 == TREE_INT_CST_LOW (op1))
1417 tree ntype = TYPE_UNSIGNED (type)
1418 ? signed_type_for (type)
1419 : unsigned_type_for (type);
1420 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1421 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1422 return fold_convert (type, temp);
1434 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
1435 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1436 return NULL_TREE. */
1439 negate_expr (tree t)
1446 type = TREE_TYPE (t);
1447 STRIP_SIGN_NOPS (t);
1449 tem = fold_negate_expr (t);
1451 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1452 return fold_convert (type, tem);
1455 /* Split a tree IN into a constant, literal and variable parts that could be
1456 combined with CODE to make IN. "constant" means an expression with
1457 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1458 commutative arithmetic operation. Store the constant part into *CONP,
1459 the literal in *LITP and return the variable part. If a part isn't
1460 present, set it to null. If the tree does not decompose in this way,
1461 return the entire tree as the variable part and the other parts as null.
1463 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1464 case, we negate an operand that was subtracted. Except if it is a
1465 literal for which we use *MINUS_LITP instead.
1467 If NEGATE_P is true, we are negating all of IN, again except a literal
1468 for which we use *MINUS_LITP instead.
1470 If IN is itself a literal or constant, return it as appropriate.
1472 Note that we do not guarantee that any of the three values will be the
1473 same type as IN, but they will have the same signedness and mode. */
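/* For example, splitting A + 4 with CODE == PLUS_EXPR returns the
   variable part A with *LITP set to 4 and *CONP left null; splitting
   A - 4 instead returns A with *MINUS_LITP set to 4, recording that the
   literal was subtracted.  */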
1476 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1477 tree *minus_litp, int negate_p)
1485 /* Strip any conversions that don't change the machine mode or signedness. */
1486 STRIP_SIGN_NOPS (in);
1488 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1489 || TREE_CODE (in) == FIXED_CST)
1491 else if (TREE_CODE (in) == code
1492 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1493 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1494 /* We can associate addition and subtraction together (even
1495 though the C standard doesn't say so) for integers because
1496 the value is not affected. For reals, the value might be
1497 affected, so we can't. */
1498 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1499 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1501 tree op0 = TREE_OPERAND (in, 0);
1502 tree op1 = TREE_OPERAND (in, 1);
1503 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1504 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1506 /* First see if either of the operands is a literal, then a constant. */
1507 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1508 || TREE_CODE (op0) == FIXED_CST)
1509 *litp = op0, op0 = 0;
1510 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1511 || TREE_CODE (op1) == FIXED_CST)
1512 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1514 if (op0 != 0 && TREE_CONSTANT (op0))
1515 *conp = op0, op0 = 0;
1516 else if (op1 != 0 && TREE_CONSTANT (op1))
1517 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1519 /* If we haven't dealt with either operand, this is not a case we can
1520 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1521 if (op0 != 0 && op1 != 0)
1526 var = op1, neg_var_p = neg1_p;
1528 /* Now do any needed negations. */
1530 *minus_litp = *litp, *litp = 0;
1532 *conp = negate_expr (*conp);
1534 var = negate_expr (var);
1536 else if (TREE_CONSTANT (in))
1544 *minus_litp = *litp, *litp = 0;
1545 else if (*minus_litp)
1546 *litp = *minus_litp, *minus_litp = 0;
1547 *conp = negate_expr (*conp);
1548 var = negate_expr (var);
1554 /* Re-associate trees split by the above function. T1 and T2 are either
1555 expressions to associate or null. Return the new expression, if any. If
1556 we build an operation, do it in TYPE and with CODE. */
1559 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1566 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1567 try to fold this since we will have infinite recursion. But do
1568 deal with any NEGATE_EXPRs. */
1569 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1570 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1572 if (code == PLUS_EXPR)
1574 if (TREE_CODE (t1) == NEGATE_EXPR)
1575 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1576 fold_convert (type, TREE_OPERAND (t1, 0)));
1577 else if (TREE_CODE (t2) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1579 fold_convert (type, TREE_OPERAND (t2, 0)));
1580 else if (integer_zerop (t2))
1581 return fold_convert (type, t1);
1583 else if (code == MINUS_EXPR)
1585 if (integer_zerop (t2))
1586 return fold_convert (type, t1);
1589 return build2 (code, type, fold_convert (type, t1),
1590 fold_convert (type, t2));
1593 return fold_build2 (code, type, fold_convert (type, t1),
1594 fold_convert (type, t2));
1597 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1598 for use in int_const_binop, size_binop and size_diffop. */
1601 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1603 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1605 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1620 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1621 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1622 && TYPE_MODE (type1) == TYPE_MODE (type2);
1626 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1627 to produce a new constant. Return NULL_TREE if we don't know how
1628 to evaluate CODE at compile-time.
1630 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1633 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1635 unsigned HOST_WIDE_INT int1l, int2l;
1636 HOST_WIDE_INT int1h, int2h;
1637 unsigned HOST_WIDE_INT low;
1639 unsigned HOST_WIDE_INT garbagel;
1640 HOST_WIDE_INT garbageh;
1642 tree type = TREE_TYPE (arg1);
1643 int uns = TYPE_UNSIGNED (type);
1645 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1648 int1l = TREE_INT_CST_LOW (arg1);
1649 int1h = TREE_INT_CST_HIGH (arg1);
1650 int2l = TREE_INT_CST_LOW (arg2);
1651 int2h = TREE_INT_CST_HIGH (arg2);
1656 low = int1l | int2l, hi = int1h | int2h;
1660 low = int1l ^ int2l, hi = int1h ^ int2h;
1664 low = int1l & int2l, hi = int1h & int2h;
1670 /* It's unclear from the C standard whether shifts can overflow.
1671 The following code ignores overflow; perhaps a C standard
1672 interpretation ruling is needed. */
1673 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1680 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1685 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1689 neg_double (int2l, int2h, &low, &hi);
1690 add_double (int1l, int1h, low, hi, &low, &hi);
1691 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1695 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1707 if (code == CEIL_DIV_EXPR)
1710 low = int1l / int2l, hi = 0;
1714 /* ... fall through ... */
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1719 if (int2h == 0 && int2l == 1)
1721 low = int1l, hi = int1h;
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1742 if (code == CEIL_MOD_EXPR)
1744 low = int1l % int2l, hi = 0;
1748 /* ... fall through ... */
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1773 low = int2l, hi = int2h;
1782 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1784 /* Propagate overflow flags ourselves. */
1785 if (((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1789 TREE_OVERFLOW (t) = 1;
1793 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1794 ((!uns || is_sizetype) && overflow)
1795 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1800 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1801 constant. We assume ARG1 and ARG2 have the same data type, or at least
1802 are the same kind of constant and the same machine mode. Return zero if
1803 combining the constants is not allowed in the current operating mode.
1805 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1808 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1810 /* Sanity check for the recursive cases. */
1817 if (TREE_CODE (arg1) == INTEGER_CST)
1818 return int_const_binop (code, arg1, arg2, notrunc);
1820 if (TREE_CODE (arg1) == REAL_CST)
1822 enum machine_mode mode;
1825 REAL_VALUE_TYPE value;
1826 REAL_VALUE_TYPE result;
1830 /* The following codes are handled by real_arithmetic. */
1845 d1 = TREE_REAL_CST (arg1);
1846 d2 = TREE_REAL_CST (arg2);
1848 type = TREE_TYPE (arg1);
1849 mode = TYPE_MODE (type);
1851 /* Don't perform operation if we honor signaling NaNs and
1852 either operand is a NaN. */
1853 if (HONOR_SNANS (mode)
1854 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1857 /* Don't perform operation if it would raise a division
1858 by zero exception. */
1859 if (code == RDIV_EXPR
1860 && REAL_VALUES_EQUAL (d2, dconst0)
1861 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1864 /* If either operand is a NaN, just return it. Otherwise, set up
1865 for floating-point trap; we return an overflow. */
1866 if (REAL_VALUE_ISNAN (d1))
1868 else if (REAL_VALUE_ISNAN (d2))
1871 inexact = real_arithmetic (&value, code, &d1, &d2);
1872 real_convert (&result, mode, &value);
1874 /* Don't constant fold this floating point operation if
1875 the result has overflowed and flag_trapping_math. */
1876 if (flag_trapping_math
1877 && MODE_HAS_INFINITIES (mode)
1878 && REAL_VALUE_ISINF (result)
1879 && !REAL_VALUE_ISINF (d1)
1880 && !REAL_VALUE_ISINF (d2))
1883 /* Don't constant fold this floating point operation if the
1884 result may depend upon the run-time rounding mode and
1885 flag_rounding_math is set, or if GCC's software emulation
1886 is unable to accurately represent the result. */
1887 if ((flag_rounding_math
1888 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1889 && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1893 t = build_real (type, result);
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1899 if (TREE_CODE (arg1) == FIXED_CST)
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1908 /* The following codes are handled by fixed_arithmetic. */
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1937 TREE_OVERFLOW (t) = 1;
1938 TREE_CONSTANT_OVERFLOW (t) = 1;
1940 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1941 TREE_CONSTANT_OVERFLOW (t) = 1;
1945 if (TREE_CODE (arg1) == COMPLEX_CST)
1947 tree type = TREE_TYPE (arg1);
1948 tree r1 = TREE_REALPART (arg1);
1949 tree i1 = TREE_IMAGPART (arg1);
1950 tree r2 = TREE_REALPART (arg2);
1951 tree i2 = TREE_IMAGPART (arg2);
1958 real = const_binop (code, r1, r2, notrunc);
1959 imag = const_binop (code, i1, i2, notrunc);
1963 real = const_binop (MINUS_EXPR,
1964 const_binop (MULT_EXPR, r1, r2, notrunc),
1965 const_binop (MULT_EXPR, i1, i2, notrunc),
1967 imag = const_binop (PLUS_EXPR,
1968 const_binop (MULT_EXPR, r1, i2, notrunc),
1969 const_binop (MULT_EXPR, i1, r2, notrunc),
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r2, r2, notrunc),
1978 const_binop (MULT_EXPR, i2, i2, notrunc),
1981 = const_binop (PLUS_EXPR,
1982 const_binop (MULT_EXPR, r1, r2, notrunc),
1983 const_binop (MULT_EXPR, i1, i2, notrunc),
1986 = const_binop (MINUS_EXPR,
1987 const_binop (MULT_EXPR, i1, r2, notrunc),
1988 const_binop (MULT_EXPR, r1, i2, notrunc),
1991 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1992 code = TRUNC_DIV_EXPR;
1994 real = const_binop (code, t1, magsquared, notrunc);
1995 imag = const_binop (code, t2, magsquared, notrunc);
2004 return build_complex (type, real, imag);
2010 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2011 indicates which particular sizetype to create. */
2014 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2016 return build_int_cst (sizetype_tab[(int) kind], number);
2019 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
2020 is a tree code. The type of the result is taken from the operands.
2021 Both must be equivalent integer types, ala int_binop_types_match_p.
2022 If the operands are constant, so is the result. */
2025 size_binop (enum tree_code code, tree arg0, tree arg1)
2027 tree type = TREE_TYPE (arg0);
2029 if (arg0 == error_mark_node || arg1 == error_mark_node)
2030 return error_mark_node;
2032 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2035 /* Handle the special case of two integer constants faster. */
2036 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2038 /* And some specific cases even faster than that. */
2039 if (code == PLUS_EXPR)
2041 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2043 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2046 else if (code == MINUS_EXPR)
2048 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2051 else if (code == MULT_EXPR)
2053 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2057 /* Handle general case of two integer constants. */
2058 return int_const_binop (code, arg0, arg1, 0);
2061 return fold_build2 (code, type, arg0, arg1);
2064 /* Given two values, either both of sizetype or both of bitsizetype,
2065 compute the difference between the two values. Return the value
2066 in the signed type corresponding to the type of the operands. */
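/* For example, with both arguments of type `sizetype',
   size_diffop (size_int (2), size_int (5)) yields the constant -3 in the
   corresponding signed type `ssizetype'.  */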
2069 size_diffop (tree arg0, tree arg1)
2071 tree type = TREE_TYPE (arg0);
2074 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2077 /* If the type is already signed, just do the simple thing. */
2078 if (!TYPE_UNSIGNED (type))
2079 return size_binop (MINUS_EXPR, arg0, arg1);
2081 if (type == sizetype)
2083 else if (type == bitsizetype)
2084 ctype = sbitsizetype;
2086 ctype = signed_type_for (type);
2088 /* If either operand is not a constant, do the conversions to the signed
2089 type and subtract. The hardware will do the right thing with any
2090 overflow in the subtraction. */
2091 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2092 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2093 fold_convert (ctype, arg1));
2095 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2096 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2097 overflow) and negate (which can't either). Special-case a result
2098 of zero while we're here. */
2099 if (tree_int_cst_equal (arg0, arg1))
2100 return build_int_cst (ctype, 0);
2101 else if (tree_int_cst_lt (arg1, arg0))
2102 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2104 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2105 fold_convert (ctype, size_binop (MINUS_EXPR,
2109 /* A subroutine of fold_convert_const handling conversions of an
2110 INTEGER_CST to another integer type. */
2113 fold_convert_const_int_from_int (tree type, const_tree arg1)
2117 /* Given an integer constant, make new constant with new type,
2118 appropriately sign-extended or truncated. */
2119 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2120 TREE_INT_CST_HIGH (arg1),
2121 /* Don't set the overflow when
2122 converting from a pointer, */
2123 !POINTER_TYPE_P (TREE_TYPE (arg1))
2124 /* or to a sizetype with same signedness
2125 and the precision is unchanged.
2126 ??? sizetype is always sign-extended,
2127 but its signedness depends on the
2128 frontend. Thus we see spurious overflows
2129 here if we do not check this. */
2130 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2131 == TYPE_PRECISION (type))
2132 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2133 == TYPE_UNSIGNED (type))
2134 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2135 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2136 || (TREE_CODE (type) == INTEGER_TYPE
2137 && TYPE_IS_SIZETYPE (type)))),
2138 (TREE_INT_CST_HIGH (arg1) < 0
2139 && (TYPE_UNSIGNED (type)
2140 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2141 | TREE_OVERFLOW (arg1));
2146 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2147 to an integer type. */
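/* For example, under the saturating rules implemented below, converting
   the REAL_CST 1.0e30 to a 32-bit signed integer type yields INT_MAX
   (2147483647), converting -1.0e30 yields INT_MIN, and converting a NaN
   yields zero; in each case the overflow flag is set on the result.  */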
2150 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2155 /* The following code implements the floating point to integer
2156 conversion rules required by the Java Language Specification,
2157 that IEEE NaNs are mapped to zero and values that overflow
2158 the target precision saturate, i.e. values greater than
2159 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2160 are mapped to INT_MIN. These semantics are allowed by the
2161 C and C++ standards that simply state that the behavior of
2162 FP-to-integer conversion is unspecified upon overflow. */
2164 HOST_WIDE_INT high, low;
2166 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2170 case FIX_TRUNC_EXPR:
2171 real_trunc (&r, VOIDmode, &x);
2178 /* If R is NaN, return zero and show we have an overflow. */
2179 if (REAL_VALUE_ISNAN (r))
2186 /* See if R is less than the lower bound or greater than the upper bound. */
2191 tree lt = TYPE_MIN_VALUE (type);
2192 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2193 if (REAL_VALUES_LESS (r, l))
2196 high = TREE_INT_CST_HIGH (lt);
2197 low = TREE_INT_CST_LOW (lt);
2203 tree ut = TYPE_MAX_VALUE (type);
2206 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2207 if (REAL_VALUES_LESS (u, r))
2210 high = TREE_INT_CST_HIGH (ut);
2211 low = TREE_INT_CST_LOW (ut);
2217 REAL_VALUE_TO_INT (&low, &high, r);
2219 t = force_fit_type_double (type, low, high, -1,
2220 overflow | TREE_OVERFLOW (arg1));
2224 /* A subroutine of fold_convert_const handling conversions of a
2225 FIXED_CST to an integer type. */
2228 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2231 double_int temp, temp_trunc;
2234 /* Right shift FIXED_CST to temp by fbit. */
2235 temp = TREE_FIXED_CST (arg1).data;
2236 mode = TREE_FIXED_CST (arg1).mode;
2237 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2239 lshift_double (temp.low, temp.high,
2240 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2241 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2243 /* Left shift temp to temp_trunc by fbit. */
2244 lshift_double (temp.low, temp.high,
2245 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2246 &temp_trunc.low, &temp_trunc.high,
2247 SIGNED_FIXED_POINT_MODE_P (mode));
2254 temp_trunc.high = 0;
2257 /* If FIXED_CST is negative, we need to round the value toward 0:
2258 if the fractional bits are nonzero, add 1 to temp. */
2259 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2260 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2265 temp = double_int_add (temp, one);
2268 /* Given a fixed-point constant, make new constant with new type,
2269 appropriately sign-extended or truncated. */
2270 t = force_fit_type_double (type, temp.low, temp.high, -1,
2272 && (TYPE_UNSIGNED (type)
2273 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2274 | TREE_OVERFLOW (arg1));
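/* Illustrative sketch, not GCC code (the helper name is made up): the
   rounding described above for a signed 64-bit fixed-point value with FBIT
   fractional bits.  An arithmetic right shift rounds toward minus infinity,
   so a negative value with nonzero fractional bits needs 1 added back to
   round toward zero.  Assumes the usual arithmetic shift of negative values
   and FBIT < 63.  */
static long long
fixed_to_int (long long fx, unsigned fbit)
{
  long long frac_mask = ((long long) 1 << fbit) - 1;
  long long t = fx >> fbit;		/* rounds toward -infinity */
  if (fx < 0 && (fx & frac_mask) != 0)
    t += 1;				/* correct to round toward zero */
  return t;
}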
2279 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2280 to another floating point type. */
2283 fold_convert_const_real_from_real (tree type, const_tree arg1)
2285 REAL_VALUE_TYPE value;
2288 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2289 t = build_real (type, value);
2291 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2295 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2296 to a floating point type. */
2299 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2301 REAL_VALUE_TYPE value;
2304 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2305 t = build_real (type, value);
2307 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2308 TREE_CONSTANT_OVERFLOW (t)
2309 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2313 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2314 to another fixed-point type. */
2317 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2319 FIXED_VALUE_TYPE value;
2323 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2324 TYPE_SATURATING (type));
2325 t = build_fixed (type, value);
2327 /* Propagate overflow flags. */
2328 if (overflow_p | TREE_OVERFLOW (arg1))
2330 TREE_OVERFLOW (t) = 1;
2331 TREE_CONSTANT_OVERFLOW (t) = 1;
2333 else if (TREE_CONSTANT_OVERFLOW (arg1))
2334 TREE_CONSTANT_OVERFLOW (t) = 1;
2338 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2339 to a fixed-point type. */
2342 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2344 FIXED_VALUE_TYPE value;
2348 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2349 TREE_INT_CST (arg1),
2350 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2351 TYPE_SATURATING (type));
2352 t = build_fixed (type, value);
2354 /* Propagate overflow flags. */
2355 if (overflow_p | TREE_OVERFLOW (arg1))
2357 TREE_OVERFLOW (t) = 1;
2358 TREE_CONSTANT_OVERFLOW (t) = 1;
2360 else if (TREE_CONSTANT_OVERFLOW (arg1))
2361 TREE_CONSTANT_OVERFLOW (t) = 1;
2365 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2366 to a fixed-point type. */
2369 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2371 FIXED_VALUE_TYPE value;
2375 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2376 &TREE_REAL_CST (arg1),
2377 TYPE_SATURATING (type));
2378 t = build_fixed (type, value);
2380 /* Propagate overflow flags. */
2381 if (overflow_p | TREE_OVERFLOW (arg1))
2383 TREE_OVERFLOW (t) = 1;
2384 TREE_CONSTANT_OVERFLOW (t) = 1;
2386 else if (TREE_CONSTANT_OVERFLOW (arg1))
2387 TREE_CONSTANT_OVERFLOW (t) = 1;
2391 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2392 type TYPE. If no simplification can be done return NULL_TREE. */
2395 fold_convert_const (enum tree_code code, tree type, tree arg1)
2397 if (TREE_TYPE (arg1) == type)
2400 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2402 if (TREE_CODE (arg1) == INTEGER_CST)
2403 return fold_convert_const_int_from_int (type, arg1);
2404 else if (TREE_CODE (arg1) == REAL_CST)
2405 return fold_convert_const_int_from_real (code, type, arg1);
2406 else if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_int_from_fixed (type, arg1);
2409 else if (TREE_CODE (type) == REAL_TYPE)
2411 if (TREE_CODE (arg1) == INTEGER_CST)
2412 return build_real_from_int_cst (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_real_from_real (type, arg1);
2415 else if (TREE_CODE (arg1) == FIXED_CST)
2416 return fold_convert_const_real_from_fixed (type, arg1);
2418 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2420 if (TREE_CODE (arg1) == FIXED_CST)
2421 return fold_convert_const_fixed_from_fixed (type, arg1);
2422 else if (TREE_CODE (arg1) == INTEGER_CST)
2423 return fold_convert_const_fixed_from_int (type, arg1);
2424 else if (TREE_CODE (arg1) == REAL_CST)
2425 return fold_convert_const_fixed_from_real (type, arg1);
2430 /* Construct a vector of zero elements of vector type TYPE. */
2433 build_zero_vector (tree type)
2438 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2439 units = TYPE_VECTOR_SUBPARTS (type);
2442 for (i = 0; i < units; i++)
2443 list = tree_cons (NULL_TREE, elem, list);
2444 return build_vector (type, list);
2447 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2450 fold_convertible_p (const_tree type, const_tree arg)
2452 tree orig = TREE_TYPE (arg);
2457 if (TREE_CODE (arg) == ERROR_MARK
2458 || TREE_CODE (type) == ERROR_MARK
2459 || TREE_CODE (orig) == ERROR_MARK)
2462 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2465 switch (TREE_CODE (type))
2467 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2468 case POINTER_TYPE: case REFERENCE_TYPE:
2470 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2471 || TREE_CODE (orig) == OFFSET_TYPE)
2473 return (TREE_CODE (orig) == VECTOR_TYPE
2474 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2477 case FIXED_POINT_TYPE:
2481 return TREE_CODE (type) == TREE_CODE (orig);
2488 /* Convert expression ARG to type TYPE. Used by the middle-end for
2489 simple conversions in preference to calling the front-end's convert. */
2492 fold_convert (tree type, tree arg)
2494 tree orig = TREE_TYPE (arg);
2500 if (TREE_CODE (arg) == ERROR_MARK
2501 || TREE_CODE (type) == ERROR_MARK
2502 || TREE_CODE (orig) == ERROR_MARK)
2503 return error_mark_node;
2505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2506 return fold_build1 (NOP_EXPR, type, arg);
2508 switch (TREE_CODE (type))
2510 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2513 if (TREE_CODE (arg) == INTEGER_CST)
2515 tem = fold_convert_const (NOP_EXPR, type, arg);
2516 if (tem != NULL_TREE)
2519 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 || TREE_CODE (orig) == OFFSET_TYPE)
2521 return fold_build1 (NOP_EXPR, type, arg);
2522 if (TREE_CODE (orig) == COMPLEX_TYPE)
2524 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2525 return fold_convert (type, tem);
2527 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2528 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2529 return fold_build1 (NOP_EXPR, type, arg);
2532 if (TREE_CODE (arg) == INTEGER_CST)
2534 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2535 if (tem != NULL_TREE)
2538 else if (TREE_CODE (arg) == REAL_CST)
2540 tem = fold_convert_const (NOP_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2544 else if (TREE_CODE (arg) == FIXED_CST)
2546 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2551 switch (TREE_CODE (orig))
2554 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2555 case POINTER_TYPE: case REFERENCE_TYPE:
2556 return fold_build1 (FLOAT_EXPR, type, arg);
2559 return fold_build1 (NOP_EXPR, type, arg);
2561 case FIXED_POINT_TYPE:
2562 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2565 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2566 return fold_convert (type, tem);
2572 case FIXED_POINT_TYPE:
2573 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2574 || TREE_CODE (arg) == REAL_CST)
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2581 switch (TREE_CODE (orig))
2583 case FIXED_POINT_TYPE:
2588 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2591 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2592 return fold_convert (type, tem);
2599 switch (TREE_CODE (orig))
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2605 case FIXED_POINT_TYPE:
2606 return build2 (COMPLEX_EXPR, type,
2607 fold_convert (TREE_TYPE (type), arg),
2608 fold_convert (TREE_TYPE (type), integer_zero_node));
2613 if (TREE_CODE (arg) == COMPLEX_EXPR)
2615 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2616 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2617 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2620 arg = save_expr (arg);
2621 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2623 rpart = fold_convert (TREE_TYPE (type), rpart);
2624 ipart = fold_convert (TREE_TYPE (type), ipart);
2625 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2633 if (integer_zerop (arg))
2634 return build_zero_vector (type);
2635 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2636 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2637 || TREE_CODE (orig) == VECTOR_TYPE);
2638 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2641 tem = fold_ignored_result (arg);
2642 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2644 return fold_build1 (NOP_EXPR, type, tem);
2651 /* Return false if expr can be assumed not to be an lvalue, true
2655 maybe_lvalue_p (const_tree x)
2657 /* We only need to wrap lvalue tree codes. */
2658 switch (TREE_CODE (x))
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2672 case ARRAY_RANGE_REF:
2678 case PREINCREMENT_EXPR:
2679 case PREDECREMENT_EXPR:
2681 case TRY_CATCH_EXPR:
2682 case WITH_CLEANUP_EXPR:
2685 case GIMPLE_MODIFY_STMT:
2694 /* Assume the worst for front-end tree codes. */
2695 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2703 /* Return an expr equal to X but certainly not valid as an lvalue. */
2708 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2713 if (! maybe_lvalue_p (x))
2715 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2718 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2719 Zero means allow extended lvalues. */
2721 int pedantic_lvalues;
2723 /* When pedantic, return an expr equal to X but certainly not valid as a
2724 pedantic lvalue. Otherwise, return X. */
2727 pedantic_non_lvalue (tree x)
2729 if (pedantic_lvalues)
2730 return non_lvalue (x);
2735 /* Given a tree comparison code, return the code that is the logical inverse
2736 of the given code. It is not safe to do this for floating-point
2737 comparisons, except for NE_EXPR and EQ_EXPR, so we are also told whether
2738 NaNs are honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2741 invert_tree_comparison (enum tree_code code, bool honor_nans)
2743 if (honor_nans && flag_trapping_math)
2753 return honor_nans ? UNLE_EXPR : LE_EXPR;
2755 return honor_nans ? UNLT_EXPR : LT_EXPR;
2757 return honor_nans ? UNGE_EXPR : GE_EXPR;
2759 return honor_nans ? UNGT_EXPR : GT_EXPR;
2773 return UNORDERED_EXPR;
2774 case UNORDERED_EXPR:
2775 return ORDERED_EXPR;
2781 /* Similar, but return the comparison that results if the operands are
2782 swapped. This is safe for floating-point. */
2785 swap_tree_comparison (enum tree_code code)
2792 case UNORDERED_EXPR:
2818 /* Convert a comparison tree code from an enum tree_code representation
2819 into a compcode bit-based encoding. This function is the inverse of
2820 compcode_to_comparison. */
2822 static enum comparison_code
2823 comparison_to_compcode (enum tree_code code)
2840 return COMPCODE_ORD;
2841 case UNORDERED_EXPR:
2842 return COMPCODE_UNORD;
2844 return COMPCODE_UNLT;
2846 return COMPCODE_UNEQ;
2848 return COMPCODE_UNLE;
2850 return COMPCODE_UNGT;
2852 return COMPCODE_LTGT;
2854 return COMPCODE_UNGE;
2860 /* Convert a compcode bit-based encoding of a comparison operator back
2861 to GCC's enum tree_code representation. This function is the
2862 inverse of comparison_to_compcode. */
2864 static enum tree_code
2865 compcode_to_comparison (enum comparison_code code)
2882 return ORDERED_EXPR;
2883 case COMPCODE_UNORD:
2884 return UNORDERED_EXPR;
2902 /* Return a tree for the comparison which is the combination of
2903 doing the AND or OR (depending on CODE) of the two operations LCODE
2904 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2905 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2906 if this makes the transformation invalid. */
2909 combine_comparisons (enum tree_code code, enum tree_code lcode,
2910 enum tree_code rcode, tree truth_type,
2911 tree ll_arg, tree lr_arg)
2913 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2914 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2915 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2916 enum comparison_code compcode;
2920 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2921 compcode = lcompcode & rcompcode;
2924 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2925 compcode = lcompcode | rcompcode;
2934 /* Eliminate unordered comparisons, as well as LTGT and ORD
2935 which are not used unless the mode has NaNs. */
2936 compcode &= ~COMPCODE_UNORD;
2937 if (compcode == COMPCODE_LTGT)
2938 compcode = COMPCODE_NE;
2939 else if (compcode == COMPCODE_ORD)
2940 compcode = COMPCODE_TRUE;
2942 else if (flag_trapping_math)
2944 /* Check that the original operation and the optimized ones will trap
2945 under the same condition. */
2946 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2947 && (lcompcode != COMPCODE_EQ)
2948 && (lcompcode != COMPCODE_ORD);
2949 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2950 && (rcompcode != COMPCODE_EQ)
2951 && (rcompcode != COMPCODE_ORD);
2952 bool trap = (compcode & COMPCODE_UNORD) == 0
2953 && (compcode != COMPCODE_EQ)
2954 && (compcode != COMPCODE_ORD);
2956 /* In a short-circuited boolean expression the LHS might be
2957 such that the RHS, if evaluated, will never trap. For
2958 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2959 if neither x nor y is NaN. (This is a mixed blessing: for
2960 example, the expression above will never trap, hence
2961 optimizing it to x < y would be invalid). */
2962 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2963 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2966 /* If the comparison was short-circuited, and only the RHS
2967 trapped, we may now generate a spurious trap. */
2969 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2972 /* If we changed the conditions that cause a trap, we lose. */
2973 if ((ltrap || rtrap) != trap)
2977 if (compcode == COMPCODE_TRUE)
2978 return constant_boolean_node (true, truth_type);
2979 else if (compcode == COMPCODE_FALSE)
2980 return constant_boolean_node (false, truth_type);
2982 return fold_build2 (compcode_to_comparison (compcode),
2983 truth_type, ll_arg, lr_arg);
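/* Illustrative sketch of the idea behind the compcode encoding used by
   combine_comparisons above.  The bit values here are made up for the
   example and are not necessarily GCC's: each comparison is the set of
   orderings it accepts, so combining two comparisons of the same operands
   with && or || is simply bit-wise AND or OR of those sets.  */
enum demo_compcode
{
  DEMO_LT = 1, DEMO_EQ = 2, DEMO_GT = 4, DEMO_UNORD = 8,
  DEMO_LE = DEMO_LT | DEMO_EQ, DEMO_GE = DEMO_GT | DEMO_EQ
};
/* For instance, (a <= b) && (a >= b) yields DEMO_LE & DEMO_GE == DEMO_EQ,
   so the combined test folds to a == b.  */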
2986 /* Return nonzero if CODE is a tree code that represents a truth value. */
2989 truth_value_p (enum tree_code code)
2991 return (TREE_CODE_CLASS (code) == tcc_comparison
2992 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2993 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2994 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2997 /* Return nonzero if two operands (typically of the same tree node)
2998 are necessarily equal. If either argument has side-effects this
2999 function returns zero. FLAGS modifies behavior as follows:
3001 If OEP_ONLY_CONST is set, only return nonzero for constants.
3002 This function tests whether the operands are indistinguishable;
3003 it does not test whether they are equal using C's == operation.
3004 The distinction is important for IEEE floating point, because
3005 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3006 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3008 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3009 even though it may hold multiple values during a function.
3010 This is because a GCC tree node guarantees that nothing else is
3011 executed between the evaluation of its "operands" (which may often
3012 be evaluated in arbitrary order). Hence if the operands themselves
3013 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3014 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3015 unset means assuming isochronic (or instantaneous) tree equivalence.
3016 Unless comparing arbitrary expression trees, such as from different
3017 statements, this flag can usually be left unset.
3019 If OEP_PURE_SAME is set, then pure functions with identical arguments
3020 are considered the same. It is used when the caller has other ways
3021 to ensure that global memory is unchanged in between. */
3024 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3026 /* If either is ERROR_MARK, they aren't equal. */
3027 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3030 /* If the two types don't have the same signedness, then we can't consider
3031 them equal. We must check this before the STRIP_NOPS calls
3032 because they may change the signedness of the arguments. */
3033 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3036 /* If the two types don't have the same precision, then it is not safe to strip NOPs. */
3038 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3044 /* In case both args are comparisons but with different comparison
3045 code, try to swap the comparison operands of one arg to produce
3046 a match and compare that variant. */
3047 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3048 && COMPARISON_CLASS_P (arg0)
3049 && COMPARISON_CLASS_P (arg1))
3051 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3053 if (TREE_CODE (arg0) == swap_code)
3054 return operand_equal_p (TREE_OPERAND (arg0, 0),
3055 TREE_OPERAND (arg1, 1), flags)
3056 && operand_equal_p (TREE_OPERAND (arg0, 1),
3057 TREE_OPERAND (arg1, 0), flags);
3060 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3061 /* This is needed for conversions and for COMPONENT_REF.
3062 Might as well play it safe and always test this. */
3063 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3064 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3065 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3068 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3069 We don't care about side effects in that case because the SAVE_EXPR
3070 takes care of that for us. In all other cases, two expressions are
3071 equal if they have no side effects. If we have two identical
3072 expressions with side effects that should be treated the same due
3073 to the only side effects being identical SAVE_EXPR's, that will
3074 be detected in the recursive calls below. */
3075 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3076 && (TREE_CODE (arg0) == SAVE_EXPR
3077 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3080 /* Next handle constant cases, those for which we can return 1 even
3081 if ONLY_CONST is set. */
3082 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3083 switch (TREE_CODE (arg0))
3086 return tree_int_cst_equal (arg0, arg1);
3089 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3090 TREE_FIXED_CST (arg1));
3093 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3094 TREE_REAL_CST (arg1)))
3098 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3100 /* If we do not distinguish between signed and unsigned zero,
3101 consider them equal. */
3102 if (real_zerop (arg0) && real_zerop (arg1))
3111 v1 = TREE_VECTOR_CST_ELTS (arg0);
3112 v2 = TREE_VECTOR_CST_ELTS (arg1);
3115 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3118 v1 = TREE_CHAIN (v1);
3119 v2 = TREE_CHAIN (v2);
3126 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3128 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3132 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3133 && ! memcmp (TREE_STRING_POINTER (arg0),
3134 TREE_STRING_POINTER (arg1),
3135 TREE_STRING_LENGTH (arg0)));
3138 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3144 if (flags & OEP_ONLY_CONST)
3147 /* Define macros to test an operand from arg0 and arg1 for equality and a
3148 variant that allows null and views null as being different from any
3149 non-null value. In the latter case, if either is null, then both
3150 must be; otherwise, do the normal comparison. */
3151 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3152 TREE_OPERAND (arg1, N), flags)
3154 #define OP_SAME_WITH_NULL(N) \
3155 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3156 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3158 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3161 /* Two conversions are equal only if signedness and modes match. */
3162 switch (TREE_CODE (arg0))
3166 case FIX_TRUNC_EXPR:
3167 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3168 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3178 case tcc_comparison:
3180 if (OP_SAME (0) && OP_SAME (1))
3183 /* For commutative ops, allow the other order. */
3184 return (commutative_tree_code (TREE_CODE (arg0))
3185 && operand_equal_p (TREE_OPERAND (arg0, 0),
3186 TREE_OPERAND (arg1, 1), flags)
3187 && operand_equal_p (TREE_OPERAND (arg0, 1),
3188 TREE_OPERAND (arg1, 0), flags));
3191 /* If either of the pointer (or reference) expressions we are
3192 dereferencing contain a side effect, these cannot be equal. */
3193 if (TREE_SIDE_EFFECTS (arg0)
3194 || TREE_SIDE_EFFECTS (arg1))
3197 switch (TREE_CODE (arg0))
3200 case ALIGN_INDIRECT_REF:
3201 case MISALIGNED_INDIRECT_REF:
3207 case ARRAY_RANGE_REF:
3208 /* Operands 2 and 3 may be null.
3209 Compare the array index by value if it is constant first as we
3210 may have different types but same value here. */
3212 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3213 TREE_OPERAND (arg1, 1))
3215 && OP_SAME_WITH_NULL (2)
3216 && OP_SAME_WITH_NULL (3));
3219 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3220 may be NULL when we're called to compare MEM_EXPRs. */
3221 return OP_SAME_WITH_NULL (0)
3223 && OP_SAME_WITH_NULL (2);
3226 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3232 case tcc_expression:
3233 switch (TREE_CODE (arg0))
3236 case TRUTH_NOT_EXPR:
3239 case TRUTH_ANDIF_EXPR:
3240 case TRUTH_ORIF_EXPR:
3241 return OP_SAME (0) && OP_SAME (1);
3243 case TRUTH_AND_EXPR:
3245 case TRUTH_XOR_EXPR:
3246 if (OP_SAME (0) && OP_SAME (1))
3249 /* Otherwise take into account this is a commutative operation. */
3250 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3251 TREE_OPERAND (arg1, 1), flags)
3252 && operand_equal_p (TREE_OPERAND (arg0, 1),
3253 TREE_OPERAND (arg1, 0), flags));
3260 switch (TREE_CODE (arg0))
3263 /* If the CALL_EXPRs call different functions, then they
3264 clearly cannot be equal. */
3265 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3270 unsigned int cef = call_expr_flags (arg0);
3271 if (flags & OEP_PURE_SAME)
3272 cef &= ECF_CONST | ECF_PURE;
3279 /* Now see if all the arguments are the same. */
3281 const_call_expr_arg_iterator iter0, iter1;
3283 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3284 a1 = first_const_call_expr_arg (arg1, &iter1);
3286 a0 = next_const_call_expr_arg (&iter0),
3287 a1 = next_const_call_expr_arg (&iter1))
3288 if (! operand_equal_p (a0, a1, flags))
3291 /* If we get here and both argument lists are exhausted
3292 then the CALL_EXPRs are equal. */
3293 return ! (a0 || a1);
3299 case tcc_declaration:
3300 /* Consider __builtin_sqrt equal to sqrt. */
3301 return (TREE_CODE (arg0) == FUNCTION_DECL
3302 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3303 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3304 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3311 #undef OP_SAME_WITH_NULL
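/* Illustrative sketch, not GCC code (the helper name is made up): why the
   REAL_CST case above checks HONOR_SIGNED_ZEROS instead of relying on ==.
   -0.0 and 0.0 compare equal, yet they are distinguishable values
   (e.g. 1.0 / -0.0 is -inf while 1.0 / 0.0 is +inf), so they must not be
   treated as interchangeable operands.  */
static int
zeros_compare_equal (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz;		/* returns 1 even though the values differ */
}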
3314 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3315 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3317 When in doubt, return 0. */
3320 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3322 int unsignedp1, unsignedpo;
3323 tree primarg0, primarg1, primother;
3324 unsigned int correct_width;
3326 if (operand_equal_p (arg0, arg1, 0))
3329 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3330 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3333 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3334 and see if the inner values are the same. This removes any
3335 signedness comparison, which doesn't matter here. */
3336 primarg0 = arg0, primarg1 = arg1;
3337 STRIP_NOPS (primarg0);
3338 STRIP_NOPS (primarg1);
3339 if (operand_equal_p (primarg0, primarg1, 0))
3342 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3343 actual comparison operand, ARG0.
3345 First throw away any conversions to wider types
3346 already present in the operands. */
3348 primarg1 = get_narrower (arg1, &unsignedp1);
3349 primother = get_narrower (other, &unsignedpo);
3351 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3352 if (unsignedp1 == unsignedpo
3353 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3354 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3356 tree type = TREE_TYPE (arg0);
3358 /* Make sure shorter operand is extended the right way
3359 to match the longer operand. */
3360 primarg1 = fold_convert (signed_or_unsigned_type_for
3361 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3363 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3370 /* See if ARG is an expression that is either a comparison or is performing
3371 arithmetic on comparisons. The comparisons must only be comparing
3372 two different values, which will be stored in *CVAL1 and *CVAL2; if
3373 they are nonzero it means that some operands have already been found.
3374 No variables may be used anywhere else in the expression except in the
3375 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3376 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3378 If this is true, return 1. Otherwise, return zero. */
3381 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3383 enum tree_code code = TREE_CODE (arg);
3384 enum tree_code_class class = TREE_CODE_CLASS (code);
3386 /* We can handle some of the tcc_expression cases here. */
3387 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3389 else if (class == tcc_expression
3390 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3391 || code == COMPOUND_EXPR))
3394 else if (class == tcc_expression && code == SAVE_EXPR
3395 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3397 /* If we've already found a CVAL1 or CVAL2, this expression is
3398 too complex to handle. */
3399 if (*cval1 || *cval2)
3409 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3412 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3413 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3414 cval1, cval2, save_p));
3419 case tcc_expression:
3420 if (code == COND_EXPR)
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3422 cval1, cval2, save_p)
3423 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3424 cval1, cval2, save_p)
3425 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3426 cval1, cval2, save_p));
3429 case tcc_comparison:
3430 /* First see if we can handle the first operand, then the second. For
3431 the second operand, we know *CVAL1 can't be zero. It must be that
3432 one side of the comparison is each of the values; test for the
3433 case where this isn't true by failing if the two operands
3436 if (operand_equal_p (TREE_OPERAND (arg, 0),
3437 TREE_OPERAND (arg, 1), 0))
3441 *cval1 = TREE_OPERAND (arg, 0);
3442 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3444 else if (*cval2 == 0)
3445 *cval2 = TREE_OPERAND (arg, 0);
3446 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3451 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 1);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3467 /* ARG is a tree that is known to contain just arithmetic operations and
3468 comparisons. Evaluate the operations in the tree substituting NEW0 for
3469 any occurrence of OLD0 as an operand of a comparison and likewise for
3473 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3475 tree type = TREE_TYPE (arg);
3476 enum tree_code code = TREE_CODE (arg);
3477 enum tree_code_class class = TREE_CODE_CLASS (code);
3479 /* We can handle some of the tcc_expression cases here. */
3480 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3482 else if (class == tcc_expression
3483 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3489 return fold_build1 (code, type,
3490 eval_subst (TREE_OPERAND (arg, 0),
3491 old0, new0, old1, new1));
3494 return fold_build2 (code, type,
3495 eval_subst (TREE_OPERAND (arg, 0),
3496 old0, new0, old1, new1),
3497 eval_subst (TREE_OPERAND (arg, 1),
3498 old0, new0, old1, new1));
3500 case tcc_expression:
3504 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3507 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3510 return fold_build3 (code, type,
3511 eval_subst (TREE_OPERAND (arg, 0),
3512 old0, new0, old1, new1),
3513 eval_subst (TREE_OPERAND (arg, 1),
3514 old0, new0, old1, new1),
3515 eval_subst (TREE_OPERAND (arg, 2),
3516 old0, new0, old1, new1));
3520 /* Fall through - ??? */
3522 case tcc_comparison:
3524 tree arg0 = TREE_OPERAND (arg, 0);
3525 tree arg1 = TREE_OPERAND (arg, 1);
3527 /* We need to check both for exact equality and tree equality. The
3528 former will be true if the operand has a side-effect. In that
3529 case, we know the operand occurred exactly once. */
3531 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3533 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3536 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3538 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3541 return fold_build2 (code, type, arg0, arg1);
3549 /* Return a tree for the case when the result of an expression is RESULT
3550 converted to TYPE and OMITTED was previously an operand of the expression
3551 but is now not needed (e.g., we folded OMITTED * 0).
3553 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3554 the conversion of RESULT to TYPE. */
3557 omit_one_operand (tree type, tree result, tree omitted)
3559 tree t = fold_convert (type, result);
3561 /* If the resulting operand is an empty statement, just return the omitted
3562 statement cast to void. */
3563 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3564 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3566 if (TREE_SIDE_EFFECTS (omitted))
3567 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3569 return non_lvalue (t);
3572 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3575 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3577 tree t = fold_convert (type, result);
3579 /* If the resulting operand is an empty statement, just return the omitted
3580 statement cast to void. */
3581 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3582 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3584 if (TREE_SIDE_EFFECTS (omitted))
3585 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3587 return pedantic_non_lvalue (t);
3590 /* Return a tree for the case when the result of an expression is RESULT
3591 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3592 of the expression but are now not needed.
3594 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3595 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3596 evaluated before OMITTED2. Otherwise, if neither has side effects,
3597 just do the conversion of RESULT to TYPE. */
3600 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3602 tree t = fold_convert (type, result);
3604 if (TREE_SIDE_EFFECTS (omitted2))
3605 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3606 if (TREE_SIDE_EFFECTS (omitted1))
3607 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3609 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3613 /* Return a simplified tree node for the truth-negation of ARG. This
3614 never alters ARG itself. We assume that ARG is an operation that
3615 returns a truth value (0 or 1).
3617 FIXME: one would think we would fold the result, but it causes
3618 problems with the dominator optimizer. */
3621 fold_truth_not_expr (tree arg)
3623 tree type = TREE_TYPE (arg);
3624 enum tree_code code = TREE_CODE (arg);
3626 /* If this is a comparison, we can simply invert it, except for
3627 floating-point non-equality comparisons, in which case we just
3628 enclose a TRUTH_NOT_EXPR around what we have. */
3630 if (TREE_CODE_CLASS (code) == tcc_comparison)
3632 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3633 if (FLOAT_TYPE_P (op_type)
3634 && flag_trapping_math
3635 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3636 && code != NE_EXPR && code != EQ_EXPR)
3640 code = invert_tree_comparison (code,
3641 HONOR_NANS (TYPE_MODE (op_type)));
3642 if (code == ERROR_MARK)
3645 return build2 (code, type,
3646 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3653 return constant_boolean_node (integer_zerop (arg), type);
3655 case TRUTH_AND_EXPR:
3656 return build2 (TRUTH_OR_EXPR, type,
3657 invert_truthvalue (TREE_OPERAND (arg, 0)),
3658 invert_truthvalue (TREE_OPERAND (arg, 1)));
3661 return build2 (TRUTH_AND_EXPR, type,
3662 invert_truthvalue (TREE_OPERAND (arg, 0)),
3663 invert_truthvalue (TREE_OPERAND (arg, 1)));
3665 case TRUTH_XOR_EXPR:
3666 /* Here we can invert either operand. We invert the first operand
3667 unless the second operand is a TRUTH_NOT_EXPR in which case our
3668 result is the XOR of the first operand with the inside of the
3669 negation of the second operand. */
3671 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3672 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3673 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3675 return build2 (TRUTH_XOR_EXPR, type,
3676 invert_truthvalue (TREE_OPERAND (arg, 0)),
3677 TREE_OPERAND (arg, 1));
3679 case TRUTH_ANDIF_EXPR:
3680 return build2 (TRUTH_ORIF_EXPR, type,
3681 invert_truthvalue (TREE_OPERAND (arg, 0)),
3682 invert_truthvalue (TREE_OPERAND (arg, 1)));
3684 case TRUTH_ORIF_EXPR:
3685 return build2 (TRUTH_ANDIF_EXPR, type,
3686 invert_truthvalue (TREE_OPERAND (arg, 0)),
3687 invert_truthvalue (TREE_OPERAND (arg, 1)));
3689 case TRUTH_NOT_EXPR:
3690 return TREE_OPERAND (arg, 0);
3694 tree arg1 = TREE_OPERAND (arg, 1);
3695 tree arg2 = TREE_OPERAND (arg, 2);
3696 /* A COND_EXPR may have a throw as one operand, which
3697 then has void type. Just leave void operands
3699 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3700 VOID_TYPE_P (TREE_TYPE (arg1))
3701 ? arg1 : invert_truthvalue (arg1),
3702 VOID_TYPE_P (TREE_TYPE (arg2))
3703 ? arg2 : invert_truthvalue (arg2));
3707 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3708 invert_truthvalue (TREE_OPERAND (arg, 1)));
3710 case NON_LVALUE_EXPR:
3711 return invert_truthvalue (TREE_OPERAND (arg, 0));
3714 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3715 return build1 (TRUTH_NOT_EXPR, type, arg);
3719 return build1 (TREE_CODE (arg), type,
3720 invert_truthvalue (TREE_OPERAND (arg, 0)));
3723 if (!integer_onep (TREE_OPERAND (arg, 1)))
3725 return build2 (EQ_EXPR, type, arg,
3726 build_int_cst (type, 0));
3729 return build1 (TRUTH_NOT_EXPR, type, arg);
3731 case CLEANUP_POINT_EXPR:
3732 return build1 (CLEANUP_POINT_EXPR, type,
3733 invert_truthvalue (TREE_OPERAND (arg, 0)));
3742 /* Return a simplified tree node for the truth-negation of ARG. This
3743 never alters ARG itself. We assume that ARG is an operation that
3744 returns a truth value (0 or 1).
3746 FIXME: one would think we would fold the result, but it causes
3747 problems with the dominator optimizer. */
3750 invert_truthvalue (tree arg)
3754 if (TREE_CODE (arg) == ERROR_MARK)
3757 tem = fold_truth_not_expr (arg);
3759 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3764 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3765 operands are another bit-wise operation with a common input. If so,
3766 distribute the bit operations to save an operation and possibly two if
3767 constants are involved. For example, convert
3768 (A | B) & (A | C) into A | (B & C)
3769 Further simplification will occur if B and C are constants.
3771 If this optimization cannot be done, 0 will be returned. */
3774 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3779 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3780 || TREE_CODE (arg0) == code
3781 || (TREE_CODE (arg0) != BIT_AND_EXPR
3782 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3785 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3787 common = TREE_OPERAND (arg0, 0);
3788 left = TREE_OPERAND (arg0, 1);
3789 right = TREE_OPERAND (arg1, 1);
3791 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3793 common = TREE_OPERAND (arg0, 0);
3794 left = TREE_OPERAND (arg0, 1);
3795 right = TREE_OPERAND (arg1, 0);
3797 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3799 common = TREE_OPERAND (arg0, 1);
3800 left = TREE_OPERAND (arg0, 0);
3801 right = TREE_OPERAND (arg1, 1);
3803 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3805 common = TREE_OPERAND (arg0, 1);
3806 left = TREE_OPERAND (arg0, 0);
3807 right = TREE_OPERAND (arg1, 0);
3812 return fold_build2 (TREE_CODE (arg0), type, common,
3813 fold_build2 (code, type, left, right));
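/* Illustrative sketch, not GCC code (the helper name is made up): the
   distribution above on plain unsigned ints.  Both forms compute the same
   value, and the rewritten form needs one fewer bit-wise operation,
   possibly two fewer once B and C are constants that fold together.  */
static unsigned
distribute_demo (unsigned a, unsigned b, unsigned c)
{
  /* (a | b) & (a | c) == a | (b & c).  */
  return a | (b & c);
}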
3816 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3817 with code CODE. This optimization is unsafe. */
3819 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3821 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3822 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3824 /* (A / C) +- (B / C) -> (A +- B) / C. */
3826 && operand_equal_p (TREE_OPERAND (arg0, 1),
3827 TREE_OPERAND (arg1, 1), 0))
3828 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3829 fold_build2 (code, type,
3830 TREE_OPERAND (arg0, 0),
3831 TREE_OPERAND (arg1, 0)),
3832 TREE_OPERAND (arg0, 1));
3834 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3835 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3836 TREE_OPERAND (arg1, 0), 0)
3837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3838 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3840 REAL_VALUE_TYPE r0, r1;
3841 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3842 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3844 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3846 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3847 real_arithmetic (&r0, code, &r0, &r1);
3848 return fold_build2 (MULT_EXPR, type,
3849 TREE_OPERAND (arg0, 0),
3850 build_real (type, r0));
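/* Illustrative sketch, not GCC code (the helper name is made up): the two
   rewrites above on plain doubles.  Both can change rounding and may
   introduce spurious overflow or underflow, which is why the comment above
   calls the optimization unsafe.  */
static double
div_distribute_demo (double a, double b, double c1, double c2)
{
  double t1 = (a + b) / c1;			/* was a / c1 + b / c1 */
  double t2 = a * (1.0 / c1 + 1.0 / c2);	/* was a / c1 + a / c2 */
  return t1 + t2;
}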
3856 /* Subroutine for fold_truthop: decode a field reference.
3858 If EXP is a comparison reference, we return the innermost reference.
3860 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3861 set to the starting bit number.
3863 If the innermost field can be completely contained in a mode-sized
3864 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3866 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3867 otherwise it is not changed.
3869 *PUNSIGNEDP is set to the signedness of the field.
3871 *PMASK is set to the mask used. This is either contained in a
3872 BIT_AND_EXPR or derived from the width of the field.
3874 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3876 Return 0 if this is not a component reference or is one that we can't
3877 do anything with. */
3880 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3881 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3882 int *punsignedp, int *pvolatilep,
3883 tree *pmask, tree *pand_mask)
3885 tree outer_type = 0;
3887 tree mask, inner, offset;
3889 unsigned int precision;
3891 /* All the optimizations using this function assume integer fields.
3892 There are problems with FP fields since the type_for_size call
3893 below can fail for, e.g., XFmode. */
3894 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3897 /* We are interested in the bare arrangement of bits, so strip everything
3898 that doesn't affect the machine mode. However, record the type of the
3899 outermost expression if it may matter below. */
3900 if (TREE_CODE (exp) == NOP_EXPR
3901 || TREE_CODE (exp) == CONVERT_EXPR
3902 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3903 outer_type = TREE_TYPE (exp);
3906 if (TREE_CODE (exp) == BIT_AND_EXPR)
3908 and_mask = TREE_OPERAND (exp, 1);
3909 exp = TREE_OPERAND (exp, 0);
3910 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3911 if (TREE_CODE (and_mask) != INTEGER_CST)
3915 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3916 punsignedp, pvolatilep, false);
3917 if ((inner == exp && and_mask == 0)
3918 || *pbitsize < 0 || offset != 0
3919 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3922 /* If the number of bits in the reference is the same as the bitsize of
3923 the outer type, then the outer type gives the signedness. Otherwise
3924 (in case of a small bitfield) the signedness is unchanged. */
3925 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3926 *punsignedp = TYPE_UNSIGNED (outer_type);
3928 /* Compute the mask to access the bitfield. */
3929 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3930 precision = TYPE_PRECISION (unsigned_type);
3932 mask = build_int_cst_type (unsigned_type, -1);
3934 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3935 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3937 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3939 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3940 fold_convert (unsigned_type, and_mask), mask);
3943 *pand_mask = and_mask;
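/* Illustrative sketch, not GCC code (the helper name is made up): the mask
   built above, on a single 64-bit word.  Shifting an all-ones value of the
   field's unsigned type left and then right by (precision - bitsize) leaves
   just the low BITSIZE bits set.  */
static unsigned long long
low_bits_mask (unsigned bitsize)
{
  return bitsize >= 64 ? ~0ULL : (1ULL << bitsize) - 1;
}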
3947 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3948 represents the sign bit of EXP's type. If EXP represents a sign
3949 or zero extension, also test VAL against the unextended type.
3950 The return value is the (sub)expression whose sign bit is VAL,
3951 or NULL_TREE otherwise. */
3954 sign_bit_p (tree exp, const_tree val)
3956 unsigned HOST_WIDE_INT mask_lo, lo;
3957 HOST_WIDE_INT mask_hi, hi;
3961 /* Tree EXP must have an integral type. */
3962 t = TREE_TYPE (exp);
3963 if (! INTEGRAL_TYPE_P (t))
3966 /* Tree VAL must be an integer constant. */
3967 if (TREE_CODE (val) != INTEGER_CST
3968 || TREE_OVERFLOW (val))
3971 width = TYPE_PRECISION (t);
3972 if (width > HOST_BITS_PER_WIDE_INT)
3974 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3977 mask_hi = ((unsigned HOST_WIDE_INT) -1
3978 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3984 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3987 mask_lo = ((unsigned HOST_WIDE_INT) -1
3988 >> (HOST_BITS_PER_WIDE_INT - width));
3991 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3992 treat VAL as if it were unsigned. */
3993 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3994 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3997 /* Handle extension from a narrower type. */
3998 if (TREE_CODE (exp) == NOP_EXPR
3999 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4000 return sign_bit_p (TREE_OPERAND (exp, 0), val);
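/* Illustrative sketch, not GCC code (the helper name is made up): the test
   above on a single 64-bit word instead of a HIGH/LOW pair.  VAL is the
   sign bit of a WIDTH-bit type exactly when, masked to WIDTH bits, only
   bit WIDTH - 1 is set.  Assumes 0 < WIDTH <= 64.  */
static int
is_sign_bit (unsigned long long val, unsigned width)
{
  unsigned long long mask = width >= 64 ? ~0ULL : (1ULL << width) - 1;
  return (val & mask) == (1ULL << (width - 1));
}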
4005 /* Subroutine for fold_truthop: determine if an operand is simple enough
4006 to be evaluated unconditionally. */
4009 simple_operand_p (const_tree exp)
4011 /* Strip any conversions that don't change the machine mode. */
4014 return (CONSTANT_CLASS_P (exp)
4015 || TREE_CODE (exp) == SSA_NAME
4017 && ! TREE_ADDRESSABLE (exp)
4018 && ! TREE_THIS_VOLATILE (exp)
4019 && ! DECL_NONLOCAL (exp)
4020 /* Don't regard global variables as simple. They may be
4021 allocated in ways unknown to the compiler (shared memory,
4022 #pragma weak, etc). */
4023 && ! TREE_PUBLIC (exp)
4024 && ! DECL_EXTERNAL (exp)
4025 /* Loading a static variable is unduly expensive, but global
4026 registers aren't expensive. */
4027 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4030 /* The following functions are subroutines to fold_range_test and allow it to
4031 try to change a logical combination of comparisons into a range test.
4034 X == 2 || X == 3 || X == 4 || X == 5
4038 (unsigned) (X - 2) <= 3
4040 We describe each set of comparisons as being either inside or outside
4041 a range, using a variable named like IN_P, and then describe the
4042 range with a lower and upper bound. If one of the bounds is omitted,
4043 it represents either the highest or lowest value of the type.
4045 In the comments below, we represent a range by two numbers in brackets
4046 preceded by a "+" to designate being inside that range, or a "-" to
4047 designate being outside that range, so the condition can be inverted by
4048 flipping the prefix. An omitted bound is represented by a "-". For
4049 example, "- [-, 10]" means being outside the range starting at the lowest
4050 possible value and ending at 10, in other words, being greater than 10.
4051 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4054 We set up things so that the missing bounds are handled in a consistent
4055 manner so neither a missing bound nor "true" and "false" need to be
4056 handled using a special case. */
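/* Illustrative sketch, not GCC code (the helper name is made up): the
   transformation quoted above on plain C ints.  Subtracting the low bound
   in unsigned arithmetic turns the chain of equality tests into a single
   range check.  */
static int
in_two_to_five (int x)
{
  /* x == 2 || x == 3 || x == 4 || x == 5  */
  return (unsigned) x - 2 <= 3;
}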
4058 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4059 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4060 and UPPER1_P are nonzero if the respective argument is an upper bound
4061 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4062 must be specified for a comparison. ARG1 will be converted to ARG0's
4063 type if both are specified. */
4066 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4067 tree arg1, int upper1_p)
4073 /* If neither arg represents infinity, do the normal operation.
4074 Else, if not a comparison, return infinity. Else handle the special
4075 comparison rules. Note that most of the cases below won't occur, but
4076 are handled for consistency. */
4078 if (arg0 != 0 && arg1 != 0)
4080 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4081 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4083 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4086 if (TREE_CODE_CLASS (code) != tcc_comparison)
4089 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4090 for neither. In real maths, we cannot assume open ended ranges are
4091 the same. But, this is computer arithmetic, where numbers are finite.
4092 We can therefore make the transformation of any unbounded range with
4093 the value Z, Z being greater than any representable number. This permits
4094 us to treat unbounded ranges as equal. */
4095 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4096 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4100 result = sgn0 == sgn1;
4103 result = sgn0 != sgn1;
4106 result = sgn0 < sgn1;
4109 result = sgn0 <= sgn1;
4112 result = sgn0 > sgn1;
4115 result = sgn0 >= sgn1;
4121 return constant_boolean_node (result, type);
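/* Illustrative sketch, not GCC code (the helper name is made up): the
   sentinel trick used above for missing bounds.  A missing lower bound acts
   as -infinity (-1), a missing upper bound as +infinity (+1), and a present
   bound as 0, so the comparison can be done on the sentinels alone whenever
   at least one bound is missing.  */
static int
bound_compare_lt (int missing0, int upper0_p, int missing1, int upper1_p)
{
  int sgn0 = missing0 ? (upper0_p ? 1 : -1) : 0;
  int sgn1 = missing1 ? (upper1_p ? 1 : -1) : 0;
  return sgn0 < sgn1;		/* the LT_EXPR case of range_binop */
}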
4124 /* Given EXP, a logical expression, set the range it is testing into
4125 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4126 actually being tested. *PLOW and *PHIGH will be made of the same
4127 type as the returned expression. If EXP is not a comparison, we
4128 will most likely not be returning a useful value and range. Set
4129 *STRICT_OVERFLOW_P to true if the return value is only valid
4130 because signed overflow is undefined; otherwise, do not change
4131 *STRICT_OVERFLOW_P. */
4134 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4135 bool *strict_overflow_p)
4137 enum tree_code code;
4138 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4139 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4141 tree low, high, n_low, n_high;
4143 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4144 and see if we can refine the range. Some of the cases below may not
4145 happen, but it doesn't seem worth worrying about this. We "continue"
4146 the outer loop when we've changed something; otherwise we "break"
4147 the switch, which will "break" the while. */
4150 low = high = build_int_cst (TREE_TYPE (exp), 0);
4154 code = TREE_CODE (exp);
4155 exp_type = TREE_TYPE (exp);
4157 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4159 if (TREE_OPERAND_LENGTH (exp) > 0)
4160 arg0 = TREE_OPERAND (exp, 0);
4161 if (TREE_CODE_CLASS (code) == tcc_comparison
4162 || TREE_CODE_CLASS (code) == tcc_unary
4163 || TREE_CODE_CLASS (code) == tcc_binary)
4164 arg0_type = TREE_TYPE (arg0);
4165 if (TREE_CODE_CLASS (code) == tcc_binary
4166 || TREE_CODE_CLASS (code) == tcc_comparison
4167 || (TREE_CODE_CLASS (code) == tcc_expression
4168 && TREE_OPERAND_LENGTH (exp) > 1))
4169 arg1 = TREE_OPERAND (exp, 1);
4174 case TRUTH_NOT_EXPR:
4175 in_p = ! in_p, exp = arg0;
4178 case EQ_EXPR: case NE_EXPR:
4179 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4180 /* We can only do something if the range is testing for zero
4181 and if the second operand is an integer constant. Note that
4182 saying something is "in" the range we make is done by
4183 complementing IN_P since it will be set in the initial case of
4184 being not equal to zero; "out" is leaving it alone. */
4185 if (low == 0 || high == 0
4186 || ! integer_zerop (low) || ! integer_zerop (high)
4187 || TREE_CODE (arg1) != INTEGER_CST)
4192 case NE_EXPR: /* - [c, c] */
4195 case EQ_EXPR: /* + [c, c] */
4196 in_p = ! in_p, low = high = arg1;
4198 case GT_EXPR: /* - [-, c] */
4199 low = 0, high = arg1;
4201 case GE_EXPR: /* + [c, -] */
4202 in_p = ! in_p, low = arg1, high = 0;
4204 case LT_EXPR: /* - [c, -] */
4205 low = arg1, high = 0;
4207 case LE_EXPR: /* + [-, c] */
4208 in_p = ! in_p, low = 0, high = arg1;
4214 /* If this is an unsigned comparison, we also know that EXP is
4215 greater than or equal to zero. We base the range tests we make
4216 on that fact, so we record it here so we can parse existing
4217 range tests. We test arg0_type since often the return type
4218 of, e.g. EQ_EXPR, is boolean. */
4219 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4221 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4223 build_int_cst (arg0_type, 0),
4227 in_p = n_in_p, low = n_low, high = n_high;
4229 /* If the high bound is missing, but we have a nonzero low
4230 bound, reverse the range so it goes from zero to the low bound
4232 if (high == 0 && low && ! integer_zerop (low))
4235 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4236 integer_one_node, 0);
4237 low = build_int_cst (arg0_type, 0);
4245 /* (-x) IN [a,b] -> x in [-b, -a] */
4246 n_low = range_binop (MINUS_EXPR, exp_type,
4247 build_int_cst (exp_type, 0),
4249 n_high = range_binop (MINUS_EXPR, exp_type,
4250 build_int_cst (exp_type, 0),
4252 low = n_low, high = n_high;
4258 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4259 build_int_cst (exp_type, 1));
4262 case PLUS_EXPR: case MINUS_EXPR:
4263 if (TREE_CODE (arg1) != INTEGER_CST)
4266 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4267 move a constant to the other side. */
4268 if (!TYPE_UNSIGNED (arg0_type)
4269 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4272 /* If EXP is signed, any overflow in the computation is undefined,
4273 so we don't worry about it so long as our computations on
4274 the bounds don't overflow. For unsigned, overflow is defined
4275 and this is exactly the right thing. */
4276 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4277 arg0_type, low, 0, arg1, 0);
4278 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4279 arg0_type, high, 1, arg1, 0);
4280 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4281 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4284 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4285 *strict_overflow_p = true;
4287 /* Check for an unsigned range which has wrapped around the maximum
4288 value thus making n_high < n_low, and normalize it. */
4289 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4291 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4292 integer_one_node, 0);
4293 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4294 integer_one_node, 0);
4296 /* If the range is of the form +/- [ x+1, x ], we won't
4297 be able to normalize it. But then, it represents the
4298 whole range or the empty set, so make it
4300 if (tree_int_cst_equal (n_low, low)
4301 && tree_int_cst_equal (n_high, high))
4307 low = n_low, high = n_high;
4312 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4313 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4316 if (! INTEGRAL_TYPE_P (arg0_type)
4317 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4318 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4321 n_low = low, n_high = high;
4324 n_low = fold_convert (arg0_type, n_low);
4327 n_high = fold_convert (arg0_type, n_high);
4330 /* If we're converting arg0 from an unsigned type, to exp,
4331 a signed type, we will be doing the comparison as unsigned.
4332 The tests above have already verified that LOW and HIGH
4335 So we have to ensure that we will handle large unsigned
4336 values the same way that the current signed bounds treat
4339 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4343 /* For fixed-point modes, we need to pass the saturating flag
4344 as the 2nd parameter. */
4345 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4346 equiv_type = lang_hooks.types.type_for_mode
4347 (TYPE_MODE (arg0_type),
4348 TYPE_SATURATING (arg0_type));
4350 equiv_type = lang_hooks.types.type_for_mode
4351 (TYPE_MODE (arg0_type), 1);
4353 /* A range without an upper bound is, naturally, unbounded.
4354 Since convert would have cropped a very large value, use
4355 the max value for the destination type. */
4357 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4358 : TYPE_MAX_VALUE (arg0_type);
4360 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4361 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4362 fold_convert (arg0_type,
4364 build_int_cst (arg0_type, 1));
4366 /* If the low bound is specified, "and" the range with the
4367 range for which the original unsigned value will be
4371 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4372 1, n_low, n_high, 1,
4373 fold_convert (arg0_type,
4378 in_p = (n_in_p == in_p);
4382 /* Otherwise, "or" the range with the range of the input
4383 that will be interpreted as negative. */
4384 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4385 0, n_low, n_high, 1,
4386 fold_convert (arg0_type,
4391 in_p = (in_p != n_in_p);
4396 low = n_low, high = n_high;
4406 /* If EXP is a constant, we can evaluate whether this is true or false. */
4407 if (TREE_CODE (exp) == INTEGER_CST)
4409 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4411 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 *pin_p = in_p, *plow = low, *phigh = high;
4421 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4422 type, TYPE, return an expression to test if EXP is in (or out of, depending
4423 on IN_P) the range. Return 0 if the test couldn't be created. */
4426 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4428 tree etype = TREE_TYPE (exp);
4431 #ifdef HAVE_canonicalize_funcptr_for_compare
4432 /* Disable this optimization for function pointer expressions
4433 on targets that require function pointer canonicalization. */
4434 if (HAVE_canonicalize_funcptr_for_compare
4435 && TREE_CODE (etype) == POINTER_TYPE
4436 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4442 value = build_range_check (type, exp, 1, low, high);
4444 return invert_truthvalue (value);
4449 if (low == 0 && high == 0)
4450 return build_int_cst (type, 1);
4453 return fold_build2 (LE_EXPR, type, exp,
4454 fold_convert (etype, high));
4457 return fold_build2 (GE_EXPR, type, exp,
4458 fold_convert (etype, low));
4460 if (operand_equal_p (low, high, 0))
4461 return fold_build2 (EQ_EXPR, type, exp,
4462 fold_convert (etype, low));
4464 if (integer_zerop (low))
4466 if (! TYPE_UNSIGNED (etype))
4468 etype = unsigned_type_for (etype);
4469 high = fold_convert (etype, high);
4470 exp = fold_convert (etype, exp);
4472 return build_range_check (type, exp, 1, 0, high);
4475 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4476 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4478 unsigned HOST_WIDE_INT lo;
4482 prec = TYPE_PRECISION (etype);
4483 if (prec <= HOST_BITS_PER_WIDE_INT)
4486 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4490 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4491 lo = (unsigned HOST_WIDE_INT) -1;
4494 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4496 if (TYPE_UNSIGNED (etype))
4498 etype = signed_type_for (etype);
4499 exp = fold_convert (etype, exp);
4501 return fold_build2 (GT_EXPR, type, exp,
4502 build_int_cst (etype, 0));
4506 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4507 This requires wrap-around arithmetic for the type of the expression. */
4508 switch (TREE_CODE (etype))
4511 /* There is no requirement that LOW be within the range of ETYPE
4512 if the latter is a subtype. It must, however, be within the base
4513 type of ETYPE. So be sure we do the subtraction in that type. */
4514 if (TREE_TYPE (etype))
4515 etype = TREE_TYPE (etype);
4520 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4521 TYPE_UNSIGNED (etype));
4528 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4529 if (TREE_CODE (etype) == INTEGER_TYPE
4530 && !TYPE_OVERFLOW_WRAPS (etype))
4532 tree utype, minv, maxv;
4534 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4535 for the type in question, as we rely on this here. */
4536 utype = unsigned_type_for (etype);
4537 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4538 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4539 integer_one_node, 1);
4540 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4542 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4549 high = fold_convert (etype, high);
4550 low = fold_convert (etype, low);
4551 exp = fold_convert (etype, exp);
4553 value = const_binop (MINUS_EXPR, high, low, 0);
4556 if (POINTER_TYPE_P (etype))
4558 if (value != 0 && !TREE_OVERFLOW (value))
4560 low = fold_convert (sizetype, low);
4561 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4562 return build_range_check (type,
4563 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4564 1, build_int_cst (etype, 0), value);
4569 if (value != 0 && !TREE_OVERFLOW (value))
4570 return build_range_check (type,
4571 fold_build2 (MINUS_EXPR, etype, exp, low),
4572 1, build_int_cst (etype, 0), value);
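/* [Editor's illustration -- not part of fold-const.c.]  The subtraction trick
   used above: once the check is rewritten as EXP - LOW in a wrapping type, a
   two-sided range test becomes a single unsigned comparison.  Hypothetical
   helper, assuming 32-bit int.  */

static int
ex_wraparound_range_check (int c)
{
  int two_compares = (c >= 10 && c <= 20);
  int one_compare = ((unsigned int) c - 10u <= 10u);	/* (c - low) <= (high - low), unsigned  */
  return two_compares == one_compare;			/* holds for every C  */
}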
4577 /* Return the predecessor of VAL in its type, handling the infinite case. */
4580 range_predecessor (tree val)
4582 tree type = TREE_TYPE (val);
4584 if (INTEGRAL_TYPE_P (type)
4585 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4588 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4591 /* Return the successor of VAL in its type, handling the infinite case. */
4594 range_successor (tree val)
4596 tree type = TREE_TYPE (val);
4598 if (INTEGRAL_TYPE_P (type)
4599 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4602 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4605 /* Given two ranges, see if we can merge them into one. Return 1 if we
4606 can, 0 if we can't. Set the output range into the specified parameters. */
4609 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4610 tree high0, int in1_p, tree low1, tree high1)
4618 int lowequal = ((low0 == 0 && low1 == 0)
4619 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4620 low0, 0, low1, 0)));
4621 int highequal = ((high0 == 0 && high1 == 0)
4622 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4623 high0, 1, high1, 1)));
4625 /* Make range 0 be the range that starts first, or ends last if they
4626 start at the same value. Swap them if it isn't. */
4627 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4630 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4631 high1, 1, high0, 1))))
4633 temp = in0_p, in0_p = in1_p, in1_p = temp;
4634 tem = low0, low0 = low1, low1 = tem;
4635 tem = high0, high0 = high1, high1 = tem;
4638 /* Now flag two cases, whether the ranges are disjoint or whether the
4639 second range is totally subsumed in the first. Note that the tests
4640 below are simplified by the ones above. */
4641 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4642 high0, 1, low1, 0));
4643 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4644 high1, 1, high0, 1));
4646 /* We now have four cases, depending on whether we are including or
4647 excluding the two ranges. */
4650 /* If they don't overlap, the result is false. If the second range
4651 is a subset it is the result. Otherwise, the range is from the start
4652 of the second to the end of the first. */
4654 in_p = 0, low = high = 0;
4656 in_p = 1, low = low1, high = high1;
4658 in_p = 1, low = low1, high = high0;
4661 else if (in0_p && ! in1_p)
4663 /* If they don't overlap, the result is the first range. If they are
4664 equal, the result is false. If the second range is a subset of the
4665 first, and the ranges begin at the same place, we go from just after
4666 the end of the second range to the end of the first. If the second
4667 range is not a subset of the first, or if it is a subset and both
4668 ranges end at the same place, the range starts at the start of the
4669 first range and ends just before the second range.
4670 Otherwise, we can't describe this as a single range. */
4672 in_p = 1, low = low0, high = high0;
4673 else if (lowequal && highequal)
4674 in_p = 0, low = high = 0;
4675 else if (subset && lowequal)
4677 low = range_successor (high1);
4682 /* We are in the weird situation where high0 > high1 but
4683 high1 has no successor. Punt. */
4687 else if (! subset || highequal)
4690 high = range_predecessor (low1);
4694 /* low0 < low1 but low1 has no predecessor. Punt. */
4702 else if (! in0_p && in1_p)
4704 /* If they don't overlap, the result is the second range. If the second
4705 is a subset of the first, the result is false. Otherwise,
4706 the range starts just after the first range and ends at the
4707 end of the second. */
4709 in_p = 1, low = low1, high = high1;
4710 else if (subset || highequal)
4711 in_p = 0, low = high = 0;
4714 low = range_successor (high0);
4719 /* high1 > high0 but high0 has no successor. Punt. */
4727 /* The case where we are excluding both ranges. Here the complex case
4728 is if they don't overlap. In that case, the only time we have a
4729 range is if they are adjacent. If the second is a subset of the
4730 first, the result is the first. Otherwise, the range to exclude
4731 starts at the beginning of the first range and ends at the end of the
4735 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4736 range_successor (high0),
4738 in_p = 0, low = low0, high = high1;
4741 /* Canonicalize - [min, x] into - [-, x]. */
4742 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4743 switch (TREE_CODE (TREE_TYPE (low0)))
4746 if (TYPE_PRECISION (TREE_TYPE (low0))
4747 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4751 if (tree_int_cst_equal (low0,
4752 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4756 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4757 && integer_zerop (low0))
4764 /* Canonicalize - [x, max] into - [x, -]. */
4765 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4766 switch (TREE_CODE (TREE_TYPE (high1)))
4769 if (TYPE_PRECISION (TREE_TYPE (high1))
4770 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4774 if (tree_int_cst_equal (high1,
4775 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4779 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4780 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4782 integer_one_node, 1)))
4789 /* The ranges might be also adjacent between the maximum and
4790 minimum values of the given type. For
4791 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4792 return + [x + 1, y - 1]. */
4793 if (low0 == 0 && high1 == 0)
4795 low = range_successor (high0);
4796 high = range_predecessor (low1);
4797 if (low == 0 || high == 0)
4807 in_p = 0, low = low0, high = high0;
4809 in_p = 0, low = low0, high = high1;
4812 *pin_p = in_p, *plow = low, *phigh = high;
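/* [Editor's illustration -- not part of fold-const.c.]  One of the cases
   handled above: two excluded ranges that are adjacent merge into a single
   excluded range, because range_successor (4) == 5.  Hypothetical helper.  */

static int
ex_merge_adjacent_exclusions (int x)
{
  int two_ranges = !(x >= 0 && x <= 4) && !(x >= 5 && x <= 9);	/* - [0,4] and - [5,9]  */
  int one_range = !(x >= 0 && x <= 9);				/* merged: - [0,9]  */
  return two_ranges == one_range;				/* holds for every X  */
}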
4817 /* Subroutine of fold, looking inside expressions of the form
4818 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4819 of the COND_EXPR. This function is being used also to optimize
4820 A op B ? C : A, by reversing the comparison first.
4822 Return a folded expression whose code is not a COND_EXPR
4823 anymore, or NULL_TREE if no folding opportunity is found. */
4826 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4828 enum tree_code comp_code = TREE_CODE (arg0);
4829 tree arg00 = TREE_OPERAND (arg0, 0);
4830 tree arg01 = TREE_OPERAND (arg0, 1);
4831 tree arg1_type = TREE_TYPE (arg1);
4837 /* If we have A op 0 ? A : -A, consider applying the following
4840 A == 0? A : -A same as -A
4841 A != 0? A : -A same as A
4842 A >= 0? A : -A same as abs (A)
4843 A > 0? A : -A same as abs (A)
4844 A <= 0? A : -A same as -abs (A)
4845 A < 0? A : -A same as -abs (A)
4847 None of these transformations work for modes with signed
4848 zeros. If A is +/-0, the first two transformations will
4849 change the sign of the result (from +0 to -0, or vice
4850 versa). The last four will fix the sign of the result,
4851 even though the original expressions could be positive or
4852 negative, depending on the sign of A.
4854 Note that all these transformations are correct if A is
4855 NaN, since the two alternatives (A and -A) are also NaNs. */
4856 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4857 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4858 ? real_zerop (arg01)
4859 : integer_zerop (arg01))
4860 && ((TREE_CODE (arg2) == NEGATE_EXPR
4861 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4862 /* In the case that A is of the form X-Y, '-A' (arg2) may
4863 have already been folded to Y-X, check for that. */
4864 || (TREE_CODE (arg1) == MINUS_EXPR
4865 && TREE_CODE (arg2) == MINUS_EXPR
4866 && operand_equal_p (TREE_OPERAND (arg1, 0),
4867 TREE_OPERAND (arg2, 1), 0)
4868 && operand_equal_p (TREE_OPERAND (arg1, 1),
4869 TREE_OPERAND (arg2, 0), 0))))
4874 tem = fold_convert (arg1_type, arg1);
4875 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4878 return pedantic_non_lvalue (fold_convert (type, arg1));
4881 if (flag_trapping_math)
4886 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4887 arg1 = fold_convert (signed_type_for
4888 (TREE_TYPE (arg1)), arg1);
4889 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4890 return pedantic_non_lvalue (fold_convert (type, tem));
4893 if (flag_trapping_math)
4897 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4898 arg1 = fold_convert (signed_type_for
4899 (TREE_TYPE (arg1)), arg1);
4900 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4901 return negate_expr (fold_convert (type, tem));
4903 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4907 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4908 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4909 both transformations are correct when A is NaN: A != 0
4910 is then true, and A == 0 is false. */
4912 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4913 && integer_zerop (arg01) && integer_zerop (arg2))
4915 if (comp_code == NE_EXPR)
4916 return pedantic_non_lvalue (fold_convert (type, arg1));
4917 else if (comp_code == EQ_EXPR)
4918 return build_int_cst (type, 0);
4921 /* Try some transformations of A op B ? A : B.
4923 A == B? A : B same as B
4924 A != B? A : B same as A
4925 A >= B? A : B same as max (A, B)
4926 A > B? A : B same as max (B, A)
4927 A <= B? A : B same as min (A, B)
4928 A < B? A : B same as min (B, A)
4930 As above, these transformations don't work in the presence
4931 of signed zeros. For example, if A and B are zeros of
4932 opposite sign, the first two transformations will change
4933 the sign of the result. In the last four, the original
4934 expressions give different results for (A=+0, B=-0) and
4935 (A=-0, B=+0), but the transformed expressions do not.
4937 The first two transformations are correct if either A or B
4938 is a NaN. In the first transformation, the condition will
4939 be false, and B will indeed be chosen. In the case of the
4940 second transformation, the condition A != B will be true,
4941 and A will be chosen.
4943 The conversions to max() and min() are not correct if B is
4944 a number and A is not. The conditions in the original
4945 expressions will be false, so all four give B. The min()
4946 and max() versions would give a NaN instead. */
4947 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4948 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4949 /* Avoid these transformations if the COND_EXPR may be used
4950 as an lvalue in the C++ front-end. PR c++/19199. */
4952 || (strcmp (lang_hooks.name, "GNU C++") != 0
4953 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4954 || ! maybe_lvalue_p (arg1)
4955 || ! maybe_lvalue_p (arg2)))
4957 tree comp_op0 = arg00;
4958 tree comp_op1 = arg01;
4959 tree comp_type = TREE_TYPE (comp_op0);
4961 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4962 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4972 return pedantic_non_lvalue (fold_convert (type, arg2));
4974 return pedantic_non_lvalue (fold_convert (type, arg1));
4979 /* In C++ a ?: expression can be an lvalue, so put the
4980 operand which will be used if they are equal first
4981 so that we can convert this back to the
4982 corresponding COND_EXPR. */
4983 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4985 comp_op0 = fold_convert (comp_type, comp_op0);
4986 comp_op1 = fold_convert (comp_type, comp_op1);
4987 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4988 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4989 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4990 return pedantic_non_lvalue (fold_convert (type, tem));
4997 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4999 comp_op0 = fold_convert (comp_type, comp_op0);
5000 comp_op1 = fold_convert (comp_type, comp_op1);
5001 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5002 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5003 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5004 return pedantic_non_lvalue (fold_convert (type, tem));
5008 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5009 return pedantic_non_lvalue (fold_convert (type, arg2));
5012 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5013 return pedantic_non_lvalue (fold_convert (type, arg1));
5016 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5021 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5022 we might still be able to simplify this. For example,
5023 if C1 is one less or one more than C2, this might have started
5024 out as a MIN or MAX and been transformed by this function.
5025 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5027 if (INTEGRAL_TYPE_P (type)
5028 && TREE_CODE (arg01) == INTEGER_CST
5029 && TREE_CODE (arg2) == INTEGER_CST)
5033 /* We can replace A with C1 in this case. */
5034 arg1 = fold_convert (type, arg01);
5035 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5038 /* If C1 is C2 + 1, this is min(A, C2). */
5039 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5041 && operand_equal_p (arg01,
5042 const_binop (PLUS_EXPR, arg2,
5043 build_int_cst (type, 1), 0),
5045 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5047 fold_convert (type, arg1),
5052 /* If C1 is C2 - 1, this is min(A, C2). */
5053 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5055 && operand_equal_p (arg01,
5056 const_binop (MINUS_EXPR, arg2,
5057 build_int_cst (type, 1), 0),
5059 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5061 fold_convert (type, arg1),
5066 /* If C1 is C2 - 1, this is max(A, C2). */
5067 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5069 && operand_equal_p (arg01,
5070 const_binop (MINUS_EXPR, arg2,
5071 build_int_cst (type, 1), 0),
5073 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5075 fold_convert (type, arg1),
5080 /* If C1 is C2 + 1, this is max(A, C2). */
5081 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5083 && operand_equal_p (arg01,
5084 const_binop (PLUS_EXPR, arg2,
5085 build_int_cst (type, 1), 0),
5087 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5089 fold_convert (type, arg1),
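/* [Editor's illustration -- not part of fold-const.c.]  Source-level forms of
   the COND_EXPR transformations above; the hypothetical ex_* helpers show the
   input shapes and the expressions they fold to.  */

static int
ex_abs_from_cond (int a)
{
  return a >= 0 ? a : -a;	/* A >= 0 ? A : -A  ->  ABS_EXPR <a> (INT_MIN aside)  */
}

static int
ex_min_from_cond (int a, int b)
{
  return a <= b ? a : b;	/* A <= B ? A : B  ->  MIN_EXPR <a, b>  */
}

static int
ex_max_from_consts (int x)
{
  return x >= 6 ? x : 5;	/* A >= C1 ? A : C2 with C1 == C2 + 1  ->  MAX_EXPR <x, 5>  */
}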
5103 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5104 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5107 /* EXP is some logical combination of boolean tests. See if we can
5108 merge it into some range test. Return the new tree if so. */
5111 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5113 int or_op = (code == TRUTH_ORIF_EXPR
5114 || code == TRUTH_OR_EXPR);
5115 int in0_p, in1_p, in_p;
5116 tree low0, low1, low, high0, high1, high;
5117 bool strict_overflow_p = false;
5118 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5119 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5121 const char * const warnmsg = G_("assuming signed overflow does not occur "
5122 "when simplifying range test");
5124 /* If this is an OR operation, invert both sides; we will invert
5125 again at the end. */
5127 in0_p = ! in0_p, in1_p = ! in1_p;
5129 /* If both expressions are the same, if we can merge the ranges, and we
5130 can build the range test, return it or it inverted. If one of the
5131 ranges is always true or always false, consider it to be the same
5132 expression as the other. */
5133 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5134 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5136 && 0 != (tem = (build_range_check (type,
5138 : rhs != 0 ? rhs : integer_zero_node,
5141 if (strict_overflow_p)
5142 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5143 return or_op ? invert_truthvalue (tem) : tem;
5146 /* On machines where the branch cost is expensive, if this is a
5147 short-circuited branch and the underlying object on both sides
5148 is the same, make a non-short-circuit operation. */
5149 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5150 && lhs != 0 && rhs != 0
5151 && (code == TRUTH_ANDIF_EXPR
5152 || code == TRUTH_ORIF_EXPR)
5153 && operand_equal_p (lhs, rhs, 0))
5155 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5156 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5157 which cases we can't do this. */
5158 if (simple_operand_p (lhs))
5159 return build2 (code == TRUTH_ANDIF_EXPR
5160 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5163 else if (lang_hooks.decls.global_bindings_p () == 0
5164 && ! CONTAINS_PLACEHOLDER_P (lhs))
5166 tree common = save_expr (lhs);
5168 if (0 != (lhs = build_range_check (type, common,
5169 or_op ? ! in0_p : in0_p,
5171 && (0 != (rhs = build_range_check (type, common,
5172 or_op ? ! in1_p : in1_p,
5175 if (strict_overflow_p)
5176 fold_overflow_warning (warnmsg,
5177 WARN_STRICT_OVERFLOW_COMPARISON);
5178 return build2 (code == TRUTH_ANDIF_EXPR
5179 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
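/* [Editor's illustration -- not part of fold-const.c.]  A short-circuit OR of
   two comparisons on the same operand, merged as above into one range check
   (the ranges are inverted for the OR, merged, and the result re-inverted).
   Hypothetical helper, assuming 32-bit int.  */

static int
ex_or_of_comparisons (int x)
{
  int short_circuit = (x < 2) || (x > 5);
  int single_range = ((unsigned int) x - 2u > 3u);	/* i.e. x is outside [2,5]  */
  return short_circuit == single_range;			/* holds for every X  */
}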
5188 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5189 bit value. Arrange things so the extra bits will be set to zero if and
5190 only if C is sign-extended to its full width. If MASK is nonzero,
5191 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5194 unextend (tree c, int p, int unsignedp, tree mask)
5196 tree type = TREE_TYPE (c);
5197 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5200 if (p == modesize || unsignedp)
5203 /* We work by getting just the sign bit into the low-order bit, then
5204 into the high-order bit, then sign-extend. We then XOR that value
5206 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5207 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5209 /* We must use a signed type in order to get an arithmetic right shift.
5210 However, we must also avoid introducing accidental overflows, so that
5211 a subsequent call to integer_zerop will work. Hence we must
5212 do the type conversion here. At this point, the constant is either
5213 zero or one, and the conversion to a signed type can never overflow.
5214 We could get an overflow if this conversion is done anywhere else. */
5215 if (TYPE_UNSIGNED (type))
5216 temp = fold_convert (signed_type_for (type), temp);
5218 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5219 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5221 temp = const_binop (BIT_AND_EXPR, temp,
5222 fold_convert (TREE_TYPE (c), mask), 0);
5223 /* If necessary, convert the type back to match the type of C. */
5224 if (TYPE_UNSIGNED (type))
5225 temp = fold_convert (type, temp);
5227 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5230 /* Find ways of folding logical expressions of LHS and RHS:
5231 Try to merge two comparisons to the same innermost item.
5232 Look for range tests like "ch >= '0' && ch <= '9'".
5233 Look for combinations of simple terms on machines with expensive branches
5234 and evaluate the RHS unconditionally.
5236 For example, if we have p->a == 2 && p->b == 4 and we can make an
5237 object large enough to span both A and B, we can do this with a comparison
5238 against the object ANDed with a mask.
5240 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5241 operations to do this with one comparison.
5243 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5244 function and the one above.
5246 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5247 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5249 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5252 We return the simplified tree or 0 if no optimization is possible. */
5255 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5257 /* If this is the "or" of two comparisons, we can do something if
5258 the comparisons are NE_EXPR. If this is the "and", we can do something
5259 if the comparisons are EQ_EXPR. I.e.,
5260 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5262 WANTED_CODE is this operation code. For single bit fields, we can
5263 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5264 comparison for one-bit fields. */
5266 enum tree_code wanted_code;
5267 enum tree_code lcode, rcode;
5268 tree ll_arg, lr_arg, rl_arg, rr_arg;
5269 tree ll_inner, lr_inner, rl_inner, rr_inner;
5270 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5271 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5272 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5273 HOST_WIDE_INT lnbitsize, lnbitpos;
5274 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5275 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5276 enum machine_mode lnmode;
5277 tree ll_mask, lr_mask, rl_mask, rr_mask;
5278 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5279 tree l_const, r_const;
5280 tree lntype, result;
5281 int first_bit, end_bit;
5283 tree orig_lhs = lhs, orig_rhs = rhs;
5284 enum tree_code orig_code = code;
5286 /* Start by getting the comparison codes. Fail if anything is volatile.
5287 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5288 it were surrounded with a NE_EXPR. */
5290 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5293 lcode = TREE_CODE (lhs);
5294 rcode = TREE_CODE (rhs);
5296 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5298 lhs = build2 (NE_EXPR, truth_type, lhs,
5299 build_int_cst (TREE_TYPE (lhs), 0));
5303 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5305 rhs = build2 (NE_EXPR, truth_type, rhs,
5306 build_int_cst (TREE_TYPE (rhs), 0));
5310 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5311 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5314 ll_arg = TREE_OPERAND (lhs, 0);
5315 lr_arg = TREE_OPERAND (lhs, 1);
5316 rl_arg = TREE_OPERAND (rhs, 0);
5317 rr_arg = TREE_OPERAND (rhs, 1);
5319 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5320 if (simple_operand_p (ll_arg)
5321 && simple_operand_p (lr_arg))
5324 if (operand_equal_p (ll_arg, rl_arg, 0)
5325 && operand_equal_p (lr_arg, rr_arg, 0))
5327 result = combine_comparisons (code, lcode, rcode,
5328 truth_type, ll_arg, lr_arg);
5332 else if (operand_equal_p (ll_arg, rr_arg, 0)
5333 && operand_equal_p (lr_arg, rl_arg, 0))
5335 result = combine_comparisons (code, lcode,
5336 swap_tree_comparison (rcode),
5337 truth_type, ll_arg, lr_arg);
5343 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5344 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5346 /* If the RHS can be evaluated unconditionally and its operands are
5347 simple, it wins to evaluate the RHS unconditionally on machines
5348 with expensive branches. In this case, this isn't a comparison
5349 that can be merged. Avoid doing this if the RHS is a floating-point
5350 comparison since those can trap. */
5352 if (BRANCH_COST >= 2
5353 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5354 && simple_operand_p (rl_arg)
5355 && simple_operand_p (rr_arg))
5357 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5358 if (code == TRUTH_OR_EXPR
5359 && lcode == NE_EXPR && integer_zerop (lr_arg)
5360 && rcode == NE_EXPR && integer_zerop (rr_arg)
5361 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5362 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5363 return build2 (NE_EXPR, truth_type,
5364 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5366 build_int_cst (TREE_TYPE (ll_arg), 0));
5368 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5369 if (code == TRUTH_AND_EXPR
5370 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5371 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5372 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5373 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5374 return build2 (EQ_EXPR, truth_type,
5375 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5377 build_int_cst (TREE_TYPE (ll_arg), 0));
5379 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5381 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5382 return build2 (code, truth_type, lhs, rhs);
5387 /* See if the comparisons can be merged. Then get all the parameters for
5390 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5391 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5395 ll_inner = decode_field_reference (ll_arg,
5396 &ll_bitsize, &ll_bitpos, &ll_mode,
5397 &ll_unsignedp, &volatilep, &ll_mask,
5399 lr_inner = decode_field_reference (lr_arg,
5400 &lr_bitsize, &lr_bitpos, &lr_mode,
5401 &lr_unsignedp, &volatilep, &lr_mask,
5403 rl_inner = decode_field_reference (rl_arg,
5404 &rl_bitsize, &rl_bitpos, &rl_mode,
5405 &rl_unsignedp, &volatilep, &rl_mask,
5407 rr_inner = decode_field_reference (rr_arg,
5408 &rr_bitsize, &rr_bitpos, &rr_mode,
5409 &rr_unsignedp, &volatilep, &rr_mask,
5412 /* It must be true that the inner operation on the lhs of each
5413 comparison must be the same if we are to be able to do anything.
5414 Then see if we have constants. If not, the same must be true for
5416 if (volatilep || ll_inner == 0 || rl_inner == 0
5417 || ! operand_equal_p (ll_inner, rl_inner, 0))
5420 if (TREE_CODE (lr_arg) == INTEGER_CST
5421 && TREE_CODE (rr_arg) == INTEGER_CST)
5422 l_const = lr_arg, r_const = rr_arg;
5423 else if (lr_inner == 0 || rr_inner == 0
5424 || ! operand_equal_p (lr_inner, rr_inner, 0))
5427 l_const = r_const = 0;
5429 /* If either comparison code is not correct for our logical operation,
5430 fail. However, we can convert a one-bit comparison against zero into
5431 the opposite comparison against that bit being set in the field. */
5433 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5434 if (lcode != wanted_code)
5436 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5438 /* Make the left operand unsigned, since we are only interested
5439 in the value of one bit. Otherwise we are doing the wrong
5448 /* This is analogous to the code for l_const above. */
5449 if (rcode != wanted_code)
5451 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5460 /* See if we can find a mode that contains both fields being compared on
5461 the left. If we can't, fail. Otherwise, update all constants and masks
5462 to be relative to a field of that size. */
5463 first_bit = MIN (ll_bitpos, rl_bitpos);
5464 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5465 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5466 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5468 if (lnmode == VOIDmode)
5471 lnbitsize = GET_MODE_BITSIZE (lnmode);
5472 lnbitpos = first_bit & ~ (lnbitsize - 1);
5473 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5474 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5476 if (BYTES_BIG_ENDIAN)
5478 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5479 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5482 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5483 size_int (xll_bitpos), 0);
5484 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5485 size_int (xrl_bitpos), 0);
5489 l_const = fold_convert (lntype, l_const);
5490 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5491 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5492 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5493 fold_build1 (BIT_NOT_EXPR,
5497 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5499 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5504 r_const = fold_convert (lntype, r_const);
5505 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5506 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5507 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5508 fold_build1 (BIT_NOT_EXPR,
5512 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5514 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5518 /* Handle the case of comparisons with constants. If there is something in
5519 common between the masks, those bits of the constants must be the same.
5520 If not, the condition is always false. Test for this to avoid generating
5521 incorrect code below. */
5522 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5523 if (! integer_zerop (result)
5524 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5525 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5527 if (wanted_code == NE_EXPR)
5529 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5530 return constant_boolean_node (true, truth_type);
5534 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5535 return constant_boolean_node (false, truth_type);
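/* [Editor's illustration -- not part of fold-const.c.]  The simplest wins of
   fold_truthop at the source level: equality tests against zero combined via
   a bitwise OR of the operands.  Hypothetical helper.  */

static int
ex_combine_zero_tests (unsigned int a, unsigned int b)
{
  int two_tests = (a == 0) && (b == 0);		/* likewise (a != 0) || (b != 0) is (a | b) != 0  */
  int one_test = ((a | b) == 0);
  return two_tests == one_test;			/* holds for every A and B  */
}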
5542 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5546 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5549 enum tree_code op_code;
5550 tree comp_const = op1;
5552 int consts_equal, consts_lt;
5555 STRIP_SIGN_NOPS (arg0);
5557 op_code = TREE_CODE (arg0);
5558 minmax_const = TREE_OPERAND (arg0, 1);
5559 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5560 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5561 inner = TREE_OPERAND (arg0, 0);
5563 /* If something does not permit us to optimize, return the original tree. */
5564 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5565 || TREE_CODE (comp_const) != INTEGER_CST
5566 || TREE_OVERFLOW (comp_const)
5567 || TREE_CODE (minmax_const) != INTEGER_CST
5568 || TREE_OVERFLOW (minmax_const))
5571 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5572 and GT_EXPR, doing the rest with recursive calls using logical
5576 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5578 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5581 return invert_truthvalue (tem);
5587 fold_build2 (TRUTH_ORIF_EXPR, type,
5588 optimize_minmax_comparison
5589 (EQ_EXPR, type, arg0, comp_const),
5590 optimize_minmax_comparison
5591 (GT_EXPR, type, arg0, comp_const));
5594 if (op_code == MAX_EXPR && consts_equal)
5595 /* MAX (X, 0) == 0 -> X <= 0 */
5596 return fold_build2 (LE_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR && consts_lt)
5599 /* MAX (X, 0) == 5 -> X == 5 */
5600 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5602 else if (op_code == MAX_EXPR)
5603 /* MAX (X, 0) == -1 -> false */
5604 return omit_one_operand (type, integer_zero_node, inner);
5606 else if (consts_equal)
5607 /* MIN (X, 0) == 0 -> X >= 0 */
5608 return fold_build2 (GE_EXPR, type, inner, comp_const);
5611 /* MIN (X, 0) == 5 -> false */
5612 return omit_one_operand (type, integer_zero_node, inner);
5615 /* MIN (X, 0) == -1 -> X == -1 */
5616 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5619 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5620 /* MAX (X, 0) > 0 -> X > 0
5621 MAX (X, 0) > 5 -> X > 5 */
5622 return fold_build2 (GT_EXPR, type, inner, comp_const);
5624 else if (op_code == MAX_EXPR)
5625 /* MAX (X, 0) > -1 -> true */
5626 return omit_one_operand (type, integer_one_node, inner);
5628 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5629 /* MIN (X, 0) > 0 -> false
5630 MIN (X, 0) > 5 -> false */
5631 return omit_one_operand (type, integer_zero_node, inner);
5634 /* MIN (X, 0) > -1 -> X > -1 */
5635 return fold_build2 (GT_EXPR, type, inner, comp_const);
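/* [Editor's illustration -- not part of fold-const.c.]  One row of the table
   implemented above: when the MAX constant is smaller than the comparison
   constant, the MAX drops out.  Hypothetical helper.  */

static int
ex_max_compare (int x)
{
  int original = ((x > 3 ? x : 3) > 5);		/* MAX_EXPR <x, 3> > 5  */
  int folded = (x > 5);				/* 3 > 5 can never hold, so only X matters  */
  return original == folded;			/* holds for every X  */
}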
5642 /* T is an integer expression that is being multiplied, divided, or taken a
5643 modulus (CODE says which and what kind of divide or modulus) by a
5644 constant C. See if we can eliminate that operation by folding it with
5645 other operations already in T. WIDE_TYPE, if non-null, is a type that
5646 should be used for the computation if wider than our type.
5648 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5649 (X * 2) + (Y * 4). We must, however, be assured that either the original
5650 expression would not overflow or that overflow is undefined for the type
5651 in the language in question.
5653 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5654 the machine has a multiply-accumulate insn or that this is part of an
5655 addressing calculation.
5657 If we return a non-null expression, it is an equivalent form of the
5658 original computation, but need not be in the original type.
5660 We set *STRICT_OVERFLOW_P to true if the return value depends on
5661 signed overflow being undefined. Otherwise we do not change
5662 *STRICT_OVERFLOW_P. */
5665 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5666 bool *strict_overflow_p)
5668 /* To avoid exponential search depth, refuse to allow recursion past
5669 three levels. Beyond that (1) it's highly unlikely that we'll find
5670 something interesting and (2) we've probably processed it before
5671 when we built the inner expression. */
5680 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5687 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5688 bool *strict_overflow_p)
5690 tree type = TREE_TYPE (t);
5691 enum tree_code tcode = TREE_CODE (t);
5692 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5693 > GET_MODE_SIZE (TYPE_MODE (type)))
5694 ? wide_type : type);
5696 int same_p = tcode == code;
5697 tree op0 = NULL_TREE, op1 = NULL_TREE;
5698 bool sub_strict_overflow_p;
5700 /* Don't deal with constants of zero here; they confuse the code below. */
5701 if (integer_zerop (c))
5704 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5705 op0 = TREE_OPERAND (t, 0);
5707 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5708 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5710 /* Note that we need not handle conditional operations here since fold
5711 already handles those cases. So just do arithmetic here. */
5715 /* For a constant, we can always simplify if we are a multiply
5716 or (for divide and modulus) if it is a multiple of our constant. */
5717 if (code == MULT_EXPR
5718 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5719 return const_binop (code, fold_convert (ctype, t),
5720 fold_convert (ctype, c), 0);
5723 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5724 /* If op0 is an expression ... */
5725 if ((COMPARISON_CLASS_P (op0)
5726 || UNARY_CLASS_P (op0)
5727 || BINARY_CLASS_P (op0)
5728 || VL_EXP_CLASS_P (op0)
5729 || EXPRESSION_CLASS_P (op0))
5730 /* ... and is unsigned, and its type is smaller than ctype,
5731 then we cannot pass through as widening. */
5732 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5733 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5734 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5735 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5736 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5737 /* ... or this is a truncation (t is narrower than op0),
5738 then we cannot pass through this narrowing. */
5739 || (GET_MODE_SIZE (TYPE_MODE (type))
5740 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5741 /* ... or signedness changes for division or modulus,
5742 then we cannot pass through this conversion. */
5743 || (code != MULT_EXPR
5744 && (TYPE_UNSIGNED (ctype)
5745 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5746 /* ... or has undefined overflow while the converted to
5747 type has not, we cannot do the operation in the inner type
5748 as that would introduce undefined overflow. */
5749 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5750 && !TYPE_OVERFLOW_UNDEFINED (type))))
5753 /* Pass the constant down and see if we can make a simplification. If
5754 we can, replace this expression with the inner simplification for
5755 possible later conversion to our or some other type. */
5756 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5757 && TREE_CODE (t2) == INTEGER_CST
5758 && !TREE_OVERFLOW (t2)
5759 && (0 != (t1 = extract_muldiv (op0, t2, code,
5761 ? ctype : NULL_TREE,
5762 strict_overflow_p))))
5767 /* If widening the type changes it from signed to unsigned, then we
5768 must avoid building ABS_EXPR itself as unsigned. */
5769 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5771 tree cstype = (*signed_type_for) (ctype);
5772 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5775 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5776 return fold_convert (ctype, t1);
5780 /* If the constant is negative, we cannot simplify this. */
5781 if (tree_int_cst_sgn (c) == -1)
5785 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5787 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5790 case MIN_EXPR: case MAX_EXPR:
5791 /* If widening the type changes the signedness, then we can't perform
5792 this optimization as that changes the result. */
5793 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5796 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5797 sub_strict_overflow_p = false;
5798 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5799 &sub_strict_overflow_p)) != 0
5800 && (t2 = extract_muldiv (op1, c, code, wide_type,
5801 &sub_strict_overflow_p)) != 0)
5803 if (tree_int_cst_sgn (c) < 0)
5804 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5805 if (sub_strict_overflow_p)
5806 *strict_overflow_p = true;
5807 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5808 fold_convert (ctype, t2));
5812 case LSHIFT_EXPR: case RSHIFT_EXPR:
5813 /* If the second operand is constant, this is a multiplication
5814 or floor division, by a power of two, so we can treat it that
5815 way unless the multiplier or divisor overflows. Signed
5816 left-shift overflow is implementation-defined rather than
5817 undefined in C90, so do not convert signed left shift into
5819 if (TREE_CODE (op1) == INTEGER_CST
5820 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5821 /* const_binop may not detect overflow correctly,
5822 so check for it explicitly here. */
5823 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5824 && TREE_INT_CST_HIGH (op1) == 0
5825 && 0 != (t1 = fold_convert (ctype,
5826 const_binop (LSHIFT_EXPR,
5829 && !TREE_OVERFLOW (t1))
5830 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5831 ? MULT_EXPR : FLOOR_DIV_EXPR,
5832 ctype, fold_convert (ctype, op0), t1),
5833 c, code, wide_type, strict_overflow_p);
5836 case PLUS_EXPR: case MINUS_EXPR:
5837 /* See if we can eliminate the operation on both sides. If we can, we
5838 can return a new PLUS or MINUS. If we can't, the only remaining
5839 cases where we can do anything are if the second operand is a
5841 sub_strict_overflow_p = false;
5842 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5843 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5844 if (t1 != 0 && t2 != 0
5845 && (code == MULT_EXPR
5846 /* If not multiplication, we can only do this if both operands
5847 are divisible by c. */
5848 || (multiple_of_p (ctype, op0, c)
5849 && multiple_of_p (ctype, op1, c))))
5851 if (sub_strict_overflow_p)
5852 *strict_overflow_p = true;
5853 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5854 fold_convert (ctype, t2));
5857 /* If this was a subtraction, negate OP1 and set it to be an addition.
5858 This simplifies the logic below. */
5859 if (tcode == MINUS_EXPR)
5860 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5862 if (TREE_CODE (op1) != INTEGER_CST)
5865 /* If either OP1 or C is negative, this optimization is not safe for
5866 some of the division and remainder types while for others we need
5867 to change the code. */
5868 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5870 if (code == CEIL_DIV_EXPR)
5871 code = FLOOR_DIV_EXPR;
5872 else if (code == FLOOR_DIV_EXPR)
5873 code = CEIL_DIV_EXPR;
5874 else if (code != MULT_EXPR
5875 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5879 /* If it's a multiply or a division/modulus operation of a multiple
5880 of our constant, do the operation and verify it doesn't overflow. */
5881 if (code == MULT_EXPR
5882 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5884 op1 = const_binop (code, fold_convert (ctype, op1),
5885 fold_convert (ctype, c), 0);
5886 /* We allow the constant to overflow with wrapping semantics. */
5888 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5894 /* If we have an unsigned type that is not a sizetype, we cannot widen
5895 the operation since it will change the result if the original
5896 computation overflowed. */
5897 if (TYPE_UNSIGNED (ctype)
5898 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5902 /* If we were able to eliminate our operation from the first side,
5903 apply our operation to the second side and reform the PLUS. */
5904 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5907 /* The last case is if we are a multiply. In that case, we can
5908 apply the distributive law to commute the multiply and addition
5909 if the multiplication of the constants doesn't overflow. */
5910 if (code == MULT_EXPR)
5911 return fold_build2 (tcode, ctype,
5912 fold_build2 (code, ctype,
5913 fold_convert (ctype, op0),
5914 fold_convert (ctype, c)),
5920 /* We have a special case here if we are doing something like
5921 (C * 8) % 4 since we know that's zero. */
5922 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5923 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5924 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5925 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5926 return omit_one_operand (type, integer_zero_node, op0);
5928 /* ... fall through ... */
5930 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5931 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5932 /* If we can extract our operation from the LHS, do so and return a
5933 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5934 do something only if the second operand is a constant. */
5936 && (t1 = extract_muldiv (op0, c, code, wide_type,
5937 strict_overflow_p)) != 0)
5938 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5939 fold_convert (ctype, op1));
5940 else if (tcode == MULT_EXPR && code == MULT_EXPR
5941 && (t1 = extract_muldiv (op1, c, code, wide_type,
5942 strict_overflow_p)) != 0)
5943 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5944 fold_convert (ctype, t1));
5945 else if (TREE_CODE (op1) != INTEGER_CST)
5948 /* If these are the same operation types, we can associate them
5949 assuming no overflow. */
5951 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5952 fold_convert (ctype, c), 0))
5953 && !TREE_OVERFLOW (t1))
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5956 /* If these operations "cancel" each other, we have the main
5957 optimizations of this pass, which occur when either constant is a
5958 multiple of the other, in which case we replace this with an
5959 operation of either CODE or TCODE.
5961 If we have an unsigned type that is not a sizetype, we cannot do
5962 this since it will change the result if the original computation
5964 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5965 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5966 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5967 || (tcode == MULT_EXPR
5968 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5969 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5970 && code != MULT_EXPR)))
5972 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5974 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5975 *strict_overflow_p = true;
5976 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5977 fold_convert (ctype,
5978 const_binop (TRUNC_DIV_EXPR,
5981 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5983 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5984 *strict_overflow_p = true;
5985 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5986 fold_convert (ctype,
5987 const_binop (TRUNC_DIV_EXPR,
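/* [Editor's illustration -- not part of fold-const.c.]  The headline
   extract_muldiv transformation at the source level.  The rewrite is only
   justified because signed overflow is undefined (or provably absent), which
   is what *STRICT_OVERFLOW_P records; with a wrapping unsigned type the two
   forms can differ.  Hypothetical helpers.  */

static int
ex_muldiv_original (int x, int y)
{
  return (x * 8 + y * 16) / 4;		/* every term is a multiple of 4 ...  */
}

static int
ex_muldiv_folded (int x, int y)
{
  return x * 2 + y * 4;			/* ... so the division distributes away  */
}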
6000 /* Return a node which has the indicated constant VALUE (either 0 or
6001 1), and is of the indicated TYPE. */
6004 constant_boolean_node (int value, tree type)
6006 if (type == integer_type_node)
6007 return value ? integer_one_node : integer_zero_node;
6008 else if (type == boolean_type_node)
6009 return value ? boolean_true_node : boolean_false_node;
6011 return build_int_cst (type, value);
6015 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6016 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6017 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6018 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6019 COND is the first argument to CODE; otherwise (as in the example
6020 given here), it is the second argument. TYPE is the type of the
6021 original expression. Return NULL_TREE if no simplification is
6025 fold_binary_op_with_conditional_arg (enum tree_code code,
6026 tree type, tree op0, tree op1,
6027 tree cond, tree arg, int cond_first_p)
6029 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6030 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6031 tree test, true_value, false_value;
6032 tree lhs = NULL_TREE;
6033 tree rhs = NULL_TREE;
6035 /* This transformation is only worthwhile if we don't have to wrap
6036 arg in a SAVE_EXPR, and the operation can be simplified on at least
6037 one of the branches once it's pushed inside the COND_EXPR. */
6038 if (!TREE_CONSTANT (arg))
6041 if (TREE_CODE (cond) == COND_EXPR)
6043 test = TREE_OPERAND (cond, 0);
6044 true_value = TREE_OPERAND (cond, 1);
6045 false_value = TREE_OPERAND (cond, 2);
6046 /* If this operand throws an expression, then it does not make
6047 sense to try to perform a logical or arithmetic operation
6049 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6051 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6056 tree testtype = TREE_TYPE (cond);
6058 true_value = constant_boolean_node (true, testtype);
6059 false_value = constant_boolean_node (false, testtype);
6062 arg = fold_convert (arg_type, arg);
6065 true_value = fold_convert (cond_type, true_value);
6067 lhs = fold_build2 (code, type, true_value, arg);
6069 lhs = fold_build2 (code, type, arg, true_value);
6073 false_value = fold_convert (cond_type, false_value);
6075 rhs = fold_build2 (code, type, false_value, arg);
6077 rhs = fold_build2 (code, type, arg, false_value);
6080 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6081 return fold_convert (type, test);
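/* [Editor's illustration -- not part of fold-const.c.]  The transformation
   documented above, with a constant ARG (the TREE_CONSTANT requirement) so
   nothing needs to be wrapped in a SAVE_EXPR.  Hypothetical helper.  */

static int
ex_push_into_cond (int b, int x, int y)
{
  int original = 10 + (b ? x : y);		/* a + (b ? x : y)  */
  int pushed = (b ? 10 + x : 10 + y);		/* b ? (a + x) : (a + y)  */
  return original == pushed;			/* holds for every B, X, Y  */
}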
6085 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6087 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6088 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6089 ADDEND is the same as X.
6091 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6092 and finite. The problematic cases are when X is zero, and its mode
6093 has signed zeros. In the case of rounding towards -infinity,
6094 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6095 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6098 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6100 if (!real_zerop (addend))
6103 /* Don't allow the fold with -fsignaling-nans. */
6104 if (HONOR_SNANS (TYPE_MODE (type)))
6107 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6108 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6111 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6112 if (TREE_CODE (addend) == REAL_CST
6113 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6116 /* The mode has signed zeros, and we have to honor their sign.
6117 In this situation, there is only one case we can return true for.
6118 X - 0 is the same as X unless rounding towards -infinity is
6120 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
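/* [Editor's illustration -- not part of fold-const.c.]  Why X + 0.0 cannot be
   folded to X when signed zeros are honored, while X - 0.0 can (in the
   default rounding mode): only the addition loses the sign of a zero X.
   Sketch assuming IEEE semantics and round-to-nearest.  */

static int
ex_signed_zero_addition (void)
{
  double neg_zero = -0.0;
  double sub = neg_zero - 0.0;		/* still -0.0  */
  double add = neg_zero + 0.0;		/* +0.0: the sign is lost  */
  return (1.0 / sub < 0.0) && (1.0 / add > 0.0);   /* -Inf and +Inf tell the zeros apart  */
}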
6123 /* Subroutine of fold() that checks comparisons of built-in math
6124 functions against real constants.
6126 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6127 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6128 is the type of the result and ARG0 and ARG1 are the operands of the
6129 comparison. ARG1 must be a TREE_REAL_CST.
6131 The function returns the constant folded tree if a simplification
6132 can be made, and NULL_TREE otherwise. */
6135 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6136 tree type, tree arg0, tree arg1)
6140 if (BUILTIN_SQRT_P (fcode))
6142 tree arg = CALL_EXPR_ARG (arg0, 0);
6143 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6145 c = TREE_REAL_CST (arg1);
6146 if (REAL_VALUE_NEGATIVE (c))
6148 /* sqrt(x) < y is always false, if y is negative. */
6149 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6150 return omit_one_operand (type, integer_zero_node, arg);
6152 /* sqrt(x) > y is always true, if y is negative and we
6153 don't care about NaNs, i.e. negative values of x. */
6154 if (code == NE_EXPR || !HONOR_NANS (mode))
6155 return omit_one_operand (type, integer_one_node, arg);
6157 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6158 return fold_build2 (GE_EXPR, type, arg,
6159 build_real (TREE_TYPE (arg), dconst0));
6161 else if (code == GT_EXPR || code == GE_EXPR)
6165 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6166 real_convert (&c2, mode, &c2);
6168 if (REAL_VALUE_ISINF (c2))
6170 /* sqrt(x) > y is x == +Inf, when y is very large. */
6171 if (HONOR_INFINITIES (mode))
6172 return fold_build2 (EQ_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg), c2));
6175 /* sqrt(x) > y is always false, when y is very large
6176 and we don't care about infinities. */
6177 return omit_one_operand (type, integer_zero_node, arg);
6180 /* sqrt(x) > c is the same as x > c*c. */
6181 return fold_build2 (code, type, arg,
6182 build_real (TREE_TYPE (arg), c2));
6184 else if (code == LT_EXPR || code == LE_EXPR)
6188 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6189 real_convert (&c2, mode, &c2);
6191 if (REAL_VALUE_ISINF (c2))
6193 /* sqrt(x) < y is always true, when y is a very large
6194 value and we don't care about NaNs or Infinities. */
6195 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6196 return omit_one_operand (type, integer_one_node, arg);
6198 /* sqrt(x) < y is x != +Inf when y is very large and we
6199 don't care about NaNs. */
6200 if (! HONOR_NANS (mode))
6201 return fold_build2 (NE_EXPR, type, arg,
6202 build_real (TREE_TYPE (arg), c2));
6204 /* sqrt(x) < y is x >= 0 when y is very large and we
6205 don't care about Infinities. */
6206 if (! HONOR_INFINITIES (mode))
6207 return fold_build2 (GE_EXPR, type, arg,
6208 build_real (TREE_TYPE (arg), dconst0));
6210 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6211 if (lang_hooks.decls.global_bindings_p () != 0
6212 || CONTAINS_PLACEHOLDER_P (arg))
6215 arg = save_expr (arg);
6216 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6217 fold_build2 (GE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg),
6220 fold_build2 (NE_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg),
6225 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6226 if (! HONOR_NANS (mode))
6227 return fold_build2 (code, type, arg,
6228 build_real (TREE_TYPE (arg), c2));
6230 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6231 if (lang_hooks.decls.global_bindings_p () == 0
6232 && ! CONTAINS_PLACEHOLDER_P (arg))
6234 arg = save_expr (arg);
6235 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6236 fold_build2 (GE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg),
6239 fold_build2 (code, type, arg,
6240 build_real (TREE_TYPE (arg),
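/* [Editor's illustration -- not part of fold-const.c.]  The central sqrt fold
   above, checked at the source level for non-NaN inputs: with a non-negative
   constant C, sqrt(x) > C agrees with x > C*C.  Sketch assuming IEEE doubles
   and a correctly rounded sqrt; the NaN/Inf refinements are the guards
   handled above.  */

extern double sqrt (double);

static int
ex_sqrt_compare_agrees (double x)
{
  int unfolded, folded;

  if (x != x)				/* skip NaN here  */
    return 1;

  unfolded = (sqrt (x) > 3.0);
  folded = (x > 9.0);			/* 9.0 plays the role of c2 = C*C  */
  return unfolded == folded;
}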
6249 /* Subroutine of fold() that optimizes comparisons against Infinities,
6250 either +Inf or -Inf.
6252 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6253 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6254 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6256 The function returns the constant folded tree if a simplification
6257 can be made, and NULL_TREE otherwise. */
6260 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6262 enum machine_mode mode;
6263 REAL_VALUE_TYPE max;
6267 mode = TYPE_MODE (TREE_TYPE (arg0));
6269 /* For negative infinity swap the sense of the comparison. */
6270 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6272 code = swap_tree_comparison (code);
6277 /* x > +Inf is always false, if we ignore sNaNs. */
6278 if (HONOR_SNANS (mode))
6280 return omit_one_operand (type, integer_zero_node, arg0);
6283 /* x <= +Inf is always true, if we don't care about NaNs. */
6284 if (! HONOR_NANS (mode))
6285 return omit_one_operand (type, integer_one_node, arg0);
6287 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6288 if (lang_hooks.decls.global_bindings_p () == 0
6289 && ! CONTAINS_PLACEHOLDER_P (arg0))
6291 arg0 = save_expr (arg0);
6292 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6298 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6299 real_maxval (&max, neg, mode);
6300 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6301 arg0, build_real (TREE_TYPE (arg0), max));
6304 /* x < +Inf is always equal to x <= DBL_MAX. */
6305 real_maxval (&max, neg, mode);
6306 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6307 arg0, build_real (TREE_TYPE (arg0), max));
6310 /* x != +Inf is always equal to !(x > DBL_MAX). */
6311 real_maxval (&max, neg, mode);
6312 if (! HONOR_NANS (mode))
6313 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6314 arg0, build_real (TREE_TYPE (arg0), max));
6316 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6318 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
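/* [Editor's illustration -- not part of fold-const.c.]  The LT_EXPR case
   above: for non-NaN x, "x < +Inf" is the same as "x <= DBL_MAX".  Sketch
   assuming IEEE binary64, where 1.0/0.0 is +Inf and DBL_MAX is the literal
   below.  */

static int
ex_lt_inf_agrees (double x)
{
  int unfolded, folded;

  if (x != x)				/* NaN takes the HONOR_NANS paths above  */
    return 1;

  unfolded = (x < 1.0 / 0.0);			   /* x < +Inf  */
  folded = (x <= 1.7976931348623157e+308);	   /* x <= DBL_MAX  */
  return unfolded == folded;
}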
6327 /* Subroutine of fold() that optimizes comparisons of a division by
6328 a nonzero integer constant against an integer constant, i.e.
6331 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6332 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6333 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6335 The function returns the constant folded tree if a simplification
6336 can be made, and NULL_TREE otherwise. */
6339 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6341 tree prod, tmp, hi, lo;
6342 tree arg00 = TREE_OPERAND (arg0, 0);
6343 tree arg01 = TREE_OPERAND (arg0, 1);
6344 unsigned HOST_WIDE_INT lpart;
6345 HOST_WIDE_INT hpart;
6346 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6350 /* We have to do this the hard way to detect unsigned overflow.
6351 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6352 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6353 TREE_INT_CST_HIGH (arg01),
6354 TREE_INT_CST_LOW (arg1),
6355 TREE_INT_CST_HIGH (arg1),
6356 &lpart, &hpart, unsigned_p);
6357 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6359 neg_overflow = false;
6363 tmp = int_const_binop (MINUS_EXPR, arg01,
6364 build_int_cst (TREE_TYPE (arg01), 1), 0);
6367 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6368 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6369 TREE_INT_CST_HIGH (prod),
6370 TREE_INT_CST_LOW (tmp),
6371 TREE_INT_CST_HIGH (tmp),
6372 &lpart, &hpart, unsigned_p);
6373 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6374 -1, overflow | TREE_OVERFLOW (prod));
6376 else if (tree_int_cst_sgn (arg01) >= 0)
6378 tmp = int_const_binop (MINUS_EXPR, arg01,
6379 build_int_cst (TREE_TYPE (arg01), 1), 0);
6380 switch (tree_int_cst_sgn (arg1))
6383 neg_overflow = true;
6384 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6389 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6394 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6404 /* A negative divisor reverses the relational operators. */
6405 code = swap_tree_comparison (code);
6407 tmp = int_const_binop (PLUS_EXPR, arg01,
6408 build_int_cst (TREE_TYPE (arg01), 1), 0);
6409 switch (tree_int_cst_sgn (arg1))
6412 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6417 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6422 neg_overflow = true;
6423 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6435 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6436 return omit_one_operand (type, integer_zero_node, arg00);
6437 if (TREE_OVERFLOW (hi))
6438 return fold_build2 (GE_EXPR, type, arg00, lo);
6439 if (TREE_OVERFLOW (lo))
6440 return fold_build2 (LE_EXPR, type, arg00, hi);
6441 return build_range_check (type, arg00, 1, lo, hi);
6444 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6445 return omit_one_operand (type, integer_one_node, arg00);
6446 if (TREE_OVERFLOW (hi))
6447 return fold_build2 (LT_EXPR, type, arg00, lo);
6448 if (TREE_OVERFLOW (lo))
6449 return fold_build2 (GT_EXPR, type, arg00, hi);
6450 return build_range_check (type, arg00, 0, lo, hi);
6453 if (TREE_OVERFLOW (lo))
6455 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6456 return omit_one_operand (type, tmp, arg00);
6458 return fold_build2 (LT_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (hi))
6463 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6464 return omit_one_operand (type, tmp, arg00);
6466 return fold_build2 (LE_EXPR, type, arg00, hi);
6469 if (TREE_OVERFLOW (hi))
6471 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6472 return omit_one_operand (type, tmp, arg00);
6474 return fold_build2 (GT_EXPR, type, arg00, hi);
6477 if (TREE_OVERFLOW (lo))
6479 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6480 return omit_one_operand (type, tmp, arg00);
6482 return fold_build2 (GE_EXPR, type, arg00, lo);
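/* An illustrative sketch, not part of GCC and kept out of the build:
   with C's truncating division and the positive constants C1 = 4 and
   C2 = 3, fold_div_compare turns X / 4 == 3 into the range check
   12 <= X <= 15 (prod = 12, hi = prod + (4 - 1)); the helper name is
   made up for the example.  */
#if 0
#include <assert.h>

static void
div_compare_example (void)
{
  int x;
  for (x = -64; x <= 64; x++)
    assert ((x / 4 == 3) == (x >= 12 && x <= 15));
}
#endif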
6492 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6493 equality/inequality test, then return a simplified form of the test
6494 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6498 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6501 /* If this is testing a single bit, we can optimize the test. */
6502 if ((code == NE_EXPR || code == EQ_EXPR)
6503 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6504 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6506 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6507 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6508 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6510 if (arg00 != NULL_TREE
6511 /* This is only a win if casting to a signed type is cheap,
6512 i.e. when arg00's type is not a partial mode. */
6513 && TYPE_PRECISION (TREE_TYPE (arg00))
6514 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6516 tree stype = signed_type_for (TREE_TYPE (arg00));
6517 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6518 result_type, fold_convert (stype, arg00),
6519 build_int_cst (stype, 0));
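/* An illustrative sketch, not part of GCC and kept out of the build:
   when the masked bit is the sign bit, the test collapses into a signed
   comparison against zero, which is the shape the fold above produces;
   the helper name is made up for the example.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
sign_test_example (int32_t a)
{
  assert ((((uint32_t) a & 0x80000000u) != 0) == (a < 0));
  assert ((((uint32_t) a & 0x80000000u) == 0) == (a >= 0));
}
#endif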
6526 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6527 equality/inequality test, then return a simplified form of
6528 the test using shifts and logical operations. Otherwise return
6529 NULL. TYPE is the desired result type. */
6532 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6535 /* If this is testing a single bit, we can optimize the test. */
6536 if ((code == NE_EXPR || code == EQ_EXPR)
6537 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6538 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6540 tree inner = TREE_OPERAND (arg0, 0);
6541 tree type = TREE_TYPE (arg0);
6542 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6543 enum machine_mode operand_mode = TYPE_MODE (type);
6545 tree signed_type, unsigned_type, intermediate_type;
6548 /* First, see if we can fold the single bit test into a sign-bit
6550 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6555 /* Otherwise we have (A & C) != 0 where C is a single bit,
6556 convert that into ((A >> C2) & 1), where C2 = log2(C).
6557 Similarly for (A & C) == 0. */
6559 /* If INNER is a right shift of a constant and it plus BITNUM does
6560 not overflow, adjust BITNUM and INNER. */
6561 if (TREE_CODE (inner) == RSHIFT_EXPR
6562 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6563 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6564 && bitnum < TYPE_PRECISION (type)
6565 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6566 bitnum - TYPE_PRECISION (type)))
6568 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6569 inner = TREE_OPERAND (inner, 0);
6572 /* If we are going to be able to omit the AND below, we must do our
6573 operations as unsigned. If we must use the AND, we have a choice.
6574 Normally unsigned is faster, but for some machines signed is. */
6575 #ifdef LOAD_EXTEND_OP
6576 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6577 && !flag_syntax_only) ? 0 : 1;
6582 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6583 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6584 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6585 inner = fold_convert (intermediate_type, inner);
6588 inner = build2 (RSHIFT_EXPR, intermediate_type,
6589 inner, size_int (bitnum));
6591 one = build_int_cst (intermediate_type, 1);
6593 if (code == EQ_EXPR)
6594 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6596 /* Put the AND last so it can combine with more things. */
6597 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6599 /* Make sure to return the proper type. */
6600 inner = fold_convert (result_type, inner);
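/* An illustrative sketch, not part of GCC and kept out of the build:
   testing one bit with a mask is equivalent to shifting that bit down
   and masking with 1, the shape fold_single_bit_test builds; the
   EQ_EXPR variant additionally XORs the shifted bit with 1.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
single_bit_test_example (uint32_t a, unsigned bitnum)
{
  uint32_t mask = (uint32_t) 1 << bitnum;    /* bitnum must be < 32.  */
  assert (((a & mask) != 0) == ((a >> bitnum) & 1));
  assert (((a & mask) == 0) == (((a >> bitnum) & 1) ^ 1));
}
#endif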
6607 /* Check whether we are allowed to reorder operands arg0 and arg1,
6608 such that the evaluation of arg1 occurs before arg0. */
6611 reorder_operands_p (const_tree arg0, const_tree arg1)
6613 if (! flag_evaluation_order)
6615 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6617 return ! TREE_SIDE_EFFECTS (arg0)
6618 && ! TREE_SIDE_EFFECTS (arg1);
6621 /* Test whether it is preferable to swap two operands, ARG0 and
6622 ARG1, for example because ARG0 is an integer constant and ARG1
6623 isn't. If REORDER is true, only recommend swapping if we can
6624 evaluate the operands in reverse order. */
6627 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6629 STRIP_SIGN_NOPS (arg0);
6630 STRIP_SIGN_NOPS (arg1);
6632 if (TREE_CODE (arg1) == INTEGER_CST)
6634 if (TREE_CODE (arg0) == INTEGER_CST)
6637 if (TREE_CODE (arg1) == REAL_CST)
6639 if (TREE_CODE (arg0) == REAL_CST)
6642 if (TREE_CODE (arg1) == FIXED_CST)
6644 if (TREE_CODE (arg0) == FIXED_CST)
6647 if (TREE_CODE (arg1) == COMPLEX_CST)
6649 if (TREE_CODE (arg0) == COMPLEX_CST)
6652 if (TREE_CONSTANT (arg1))
6654 if (TREE_CONSTANT (arg0))
6660 if (reorder && flag_evaluation_order
6661 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6664 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6665 for commutative and comparison operators. Ensuring a canonical
6666 form allows the optimizers to find additional redundancies without
6667 having to explicitly check for both orderings. */
6668 if (TREE_CODE (arg0) == SSA_NAME
6669 && TREE_CODE (arg1) == SSA_NAME
6670 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6673 /* Put SSA_NAMEs last. */
6674 if (TREE_CODE (arg1) == SSA_NAME)
6676 if (TREE_CODE (arg0) == SSA_NAME)
6679 /* Put variables last. */
6688 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6689 ARG0 is extended to a wider type. */
6692 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6694 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6696 tree shorter_type, outer_type;
6700 if (arg0_unw == arg0)
6702 shorter_type = TREE_TYPE (arg0_unw);
6704 #ifdef HAVE_canonicalize_funcptr_for_compare
6705 /* Disable this optimization if we're casting a function pointer
6706 type on targets that require function pointer canonicalization. */
6707 if (HAVE_canonicalize_funcptr_for_compare
6708 && TREE_CODE (shorter_type) == POINTER_TYPE
6709 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6713 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6716 arg1_unw = get_unwidened (arg1, NULL_TREE);
6718 /* If possible, express the comparison in the shorter mode. */
6719 if ((code == EQ_EXPR || code == NE_EXPR
6720 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6721 && (TREE_TYPE (arg1_unw) == shorter_type
6722 || (TYPE_PRECISION (shorter_type)
6723 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6724 || (TREE_CODE (arg1_unw) == INTEGER_CST
6725 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6726 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6727 && int_fits_type_p (arg1_unw, shorter_type))))
6728 return fold_build2 (code, type, arg0_unw,
6729 fold_convert (shorter_type, arg1_unw));
6731 if (TREE_CODE (arg1_unw) != INTEGER_CST
6732 || TREE_CODE (shorter_type) != INTEGER_TYPE
6733 || !int_fits_type_p (arg1_unw, shorter_type))
6736 /* If we are comparing with an integer that does not fit into the range
6737 of the shorter type, the result is known. */
6738 outer_type = TREE_TYPE (arg1_unw);
6739 min = lower_bound_in_type (outer_type, shorter_type);
6740 max = upper_bound_in_type (outer_type, shorter_type);
6742 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6744 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6751 return omit_one_operand (type, integer_zero_node, arg0);
6756 return omit_one_operand (type, integer_one_node, arg0);
6762 return omit_one_operand (type, integer_one_node, arg0);
6764 return omit_one_operand (type, integer_zero_node, arg0);
6769 return omit_one_operand (type, integer_zero_node, arg0);
6771 return omit_one_operand (type, integer_one_node, arg0);
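/* An illustrative sketch, not part of GCC and kept out of the build:
   once an unsigned char operand has been widened to int, comparing it
   against a constant outside the char range has a known result, and a
   constant inside the range lets the comparison be done in the
   narrower type, as fold_widened_comparison does; the helper name is
   made up for the example.  */
#if 0
#include <assert.h>

static void
widened_comparison_example (unsigned char c)
{
  assert (((int) c == 300) == 0);  /* 300 does not fit in unsigned char.  */
  assert (((int) c < 300) == 1);
  assert (((int) c == 65) == (c == (unsigned char) 65));
}
#endif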
6780 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where only
6781 the signedness of ARG0 is changed. */
6784 fold_sign_changed_comparison (enum tree_code code, tree type,
6785 tree arg0, tree arg1)
6788 tree inner_type, outer_type;
6790 if (TREE_CODE (arg0) != NOP_EXPR
6791 && TREE_CODE (arg0) != CONVERT_EXPR)
6794 outer_type = TREE_TYPE (arg0);
6795 arg0_inner = TREE_OPERAND (arg0, 0);
6796 inner_type = TREE_TYPE (arg0_inner);
6798 #ifdef HAVE_canonicalize_funcptr_for_compare
6799 /* Disable this optimization if we're casting a function pointer
6800 type on targets that require function pointer canonicalization. */
6801 if (HAVE_canonicalize_funcptr_for_compare
6802 && TREE_CODE (inner_type) == POINTER_TYPE
6803 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6807 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6810 /* If the conversion is from an integral subtype to its basetype, leave it alone. */
6812 if (TREE_TYPE (inner_type) == outer_type)
6815 if (TREE_CODE (arg1) != INTEGER_CST
6816 && !((TREE_CODE (arg1) == NOP_EXPR
6817 || TREE_CODE (arg1) == CONVERT_EXPR)
6818 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6821 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 if (TREE_CODE (arg1) == INTEGER_CST)
6827 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6828 TREE_INT_CST_HIGH (arg1), 0,
6829 TREE_OVERFLOW (arg1));
6831 arg1 = fold_convert (inner_type, arg1);
6833 return fold_build2 (code, type, arg0_inner, arg1);
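/* An illustrative sketch, not part of GCC and kept out of the build:
   when only the signedness of the converted operand changes, an
   equality test can be carried out on the inner operand with the
   constant re-expressed in the inner type, e.g. for a 32-bit int i,
   (unsigned) i == 5u exactly when i == 5 and (unsigned) i == UINT_MAX
   exactly when i == -1.  */
#if 0
#include <assert.h>
#include <limits.h>

static void
sign_changed_comparison_example (int i)
{
  assert (((unsigned int) i == 5u) == (i == 5));
  assert (((unsigned int) i == UINT_MAX) == (i == -1));
}
#endif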
6836 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6837 the step of the array. Reconstructs s and delta in the case of s * delta
6838 being an integer constant (and thus already folded).
6839 ADDR is the address. OP1 is the multiplicative expression.
6840 If the function succeeds, the new address expression is returned. Otherwise
6841 NULL_TREE is returned. */
6844 try_move_mult_to_index (tree addr, tree op1)
6846 tree s, delta, step;
6847 tree ref = TREE_OPERAND (addr, 0), pref;
6852 /* Strip the nops that might be added when converting op1 to sizetype. */
6855 /* Canonicalize op1 into a possibly non-constant delta
6856 and an INTEGER_CST s. */
6857 if (TREE_CODE (op1) == MULT_EXPR)
6859 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6864 if (TREE_CODE (arg0) == INTEGER_CST)
6869 else if (TREE_CODE (arg1) == INTEGER_CST)
6877 else if (TREE_CODE (op1) == INTEGER_CST)
6884 /* Treat op1 as delta * 1. */
6886 s = integer_one_node;
6889 for (;; ref = TREE_OPERAND (ref, 0))
6891 if (TREE_CODE (ref) == ARRAY_REF)
6893 /* Remember if this was a multi-dimensional array. */
6894 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6897 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6901 step = array_ref_element_size (ref);
6902 if (TREE_CODE (step) != INTEGER_CST)
6907 if (! tree_int_cst_equal (step, s))
6912 /* Check whether delta is a multiple of step. */
6913 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6919 /* Only fold here if we can verify we do not overflow one
6920 dimension of a multi-dimensional array. */
6925 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6926 || !INTEGRAL_TYPE_P (itype)
6927 || !TYPE_MAX_VALUE (itype)
6928 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6931 tmp = fold_binary (PLUS_EXPR, itype,
6932 fold_convert (itype,
6933 TREE_OPERAND (ref, 1)),
6934 fold_convert (itype, delta));
6936 || TREE_CODE (tmp) != INTEGER_CST
6937 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6946 if (!handled_component_p (ref))
6950 /* We found a suitable array reference. So copy everything up to it,
6951 and replace the index. */
6953 pref = TREE_OPERAND (addr, 0);
6954 ret = copy_node (pref);
6959 pref = TREE_OPERAND (pref, 0);
6960 TREE_OPERAND (pos, 0) = copy_node (pref);
6961 pos = TREE_OPERAND (pos, 0);
6964 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6965 fold_convert (itype,
6966 TREE_OPERAND (pos, 1)),
6967 fold_convert (itype, delta));
6969 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
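/* An illustrative sketch, not part of GCC and kept out of the build:
   adding step * delta bytes to &a[idx], where step is the element
   size, lands on &a[idx + delta]; that is the address-level rewrite
   try_move_mult_to_index performs on trees.  */
#if 0
#include <assert.h>
#include <stddef.h>

static void
move_mult_to_index_example (void)
{
  int a[16];
  size_t idx = 3, delta = 5;
  char *p = (char *) &a[idx] + sizeof (int) * delta;
  assert ((int *) p == &a[idx + delta]);
}
#endif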
6973 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6974 means A >= Y && A != MAX, but in this case we know that
6975 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6978 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6980 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6982 if (TREE_CODE (bound) == LT_EXPR)
6983 a = TREE_OPERAND (bound, 0);
6984 else if (TREE_CODE (bound) == GT_EXPR)
6985 a = TREE_OPERAND (bound, 1);
6989 typea = TREE_TYPE (a);
6990 if (!INTEGRAL_TYPE_P (typea)
6991 && !POINTER_TYPE_P (typea))
6994 if (TREE_CODE (ineq) == LT_EXPR)
6996 a1 = TREE_OPERAND (ineq, 1);
6997 y = TREE_OPERAND (ineq, 0);
6999 else if (TREE_CODE (ineq) == GT_EXPR)
7001 a1 = TREE_OPERAND (ineq, 0);
7002 y = TREE_OPERAND (ineq, 1);
7007 if (TREE_TYPE (a1) != typea)
7010 if (POINTER_TYPE_P (typea))
7012 /* Convert the pointers into integers before taking the difference. */
7013 tree ta = fold_convert (ssizetype, a);
7014 tree ta1 = fold_convert (ssizetype, a1);
7015 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7018 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7020 if (!diff || !integer_onep (diff))
7023 return fold_build2 (GE_EXPR, type, a, y);
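/* An illustrative sketch, not part of GCC and kept out of the build:
   for integers, A + 1 > Y is the same as A >= Y provided A + 1 does
   not wrap; the BOUND comparison A < X guarantees A is below the type
   maximum, which is exactly what the fold above relies on.  */
#if 0
#include <assert.h>

static void
nonsharp_ineq_example (int a, int x, int y)
{
  if (a < x)      /* BOUND: A < X, hence A < INT_MAX and A + 1 is safe.  */
    assert ((a + 1 > y) == (a >= y));
}
#endif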
7026 /* Fold a sum or difference in which at least one operand is a multiplication.
7027 Returns the folded tree or NULL if no simplification could be made. */
7030 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7032 tree arg00, arg01, arg10, arg11;
7033 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7035 /* (A * C) +- (B * C) -> (A+-B) * C.
7036 (A * C) +- A -> A * (C+-1).
7037 We are most concerned about the case where C is a constant,
7038 but other combinations show up during loop reduction. Since
7039 it is not difficult, try all four possibilities. */
7041 if (TREE_CODE (arg0) == MULT_EXPR)
7043 arg00 = TREE_OPERAND (arg0, 0);
7044 arg01 = TREE_OPERAND (arg0, 1);
7046 else if (TREE_CODE (arg0) == INTEGER_CST)
7048 arg00 = build_one_cst (type);
7053 /* We cannot generate constant 1 for fract. */
7054 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7057 arg01 = build_one_cst (type);
7059 if (TREE_CODE (arg1) == MULT_EXPR)
7061 arg10 = TREE_OPERAND (arg1, 0);
7062 arg11 = TREE_OPERAND (arg1, 1);
7064 else if (TREE_CODE (arg1) == INTEGER_CST)
7066 arg10 = build_one_cst (type);
7071 /* We cannot generate constant 1 for fract. */
7072 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7075 arg11 = build_one_cst (type);
7079 if (operand_equal_p (arg01, arg11, 0))
7080 same = arg01, alt0 = arg00, alt1 = arg10;
7081 else if (operand_equal_p (arg00, arg10, 0))
7082 same = arg00, alt0 = arg01, alt1 = arg11;
7083 else if (operand_equal_p (arg00, arg11, 0))
7084 same = arg00, alt0 = arg01, alt1 = arg10;
7085 else if (operand_equal_p (arg01, arg10, 0))
7086 same = arg01, alt0 = arg00, alt1 = arg11;
7088 /* No identical multiplicands; see if we can find a common
7089 power-of-two factor in non-power-of-two multiplies. This
7090 can help in multi-dimensional array access. */
7091 else if (host_integerp (arg01, 0)
7092 && host_integerp (arg11, 0))
7094 HOST_WIDE_INT int01, int11, tmp;
7097 int01 = TREE_INT_CST_LOW (arg01);
7098 int11 = TREE_INT_CST_LOW (arg11);
7100 /* Move min of absolute values to int11. */
7101 if ((int01 >= 0 ? int01 : -int01)
7102 < (int11 >= 0 ? int11 : -int11))
7104 tmp = int01, int01 = int11, int11 = tmp;
7105 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7112 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7114 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7115 build_int_cst (TREE_TYPE (arg00),
7120 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7125 return fold_build2 (MULT_EXPR, type,
7126 fold_build2 (code, type,
7127 fold_convert (type, alt0),
7128 fold_convert (type, alt1)),
7129 fold_convert (type, same));
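/* An illustrative sketch, not part of GCC and kept out of the build:
   the distributive rewrites performed above, checked on unsigned
   operands where modulo arithmetic keeps both sides equal for all
   inputs.  */
#if 0
#include <assert.h>

static void
plusminus_mult_example (unsigned a, unsigned b, unsigned c)
{
  assert (a * c + b * c == (a + b) * c);   /* (A*C) + (B*C) -> (A+B) * C.  */
  assert (a * c - b * c == (a - b) * c);   /* (A*C) - (B*C) -> (A-B) * C.  */
  assert (a * c + a == a * (c + 1));       /* (A*C) + A     -> A * (C+1).  */
}
#endif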
7134 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7135 specified by EXPR into the buffer PTR of length LEN bytes.
7136 Return the number of bytes placed in the buffer, or zero upon failure. */
7140 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7142 tree type = TREE_TYPE (expr);
7143 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7144 int byte, offset, word, words;
7145 unsigned char value;
7147 if (total_bytes > len)
7149 words = total_bytes / UNITS_PER_WORD;
7151 for (byte = 0; byte < total_bytes; byte++)
7153 int bitpos = byte * BITS_PER_UNIT;
7154 if (bitpos < HOST_BITS_PER_WIDE_INT)
7155 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7157 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7158 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7160 if (total_bytes > UNITS_PER_WORD)
7162 word = byte / UNITS_PER_WORD;
7163 if (WORDS_BIG_ENDIAN)
7164 word = (words - 1) - word;
7165 offset = word * UNITS_PER_WORD;
7166 if (BYTES_BIG_ENDIAN)
7167 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7169 offset += byte % UNITS_PER_WORD;
7172 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7173 ptr[offset] = value;
7179 /* Subroutine of native_encode_expr. Encode the REAL_CST
7180 specified by EXPR into the buffer PTR of length LEN bytes.
7181 Return the number of bytes placed in the buffer, or zero upon failure. */
7185 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7187 tree type = TREE_TYPE (expr);
7188 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7189 int byte, offset, word, words, bitpos;
7190 unsigned char value;
7192 /* There are always 32 bits in each long, no matter the size of
7193 the host's long. We handle floating point representations with
7197 if (total_bytes > len)
7199 words = 32 / UNITS_PER_WORD;
7201 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7203 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7204 bitpos += BITS_PER_UNIT)
7206 byte = (bitpos / BITS_PER_UNIT) & 3;
7207 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7209 if (UNITS_PER_WORD < 4)
7211 word = byte / UNITS_PER_WORD;
7212 if (WORDS_BIG_ENDIAN)
7213 word = (words - 1) - word;
7214 offset = word * UNITS_PER_WORD;
7215 if (BYTES_BIG_ENDIAN)
7216 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7218 offset += byte % UNITS_PER_WORD;
7221 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7222 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7227 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7228 specified by EXPR into the buffer PTR of length LEN bytes.
7229 Return the number of bytes placed in the buffer, or zero upon failure. */
7233 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7238 part = TREE_REALPART (expr);
7239 rsize = native_encode_expr (part, ptr, len);
7242 part = TREE_IMAGPART (expr);
7243 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7246 return rsize + isize;
7250 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7251 specified by EXPR into the buffer PTR of length LEN bytes.
7252 Return the number of bytes placed in the buffer, or zero upon failure. */
7256 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7258 int i, size, offset, count;
7259 tree itype, elem, elements;
7262 elements = TREE_VECTOR_CST_ELTS (expr);
7263 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7264 itype = TREE_TYPE (TREE_TYPE (expr));
7265 size = GET_MODE_SIZE (TYPE_MODE (itype));
7266 for (i = 0; i < count; i++)
7270 elem = TREE_VALUE (elements);
7271 elements = TREE_CHAIN (elements);
7278 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7283 if (offset + size > len)
7285 memset (ptr+offset, 0, size);
7293 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7294 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7295 buffer PTR of length LEN bytes. Return the number of bytes
7296 placed in the buffer, or zero upon failure. */
7299 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7301 switch (TREE_CODE (expr))
7304 return native_encode_int (expr, ptr, len);
7307 return native_encode_real (expr, ptr, len);
7310 return native_encode_complex (expr, ptr, len);
7313 return native_encode_vector (expr, ptr, len);
7321 /* Subroutine of native_interpret_expr. Interpret the contents of
7322 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7323 If the buffer cannot be interpreted, return NULL_TREE. */
7326 native_interpret_int (tree type, const unsigned char *ptr, int len)
7328 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7329 int byte, offset, word, words;
7330 unsigned char value;
7331 unsigned HOST_WIDE_INT lo = 0;
7332 HOST_WIDE_INT hi = 0;
7334 if (total_bytes > len)
7336 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7338 words = total_bytes / UNITS_PER_WORD;
7340 for (byte = 0; byte < total_bytes; byte++)
7342 int bitpos = byte * BITS_PER_UNIT;
7343 if (total_bytes > UNITS_PER_WORD)
7345 word = byte / UNITS_PER_WORD;
7346 if (WORDS_BIG_ENDIAN)
7347 word = (words - 1) - word;
7348 offset = word * UNITS_PER_WORD;
7349 if (BYTES_BIG_ENDIAN)
7350 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7352 offset += byte % UNITS_PER_WORD;
7355 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7356 value = ptr[offset];
7358 if (bitpos < HOST_BITS_PER_WIDE_INT)
7359 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7361 hi |= (unsigned HOST_WIDE_INT) value
7362 << (bitpos - HOST_BITS_PER_WIDE_INT);
7365 return build_int_cst_wide_type (type, lo, hi);
7369 /* Subroutine of native_interpret_expr. Interpret the contents of
7370 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7371 If the buffer cannot be interpreted, return NULL_TREE. */
7374 native_interpret_real (tree type, const unsigned char *ptr, int len)
7376 enum machine_mode mode = TYPE_MODE (type);
7377 int total_bytes = GET_MODE_SIZE (mode);
7378 int byte, offset, word, words, bitpos;
7379 unsigned char value;
7380 /* There are always 32 bits in each long, no matter the size of
7381 the host's long. We handle floating point representations with
7386 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7387 if (total_bytes > len || total_bytes > 24)
7389 words = 32 / UNITS_PER_WORD;
7391 memset (tmp, 0, sizeof (tmp));
7392 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7393 bitpos += BITS_PER_UNIT)
7395 byte = (bitpos / BITS_PER_UNIT) & 3;
7396 if (UNITS_PER_WORD < 4)
7398 word = byte / UNITS_PER_WORD;
7399 if (WORDS_BIG_ENDIAN)
7400 word = (words - 1) - word;
7401 offset = word * UNITS_PER_WORD;
7402 if (BYTES_BIG_ENDIAN)
7403 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7405 offset += byte % UNITS_PER_WORD;
7408 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7409 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7411 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7414 real_from_target (&r, tmp, mode);
7415 return build_real (type, r);
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7424 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7426 tree etype, rpart, ipart;
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7433 rpart = native_interpret_expr (etype, ptr, size);
7436 ipart = native_interpret_expr (etype, ptr+size, size);
7439 return build_complex (type, rpart, ipart);
7443 /* Subroutine of native_interpret_expr. Interpret the contents of
7444 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7445 If the buffer cannot be interpreted, return NULL_TREE. */
7448 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7450 tree etype, elem, elements;
7453 etype = TREE_TYPE (type);
7454 size = GET_MODE_SIZE (TYPE_MODE (etype));
7455 count = TYPE_VECTOR_SUBPARTS (type);
7456 if (size * count > len)
7459 elements = NULL_TREE;
7460 for (i = count - 1; i >= 0; i--)
7462 elem = native_interpret_expr (etype, ptr+(i*size), size);
7465 elements = tree_cons (NULL_TREE, elem, elements);
7467 return build_vector (type, elements);
7471 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7472 the buffer PTR of length LEN as a constant of type TYPE. For
7473 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7474 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7475 return NULL_TREE. */
7478 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7480 switch (TREE_CODE (type))
7485 return native_interpret_int (type, ptr, len);
7488 return native_interpret_real (type, ptr, len);
7491 return native_interpret_complex (type, ptr, len);
7494 return native_interpret_vector (type, ptr, len);
7502 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7503 TYPE at compile-time. If we're unable to perform the conversion
7504 return NULL_TREE. */
7507 fold_view_convert_expr (tree type, tree expr)
7509 /* We support up to 512-bit values (for V8DFmode). */
7510 unsigned char buffer[64];
7513 /* Check that the host and target are sane. */
7514 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7517 len = native_encode_expr (expr, buffer, sizeof (buffer));
7521 return native_interpret_expr (type, buffer, len);
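/* An illustrative sketch, not part of GCC and kept out of the build:
   a VIEW_CONVERT_EXPR reinterprets the bytes of a value in another
   type; fold_view_convert_expr does at compile time what memcpy-based
   type punning does at run time.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

static uint32_t
view_convert_example (float f)
{
  uint32_t bits;
  assert (sizeof bits == sizeof f);
  memcpy (&bits, &f, sizeof bits);  /* Reinterpret the bytes, no conversion.  */
  return bits;
}
#endif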
7524 /* Build an expression for the address of T. Folds away INDIRECT_REF
7525 to avoid confusing the gimplify process. When IN_FOLD is true
7526 avoid modifications of T. */
7529 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7531 /* The size of the object is not relevant when talking about its address. */
7532 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7533 t = TREE_OPERAND (t, 0);
7535 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7536 if (TREE_CODE (t) == INDIRECT_REF
7537 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7539 t = TREE_OPERAND (t, 0);
7541 if (TREE_TYPE (t) != ptrtype)
7542 t = build1 (NOP_EXPR, ptrtype, t);
7548 while (handled_component_p (base))
7549 base = TREE_OPERAND (base, 0);
7552 TREE_ADDRESSABLE (base) = 1;
7554 t = build1 (ADDR_EXPR, ptrtype, t);
7557 t = build1 (ADDR_EXPR, ptrtype, t);
7562 /* Build an expression for the address of T with type PTRTYPE. This
7563 function modifies the input parameter 'T' by sometimes setting the
7564 TREE_ADDRESSABLE flag. */
7567 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7569 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7572 /* Build an expression for the address of T. This function modifies
7573 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7574 flag. When called from fold functions, use fold_addr_expr instead. */
7577 build_fold_addr_expr (tree t)
7579 return build_fold_addr_expr_with_type_1 (t,
7580 build_pointer_type (TREE_TYPE (t)),
7584 /* Same as build_fold_addr_expr, builds an expression for the address
7585 of T, but avoids touching the input node 't'. Fold functions
7586 should use this version. */
7589 fold_addr_expr (tree t)
7591 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7593 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7596 /* Fold a unary expression of code CODE and type TYPE with operand
7597 OP0. Return the folded expression if folding is successful.
7598 Otherwise, return NULL_TREE. */
7601 fold_unary (enum tree_code code, tree type, tree op0)
7605 enum tree_code_class kind = TREE_CODE_CLASS (code);
7607 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7608 && TREE_CODE_LENGTH (code) == 1);
7613 if (code == NOP_EXPR || code == CONVERT_EXPR
7614 || code == FLOAT_EXPR || code == ABS_EXPR)
7616 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7618 STRIP_SIGN_NOPS (arg0);
7622 /* Strip any conversions that don't change the mode. This
7623 is safe for every expression, except for a comparison
7624 expression because its signedness is derived from its operands.
7627 Note that this is done as an internal manipulation within
7628 the constant folder, in order to find the simplest
7629 representation of the arguments so that their form can be
7630 studied. In any case, the appropriate type conversions
7631 should be put back in the tree that will get out of the constant folder. */
7637 if (TREE_CODE_CLASS (code) == tcc_unary)
7639 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7640 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7641 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7642 else if (TREE_CODE (arg0) == COND_EXPR)
7644 tree arg01 = TREE_OPERAND (arg0, 1);
7645 tree arg02 = TREE_OPERAND (arg0, 2);
7646 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7647 arg01 = fold_build1 (code, type, arg01);
7648 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7649 arg02 = fold_build1 (code, type, arg02);
7650 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7653 /* If this was a conversion, and all we did was move it
7654 inside the COND_EXPR, bring it back out. But leave it if
7655 it is a conversion from integer to integer and the
7656 result precision is no wider than a word since such a
7657 conversion is cheap and may be optimized away by combine,
7658 while it couldn't if it were outside the COND_EXPR. Then return
7659 so we don't get into an infinite recursion loop taking the
7660 conversion out and then back in. */
7662 if ((code == NOP_EXPR || code == CONVERT_EXPR
7663 || code == NON_LVALUE_EXPR)
7664 && TREE_CODE (tem) == COND_EXPR
7665 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7666 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7667 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7668 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7669 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7670 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7671 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7673 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7674 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7675 || flag_syntax_only))
7676 tem = build1 (code, type,
7678 TREE_TYPE (TREE_OPERAND
7679 (TREE_OPERAND (tem, 1), 0)),
7680 TREE_OPERAND (tem, 0),
7681 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7682 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7685 else if (COMPARISON_CLASS_P (arg0))
7687 if (TREE_CODE (type) == BOOLEAN_TYPE)
7689 arg0 = copy_node (arg0);
7690 TREE_TYPE (arg0) = type;
7693 else if (TREE_CODE (type) != INTEGER_TYPE)
7694 return fold_build3 (COND_EXPR, type, arg0,
7695 fold_build1 (code, type,
7697 fold_build1 (code, type,
7698 integer_zero_node));
7705 /* Re-association barriers around constants and other re-association
7706 barriers can be removed. */
7707 if (CONSTANT_CLASS_P (op0)
7708 || TREE_CODE (op0) == PAREN_EXPR)
7709 return fold_convert (type, op0);
7715 case FIX_TRUNC_EXPR:
7716 if (TREE_TYPE (op0) == type)
7719 /* If we have (type) (a CMP b) and type is an integral type, return
7720 a new expression involving the new type. */
7721 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7722 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7723 TREE_OPERAND (op0, 1));
7725 /* Handle cases of two conversions in a row. */
7726 if (TREE_CODE (op0) == NOP_EXPR
7727 || TREE_CODE (op0) == CONVERT_EXPR)
7729 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7730 tree inter_type = TREE_TYPE (op0);
7731 int inside_int = INTEGRAL_TYPE_P (inside_type);
7732 int inside_ptr = POINTER_TYPE_P (inside_type);
7733 int inside_float = FLOAT_TYPE_P (inside_type);
7734 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7735 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7736 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7737 int inter_int = INTEGRAL_TYPE_P (inter_type);
7738 int inter_ptr = POINTER_TYPE_P (inter_type);
7739 int inter_float = FLOAT_TYPE_P (inter_type);
7740 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7741 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7742 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7743 int final_int = INTEGRAL_TYPE_P (type);
7744 int final_ptr = POINTER_TYPE_P (type);
7745 int final_float = FLOAT_TYPE_P (type);
7746 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7747 unsigned int final_prec = TYPE_PRECISION (type);
7748 int final_unsignedp = TYPE_UNSIGNED (type);
7750 /* In addition to the cases of two conversions in a row
7751 handled below, if we are converting something to its own
7752 type via an object of identical or wider precision, neither
7753 conversion is needed. */
7754 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7755 && (((inter_int || inter_ptr) && final_int)
7756 || (inter_float && final_float))
7757 && inter_prec >= final_prec)
7758 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7760 /* Likewise, if the intermediate and final types are either both
7761 float or both integer, we don't need the middle conversion if
7762 it is wider than the final type and doesn't change the signedness
7763 (for integers). Avoid this if the final type is a pointer
7764 since then we sometimes need the inner conversion. Likewise if
7765 the outer has a precision not equal to the size of its mode. */
7766 if (((inter_int && inside_int)
7767 || (inter_float && inside_float)
7768 || (inter_vec && inside_vec))
7769 && inter_prec >= inside_prec
7770 && (inter_float || inter_vec
7771 || inter_unsignedp == inside_unsignedp)
7772 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7773 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7775 && (! final_vec || inter_prec == inside_prec))
7776 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7778 /* If we have a sign-extension of a zero-extended value, we can
7779 replace that by a single zero-extension. */
7780 if (inside_int && inter_int && final_int
7781 && inside_prec < inter_prec && inter_prec < final_prec
7782 && inside_unsignedp && !inter_unsignedp)
7783 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7785 /* Two conversions in a row are not needed unless:
7786 - some conversion is floating-point (overstrict for now), or
7787 - some conversion is a vector (overstrict for now), or
7788 - the intermediate type is narrower than both initial and final types, or
7790 - the intermediate type and innermost type differ in signedness,
7791 and the outermost type is wider than the intermediate, or
7792 - the initial type is a pointer type and the precisions of the
7793 intermediate and final types differ, or
7794 - the final type is a pointer type and the precisions of the
7795 initial and intermediate types differ. */
7796 if (! inside_float && ! inter_float && ! final_float
7797 && ! inside_vec && ! inter_vec && ! final_vec
7798 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7799 && ! (inside_int && inter_int
7800 && inter_unsignedp != inside_unsignedp
7801 && inter_prec < final_prec)
7802 && ((inter_unsignedp && inter_prec > inside_prec)
7803 == (final_unsignedp && final_prec > inter_prec))
7804 && ! (inside_ptr && inter_prec != final_prec)
7805 && ! (final_ptr && inside_prec != inter_prec)
7806 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7807 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7808 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7811 /* Handle (T *)&A.B.C for A being of type T and B and C
7812 living at offset zero. This occurs frequently in
7813 C++ upcasting and then accessing the base. */
7814 if (TREE_CODE (op0) == ADDR_EXPR
7815 && POINTER_TYPE_P (type)
7816 && handled_component_p (TREE_OPERAND (op0, 0)))
7818 HOST_WIDE_INT bitsize, bitpos;
7820 enum machine_mode mode;
7821 int unsignedp, volatilep;
7822 tree base = TREE_OPERAND (op0, 0);
7823 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7824 &mode, &unsignedp, &volatilep, false);
7825 /* If the reference was to a (constant) zero offset, we can use
7826 the address of the base if it has the same base type
7827 as the result type. */
7828 if (! offset && bitpos == 0
7829 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7830 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7831 return fold_convert (type, fold_addr_expr (base));
7834 if ((TREE_CODE (op0) == MODIFY_EXPR
7835 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7836 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7837 /* Detect assigning a bitfield. */
7838 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7840 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7842 /* Don't leave an assignment inside a conversion
7843 unless assigning a bitfield. */
7844 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7845 /* First do the assignment, then return converted constant. */
7846 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7847 TREE_NO_WARNING (tem) = 1;
7848 TREE_USED (tem) = 1;
7852 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7853 constant (if x has signed type, the sign bit cannot be set
7854 in c). This folds extension into the BIT_AND_EXPR. */
7855 if (INTEGRAL_TYPE_P (type)
7856 && TREE_CODE (type) != BOOLEAN_TYPE
7857 && TREE_CODE (op0) == BIT_AND_EXPR
7858 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7861 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7864 if (TYPE_UNSIGNED (TREE_TYPE (and))
7865 || (TYPE_PRECISION (type)
7866 <= TYPE_PRECISION (TREE_TYPE (and))))
7868 else if (TYPE_PRECISION (TREE_TYPE (and1))
7869 <= HOST_BITS_PER_WIDE_INT
7870 && host_integerp (and1, 1))
7872 unsigned HOST_WIDE_INT cst;
7874 cst = tree_low_cst (and1, 1);
7875 cst &= (HOST_WIDE_INT) -1
7876 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7877 change = (cst == 0);
7878 #ifdef LOAD_EXTEND_OP
7880 && !flag_syntax_only
7881 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7884 tree uns = unsigned_type_for (TREE_TYPE (and0));
7885 and0 = fold_convert (uns, and0);
7886 and1 = fold_convert (uns, and1);
7892 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7893 TREE_INT_CST_HIGH (and1), 0,
7894 TREE_OVERFLOW (and1));
7895 return fold_build2 (BIT_AND_EXPR, type,
7896 fold_convert (type, and0), tem);
7900 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7901 when one of the new casts will fold away. Conservatively we assume
7902 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7903 if (POINTER_TYPE_P (type)
7904 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7905 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7906 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7907 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7909 tree arg00 = TREE_OPERAND (arg0, 0);
7910 tree arg01 = TREE_OPERAND (arg0, 1);
7912 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7913 fold_convert (sizetype, arg01));
7916 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7917 of the same precision, and X has an integer type not narrower than
7918 T1 or T2, i.e. the cast (T2)X isn't an extension. */
7919 if (INTEGRAL_TYPE_P (type)
7920 && TREE_CODE (op0) == BIT_NOT_EXPR
7921 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7922 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7923 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7924 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7926 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7927 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7928 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7929 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7932 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7933 type of X and Y (integer types only). */
7934 if (INTEGRAL_TYPE_P (type)
7935 && TREE_CODE (op0) == MULT_EXPR
7936 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7937 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7939 /* Be careful not to introduce new overflows. */
7941 if (TYPE_OVERFLOW_WRAPS (type))
7944 mult_type = unsigned_type_for (type);
7946 tem = fold_build2 (MULT_EXPR, mult_type,
7947 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7948 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7949 return fold_convert (type, tem);
7952 tem = fold_convert_const (code, type, op0);
7953 return tem ? tem : NULL_TREE;
7955 case FIXED_CONVERT_EXPR:
7956 tem = fold_convert_const (code, type, arg0);
7957 return tem ? tem : NULL_TREE;
7959 case VIEW_CONVERT_EXPR:
7960 if (TREE_TYPE (op0) == type)
7962 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7963 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7965 /* For integral conversions with the same precision or pointer
7966 conversions use a NOP_EXPR instead. */
7967 if ((INTEGRAL_TYPE_P (type)
7968 || POINTER_TYPE_P (type))
7969 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7970 || POINTER_TYPE_P (TREE_TYPE (op0)))
7971 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7972 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7973 a sub-type to its base type as generated by the Ada FE. */
7974 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7975 && TREE_TYPE (TREE_TYPE (op0))))
7976 return fold_convert (type, op0);
7978 /* Strip inner integral conversions that do not change the precision. */
7979 if ((TREE_CODE (op0) == NOP_EXPR
7980 || TREE_CODE (op0) == CONVERT_EXPR)
7981 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7982 || POINTER_TYPE_P (TREE_TYPE (op0)))
7983 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7984 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7985 && (TYPE_PRECISION (TREE_TYPE (op0))
7986 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7987 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7989 return fold_view_convert_expr (type, op0);
7992 tem = fold_negate_expr (arg0);
7994 return fold_convert (type, tem);
7998 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7999 return fold_abs_const (arg0, type);
8000 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8001 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8002 /* Convert fabs((double)float) into (double)fabsf(float). */
8003 else if (TREE_CODE (arg0) == NOP_EXPR
8004 && TREE_CODE (type) == REAL_TYPE)
8006 tree targ0 = strip_float_extensions (arg0);
8008 return fold_convert (type, fold_build1 (ABS_EXPR,
8012 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8013 else if (TREE_CODE (arg0) == ABS_EXPR)
8015 else if (tree_expr_nonnegative_p (arg0))
8018 /* Strip sign ops from argument. */
8019 if (TREE_CODE (type) == REAL_TYPE)
8021 tem = fold_strip_sign_ops (arg0);
8023 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8028 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8029 return fold_convert (type, arg0);
8030 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8032 tree itype = TREE_TYPE (type);
8033 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8034 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8035 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8037 if (TREE_CODE (arg0) == COMPLEX_CST)
8039 tree itype = TREE_TYPE (type);
8040 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8041 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8042 return build_complex (type, rpart, negate_expr (ipart));
8044 if (TREE_CODE (arg0) == CONJ_EXPR)
8045 return fold_convert (type, TREE_OPERAND (arg0, 0));
8049 if (TREE_CODE (arg0) == INTEGER_CST)
8050 return fold_not_const (arg0, type);
8051 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8052 return fold_convert (type, TREE_OPERAND (arg0, 0));
8053 /* Convert ~ (-A) to A - 1. */
8054 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8055 return fold_build2 (MINUS_EXPR, type,
8056 fold_convert (type, TREE_OPERAND (arg0, 0)),
8057 build_int_cst (type, 1));
8058 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8059 else if (INTEGRAL_TYPE_P (type)
8060 && ((TREE_CODE (arg0) == MINUS_EXPR
8061 && integer_onep (TREE_OPERAND (arg0, 1)))
8062 || (TREE_CODE (arg0) == PLUS_EXPR
8063 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8064 return fold_build1 (NEGATE_EXPR, type,
8065 fold_convert (type, TREE_OPERAND (arg0, 0)));
8066 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8067 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8068 && (tem = fold_unary (BIT_NOT_EXPR, type,
8070 TREE_OPERAND (arg0, 0)))))
8071 return fold_build2 (BIT_XOR_EXPR, type, tem,
8072 fold_convert (type, TREE_OPERAND (arg0, 1)));
8073 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8074 && (tem = fold_unary (BIT_NOT_EXPR, type,
8076 TREE_OPERAND (arg0, 1)))))
8077 return fold_build2 (BIT_XOR_EXPR, type,
8078 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8079 /* Perform BIT_NOT_EXPR on each element individually. */
8080 else if (TREE_CODE (arg0) == VECTOR_CST)
8082 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8083 int count = TYPE_VECTOR_SUBPARTS (type), i;
8085 for (i = 0; i < count; i++)
8089 elem = TREE_VALUE (elements);
8090 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8091 if (elem == NULL_TREE)
8093 elements = TREE_CHAIN (elements);
8096 elem = build_int_cst (TREE_TYPE (type), -1);
8097 list = tree_cons (NULL_TREE, elem, list);
8100 return build_vector (type, nreverse (list));
8105 case TRUTH_NOT_EXPR:
8106 /* The argument to invert_truthvalue must have Boolean type. */
8107 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8108 arg0 = fold_convert (boolean_type_node, arg0);
8110 /* Note that the operand of this must be an int
8111 and its values must be 0 or 1.
8112 ("true" is a fixed value perhaps depending on the language,
8113 but we don't handle values other than 1 correctly yet.) */
8114 tem = fold_truth_not_expr (arg0);
8117 return fold_convert (type, tem);
8120 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8121 return fold_convert (type, arg0);
8122 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8123 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8124 TREE_OPERAND (arg0, 1));
8125 if (TREE_CODE (arg0) == COMPLEX_CST)
8126 return fold_convert (type, TREE_REALPART (arg0));
8127 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8129 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8130 tem = fold_build2 (TREE_CODE (arg0), itype,
8131 fold_build1 (REALPART_EXPR, itype,
8132 TREE_OPERAND (arg0, 0)),
8133 fold_build1 (REALPART_EXPR, itype,
8134 TREE_OPERAND (arg0, 1)));
8135 return fold_convert (type, tem);
8137 if (TREE_CODE (arg0) == CONJ_EXPR)
8139 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8140 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8141 return fold_convert (type, tem);
8143 if (TREE_CODE (arg0) == CALL_EXPR)
8145 tree fn = get_callee_fndecl (arg0);
8146 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8147 switch (DECL_FUNCTION_CODE (fn))
8149 CASE_FLT_FN (BUILT_IN_CEXPI):
8150 fn = mathfn_built_in (type, BUILT_IN_COS);
8152 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8162 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8163 return fold_convert (type, integer_zero_node);
8164 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8165 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8166 TREE_OPERAND (arg0, 0));
8167 if (TREE_CODE (arg0) == COMPLEX_CST)
8168 return fold_convert (type, TREE_IMAGPART (arg0));
8169 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8171 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8172 tem = fold_build2 (TREE_CODE (arg0), itype,
8173 fold_build1 (IMAGPART_EXPR, itype,
8174 TREE_OPERAND (arg0, 0)),
8175 fold_build1 (IMAGPART_EXPR, itype,
8176 TREE_OPERAND (arg0, 1)));
8177 return fold_convert (type, tem);
8179 if (TREE_CODE (arg0) == CONJ_EXPR)
8181 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8182 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8183 return fold_convert (type, negate_expr (tem));
8185 if (TREE_CODE (arg0) == CALL_EXPR)
8187 tree fn = get_callee_fndecl (arg0);
8188 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8189 switch (DECL_FUNCTION_CODE (fn))
8191 CASE_FLT_FN (BUILT_IN_CEXPI):
8192 fn = mathfn_built_in (type, BUILT_IN_SIN);
8194 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8205 } /* switch (code) */
8208 /* Fold a binary expression of code CODE and type TYPE with operands
8209 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8210 Return the folded expression if folding is successful. Otherwise,
8211 return NULL_TREE. */
8214 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8216 enum tree_code compl_code;
8218 if (code == MIN_EXPR)
8219 compl_code = MAX_EXPR;
8220 else if (code == MAX_EXPR)
8221 compl_code = MIN_EXPR;
8225 /* MIN (MAX (a, b), b) == b. */
8226 if (TREE_CODE (op0) == compl_code
8227 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8228 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8230 /* MIN (MAX (b, a), b) == b. */
8231 if (TREE_CODE (op0) == compl_code
8232 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8233 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8234 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8236 /* MIN (a, MAX (a, b)) == a. */
8237 if (TREE_CODE (op1) == compl_code
8238 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8239 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8240 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8242 /* MIN (a, MAX (b, a)) == a. */
8243 if (TREE_CODE (op1) == compl_code
8244 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8245 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8246 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
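/* An illustrative sketch, not part of GCC and kept out of the build:
   the MIN/MAX absorption identities handled above, checked with plain
   integer macros; the macro and helper names are made up for the
   example.  */
#if 0
#include <assert.h>

#define MIN_EX(a, b) ((a) < (b) ? (a) : (b))
#define MAX_EX(a, b) ((a) > (b) ? (a) : (b))

static void
minmax_example (int a, int b)
{
  assert (MIN_EX (MAX_EX (a, b), b) == b);  /* MIN (MAX (a, b), b) == b.  */
  assert (MIN_EX (a, MAX_EX (a, b)) == a);  /* MIN (a, MAX (a, b)) == a.  */
}
#endif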
8251 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8252 by changing CODE to reduce the magnitude of constants involved in
8253 ARG0 of the comparison.
8254 Returns a canonicalized comparison tree if a simplification was
8255 possible, otherwise returns NULL_TREE.
8256 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8257 valid if signed overflow is undefined. */
8260 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8261 tree arg0, tree arg1,
8262 bool *strict_overflow_p)
8264 enum tree_code code0 = TREE_CODE (arg0);
8265 tree t, cst0 = NULL_TREE;
8269 /* Match A +- CST code arg1 and CST code arg1. */
8270 if (!(((code0 == MINUS_EXPR
8271 || code0 == PLUS_EXPR)
8272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8273 || code0 == INTEGER_CST))
8276 /* Identify the constant in arg0 and its sign. */
8277 if (code0 == INTEGER_CST)
8280 cst0 = TREE_OPERAND (arg0, 1);
8281 sgn0 = tree_int_cst_sgn (cst0);
8283 /* Overflowed constants and zero will cause problems. */
8284 if (integer_zerop (cst0)
8285 || TREE_OVERFLOW (cst0))
8288 /* See if we can reduce the magnitude of the constant in
8289 arg0 by changing the comparison code. */
8290 if (code0 == INTEGER_CST)
8292 /* CST <= arg1 -> CST-1 < arg1. */
8293 if (code == LE_EXPR && sgn0 == 1)
8295 /* -CST < arg1 -> -CST-1 <= arg1. */
8296 else if (code == LT_EXPR && sgn0 == -1)
8298 /* CST > arg1 -> CST-1 >= arg1. */
8299 else if (code == GT_EXPR && sgn0 == 1)
8301 /* -CST >= arg1 -> -CST-1 > arg1. */
8302 else if (code == GE_EXPR && sgn0 == -1)
8306 /* arg1 code' CST' might be more canonical. */
8311 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8313 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8315 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8316 else if (code == GT_EXPR
8317 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8319 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8320 else if (code == LE_EXPR
8321 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8323 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8324 else if (code == GE_EXPR
8325 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8329 *strict_overflow_p = true;
8332 /* Now build the constant reduced in magnitude. */
8333 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8334 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8335 if (code0 != INTEGER_CST)
8336 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8338 /* If swapping might yield a more canonical form, do so. */
8340 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8342 return fold_build2 (code, type, t, arg1);
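/* An illustrative sketch, not part of GCC and kept out of the build:
   the constant-magnitude reductions above on plain integers, e.g.
   CST <= x is the same as CST-1 < x, and x + CST > y is the same as
   x + (CST-1) >= y as long as the additions do not overflow, which is
   the assumption recorded via *STRICT_OVERFLOW_P.  */
#if 0
#include <assert.h>

static void
canonicalize_comparison_example (int x, int y)
{
  assert ((5 <= x) == (4 < x));              /* CST <= x -> CST-1 < x.  */
  assert ((5 > x) == (4 >= x));              /* CST > x  -> CST-1 >= x.  */
  if (-1000 < x && x < 1000)                 /* Keep x + 5 from wrapping.  */
    assert ((x + 5 > y) == (x + 4 >= y));    /* A + CST > y -> A + CST-1 >= y.  */
}
#endif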
8345 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8346 overflow further. Try to decrease the magnitude of constants involved
8347 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8348 and put sole constants at the second argument position.
8349 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8352 maybe_canonicalize_comparison (enum tree_code code, tree type,
8353 tree arg0, tree arg1)
8356 bool strict_overflow_p;
8357 const char * const warnmsg = G_("assuming signed overflow does not occur "
8358 "when reducing constant in comparison");
8360 /* In principle pointers also have undefined overflow behavior,
8361 but that causes problems elsewhere. */
8362 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8363 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8366 /* Try canonicalization by simplifying arg0. */
8367 strict_overflow_p = false;
8368 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8369 &strict_overflow_p);
8372 if (strict_overflow_p)
8373 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8377 /* Try canonicalization by simplifying arg1 using the swapped comparison code. */
8379 code = swap_tree_comparison (code);
8380 strict_overflow_p = false;
8381 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8382 &strict_overflow_p);
8383 if (t && strict_overflow_p)
8384 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8388 /* Subroutine of fold_binary. This routine performs all of the
8389 transformations that are common to the equality/inequality
8390 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8391 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8392 fold_binary should call fold_binary rather than this routine directly. Fold a comparison with
8393 tree code CODE and type TYPE with operands OP0 and OP1. Return
8394 the folded comparison or NULL_TREE. */
8397 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8399 tree arg0, arg1, tem;
8404 STRIP_SIGN_NOPS (arg0);
8405 STRIP_SIGN_NOPS (arg1);
8407 tem = fold_relational_const (code, type, arg0, arg1);
8408 if (tem != NULL_TREE)
8411 /* If one arg is a real or integer constant, put it last. */
8412 if (tree_swap_operands_p (arg0, arg1, true))
8413 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8415 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8416 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8417 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8418 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8419 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8420 && (TREE_CODE (arg1) == INTEGER_CST
8421 && !TREE_OVERFLOW (arg1)))
8423 tree const1 = TREE_OPERAND (arg0, 1);
8425 tree variable = TREE_OPERAND (arg0, 0);
8428 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8430 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8431 TREE_TYPE (arg1), const2, const1);
8433 /* If the constant operation overflowed this can be
8434 simplified as a comparison against INT_MAX/INT_MIN. */
8435 if (TREE_CODE (lhs) == INTEGER_CST
8436 && TREE_OVERFLOW (lhs))
8438 int const1_sgn = tree_int_cst_sgn (const1);
8439 enum tree_code code2 = code;
8441 /* Get the sign of the constant on the lhs if the
8442 operation were VARIABLE + CONST1. */
8443 if (TREE_CODE (arg0) == MINUS_EXPR)
8444 const1_sgn = -const1_sgn;
8446 /* The sign of the constant determines if we overflowed
8447 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8448 Canonicalize to the INT_MIN overflow by swapping the comparison code.  */
8450 if (const1_sgn == -1)
8451 code2 = swap_tree_comparison (code);
8453 /* We now can look at the canonicalized case
8454 VARIABLE + 1 CODE2 INT_MIN
8455 and decide on the result. */
8456 if (code2 == LT_EXPR
8458 || code2 == EQ_EXPR)
8459 return omit_one_operand (type, boolean_false_node, variable);
8460 else if (code2 == NE_EXPR
8462 || code2 == GT_EXPR)
8463 return omit_one_operand (type, boolean_true_node, variable);
8466 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8467 && (TREE_CODE (lhs) != INTEGER_CST
8468 || !TREE_OVERFLOW (lhs)))
8470 fold_overflow_warning (("assuming signed overflow does not occur "
8471 "when changing X +- C1 cmp C2 to "
8473 WARN_STRICT_OVERFLOW_COMPARISON);
8474 return fold_build2 (code, type, variable, lhs);
8478 /* For comparisons of pointers we can decompose it to a compile time
8479 comparison of the base objects and the offsets into the object.
8480 This requires at least one operand being an ADDR_EXPR or a
8481 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8482 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8483 && (TREE_CODE (arg0) == ADDR_EXPR
8484 || TREE_CODE (arg1) == ADDR_EXPR
8485 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8486 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8488 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8489 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8490 enum machine_mode mode;
8491 int volatilep, unsignedp;
8492 bool indirect_base0 = false, indirect_base1 = false;
8494 /* Get base and offset for the access. Strip ADDR_EXPR for
8495 get_inner_reference, but put it back by stripping INDIRECT_REF
8496 off the base object if possible. indirect_baseN will be true
8497 if baseN is not an address but refers to the object itself. */
8499 if (TREE_CODE (arg0) == ADDR_EXPR)
8501 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8502 &bitsize, &bitpos0, &offset0, &mode,
8503 &unsignedp, &volatilep, false);
8504 if (TREE_CODE (base0) == INDIRECT_REF)
8505 base0 = TREE_OPERAND (base0, 0);
8507 indirect_base0 = true;
8509 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8511 base0 = TREE_OPERAND (arg0, 0);
8512 offset0 = TREE_OPERAND (arg0, 1);
8516 if (TREE_CODE (arg1) == ADDR_EXPR)
8518 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8519 &bitsize, &bitpos1, &offset1, &mode,
8520 &unsignedp, &volatilep, false);
8521 if (TREE_CODE (base1) == INDIRECT_REF)
8522 base1 = TREE_OPERAND (base1, 0);
8524 indirect_base1 = true;
8526 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8528 base1 = TREE_OPERAND (arg1, 0);
8529 offset1 = TREE_OPERAND (arg1, 1);
8532 /* If we have equivalent bases we might be able to simplify. */
8533 if (indirect_base0 == indirect_base1
8534 && operand_equal_p (base0, base1, 0))
8536 /* We can fold this expression to a constant if the non-constant
8537 offset parts are equal. */
8538 if (offset0 == offset1
8539 || (offset0 && offset1
8540 && operand_equal_p (offset0, offset1, 0)))
8545 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8547 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8549 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8551 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8553 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8555 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8559 /* We can simplify the comparison to a comparison of the variable
8560 offset parts if the constant offset parts are equal.
8561 Be careful to use signed size type here because otherwise we
8562 mess with array offsets in the wrong way. This is possible
8563 because pointer arithmetic is restricted to remain within an
8564 object and overflow on pointer differences is undefined as of
8565 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8566 else if (bitpos0 == bitpos1)
8568 tree signed_size_type_node;
8569 signed_size_type_node = signed_type_for (size_type_node);
8571 /* By converting to signed size type we cover middle-end pointer
8572 arithmetic which operates on unsigned pointer types of size
8573 type size and ARRAY_REF offsets which are properly sign or
8574 zero extended from their type in case it is narrower than the size type.  */
8576 if (offset0 == NULL_TREE)
8577 offset0 = build_int_cst (signed_size_type_node, 0);
8579 offset0 = fold_convert (signed_size_type_node, offset0);
8580 if (offset1 == NULL_TREE)
8581 offset1 = build_int_cst (signed_size_type_node, 0);
8583 offset1 = fold_convert (signed_size_type_node, offset1);
8585 return fold_build2 (code, type, offset0, offset1);
8588 /* For non-equal bases we can simplify if they are addresses
8589 of local binding decls or constants. */
8590 else if (indirect_base0 && indirect_base1
8591 /* We know that !operand_equal_p (base0, base1, 0)
8592 because the if condition was false. But make
8593 sure two decls are not the same. */
8595 && TREE_CODE (arg0) == ADDR_EXPR
8596 && TREE_CODE (arg1) == ADDR_EXPR
8597 && (((TREE_CODE (base0) == VAR_DECL
8598 || TREE_CODE (base0) == PARM_DECL)
8599 && (targetm.binds_local_p (base0)
8600 || CONSTANT_CLASS_P (base1)))
8601 || CONSTANT_CLASS_P (base0))
8602 && (((TREE_CODE (base1) == VAR_DECL
8603 || TREE_CODE (base1) == PARM_DECL)
8604 && (targetm.binds_local_p (base1)
8605 || CONSTANT_CLASS_P (base0)))
8606 || CONSTANT_CLASS_P (base1)))
8608 if (code == EQ_EXPR)
8609 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8610 else if (code == NE_EXPR)
8611 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8613 /* For equal offsets we can simplify to a comparison of the base addresses.  */
8615 else if (bitpos0 == bitpos1
8617 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8619 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8620 && ((offset0 == offset1)
8621 || (offset0 && offset1
8622 && operand_equal_p (offset0, offset1, 0))))
8625 base0 = fold_addr_expr (base0);
8627 base1 = fold_addr_expr (base1);
8628 return fold_build2 (code, type, base0, base1);
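/* Illustrative examples of the pointer decomposition above (assuming the
   usual layout rules):  for `struct { int a; int b; } s;' the bases of
   &s.a and &s.b are equal and the offsets are constant, so &s.a < &s.b
   folds to 1 via the bit positions; and for two distinct local variables
   x and y that bind locally, &x == &y folds to 0 via the non-equal-bases
   branch.  */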
8632 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8633 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8634 the resulting offset is smaller in absolute value than the original one.  */
8636 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8637 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8638 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8639 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8640 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8641 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8642 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8644 tree const1 = TREE_OPERAND (arg0, 1);
8645 tree const2 = TREE_OPERAND (arg1, 1);
8646 tree variable1 = TREE_OPERAND (arg0, 0);
8647 tree variable2 = TREE_OPERAND (arg1, 0);
8649 const char * const warnmsg = G_("assuming signed overflow does not "
8650 "occur when combining constants around "
8653 /* Put the constant on the side where it doesn't overflow and is
8654 of lower absolute value than before. */
8655 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8656 ? MINUS_EXPR : PLUS_EXPR,
8658 if (!TREE_OVERFLOW (cst)
8659 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8661 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8662 return fold_build2 (code, type,
8664 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8668 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8669 ? MINUS_EXPR : PLUS_EXPR,
8671 if (!TREE_OVERFLOW (cst)
8672 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8674 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8675 return fold_build2 (code, type,
8676 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8682 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8683 signed arithmetic case. That form is created by the compiler
8684 often enough for folding it to be of value. One example is in
8685 computing loop trip counts after Operator Strength Reduction. */
8686 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8687 && TREE_CODE (arg0) == MULT_EXPR
8688 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8689 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8690 && integer_zerop (arg1))
8692 tree const1 = TREE_OPERAND (arg0, 1);
8693 tree const2 = arg1; /* zero */
8694 tree variable1 = TREE_OPERAND (arg0, 0);
8695 enum tree_code cmp_code = code;
8697 gcc_assert (!integer_zerop (const1));
8699 fold_overflow_warning (("assuming signed overflow does not occur when "
8700 "eliminating multiplication in comparison "
8702 WARN_STRICT_OVERFLOW_COMPARISON);
8704 /* If const1 is negative we swap the sense of the comparison. */
8705 if (tree_int_cst_sgn (const1) < 0)
8706 cmp_code = swap_tree_comparison (cmp_code);
8708 return fold_build2 (cmp_code, type, variable1, const2);
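/* Example of the multiplication elimination above: with undefined signed
   overflow, x * 4 > 0 folds to x > 0, and x * -4 > 0 folds to x < 0
   (the comparison is swapped because the constant is negative).  */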
8711 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8715 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8717 tree targ0 = strip_float_extensions (arg0);
8718 tree targ1 = strip_float_extensions (arg1);
8719 tree newtype = TREE_TYPE (targ0);
8721 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8722 newtype = TREE_TYPE (targ1);
8724 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8725 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8726 return fold_build2 (code, type, fold_convert (newtype, targ0),
8727 fold_convert (newtype, targ1));
8729 /* (-a) CMP (-b) -> b CMP a */
8730 if (TREE_CODE (arg0) == NEGATE_EXPR
8731 && TREE_CODE (arg1) == NEGATE_EXPR)
8732 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8733 TREE_OPERAND (arg0, 0));
8735 if (TREE_CODE (arg1) == REAL_CST)
8737 REAL_VALUE_TYPE cst;
8738 cst = TREE_REAL_CST (arg1);
8740 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8741 if (TREE_CODE (arg0) == NEGATE_EXPR)
8742 return fold_build2 (swap_tree_comparison (code), type,
8743 TREE_OPERAND (arg0, 0),
8744 build_real (TREE_TYPE (arg1),
8745 REAL_VALUE_NEGATE (cst)));
8747 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8748 /* a CMP (-0) -> a CMP 0 */
8749 if (REAL_VALUE_MINUS_ZERO (cst))
8750 return fold_build2 (code, type, arg0,
8751 build_real (TREE_TYPE (arg1), dconst0));
8753 /* x != NaN is always true, other ops are always false. */
8754 if (REAL_VALUE_ISNAN (cst)
8755 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8757 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8758 return omit_one_operand (type, tem, arg0);
8761 /* Fold comparisons against infinity. */
8762 if (REAL_VALUE_ISINF (cst))
8764 tem = fold_inf_compare (code, type, arg0, arg1);
8765 if (tem != NULL_TREE)
8770 /* If this is a comparison of a real constant with a PLUS_EXPR
8771 or a MINUS_EXPR of a real constant, we can convert it into a
8772 comparison with a revised real constant as long as no overflow
8773 occurs when unsafe_math_optimizations are enabled. */
8774 if (flag_unsafe_math_optimizations
8775 && TREE_CODE (arg1) == REAL_CST
8776 && (TREE_CODE (arg0) == PLUS_EXPR
8777 || TREE_CODE (arg0) == MINUS_EXPR)
8778 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8779 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8780 ? MINUS_EXPR : PLUS_EXPR,
8781 arg1, TREE_OPERAND (arg0, 1), 0))
8782 && !TREE_OVERFLOW (tem))
8783 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
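/* Example of the previous transformation: with -funsafe-math-optimizations,
   x + 1.5 < 3.0 becomes x < 1.5.  The flag is required because in general
   C2 -+ C1 may round and change the result.  */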
8785 /* Likewise, we can simplify a comparison of a real constant with
8786 a MINUS_EXPR whose first operand is also a real constant, i.e.
8787 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8788 floating-point types only if -fassociative-math is set. */
8789 if (flag_associative_math
8790 && TREE_CODE (arg1) == REAL_CST
8791 && TREE_CODE (arg0) == MINUS_EXPR
8792 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8793 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8795 && !TREE_OVERFLOW (tem))
8796 return fold_build2 (swap_tree_comparison (code), type,
8797 TREE_OPERAND (arg0, 1), tem);
8799 /* Fold comparisons against built-in math functions. */
8800 if (TREE_CODE (arg1) == REAL_CST
8801 && flag_unsafe_math_optimizations
8802 && ! flag_errno_math)
8804 enum built_in_function fcode = builtin_mathfn_code (arg0);
8806 if (fcode != END_BUILTINS)
8808 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8809 if (tem != NULL_TREE)
8815 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8816 && (TREE_CODE (arg0) == NOP_EXPR
8817 || TREE_CODE (arg0) == CONVERT_EXPR))
8819 /* If we are widening one operand of an integer comparison,
8820 see if the other operand is similarly being widened. Perhaps we
8821 can do the comparison in the narrower type. */
8822 tem = fold_widened_comparison (code, type, arg0, arg1);
8826 /* Or if we are changing signedness. */
8827 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8832 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8833 constant, we can simplify it. */
8834 if (TREE_CODE (arg1) == INTEGER_CST
8835 && (TREE_CODE (arg0) == MIN_EXPR
8836 || TREE_CODE (arg0) == MAX_EXPR)
8837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8839 tem = optimize_minmax_comparison (code, type, op0, op1);
8844 /* Simplify comparison of something with itself. (For IEEE
8845 floating-point, we can only do some of these simplifications.) */
8846 if (operand_equal_p (arg0, arg1, 0))
8851 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8852 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8853 return constant_boolean_node (1, type);
8858 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8859 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8860 return constant_boolean_node (1, type);
8861 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8864 /* For NE, we can only do this simplification if integer
8865 or we don't honor IEEE floating point NaNs. */
8866 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8867 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8869 /* ... fall through ... */
8872 return constant_boolean_node (0, type);
8878 /* If we are comparing an expression that just has comparisons
8879 of two integer values, arithmetic expressions of those comparisons,
8880 and constants, we can simplify it. There are only three cases
8881 to check: the two values can either be equal, the first can be
8882 greater, or the second can be greater. Fold the expression for
8883 those three values. Since each value must be 0 or 1, we have
8884 eight possibilities, each of which corresponds to the constant 0
8885 or 1 or one of the six possible comparisons.
8887 This handles common cases like (a > b) == 0 but also handles
8888 expressions like ((x > y) - (y > x)) > 0, which supposedly
8889 occur in macroized code. */
8891 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8893 tree cval1 = 0, cval2 = 0;
8896 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8897 /* Don't handle degenerate cases here; they should already
8898 have been handled anyway. */
8899 && cval1 != 0 && cval2 != 0
8900 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8901 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8902 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8903 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8904 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8905 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8906 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8908 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8909 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8911 /* We can't just pass T to eval_subst in case cval1 or cval2
8912 was the same as ARG1. */
8915 = fold_build2 (code, type,
8916 eval_subst (arg0, cval1, maxval,
8920 = fold_build2 (code, type,
8921 eval_subst (arg0, cval1, maxval,
8925 = fold_build2 (code, type,
8926 eval_subst (arg0, cval1, minval,
8930 /* All three of these results should be 0 or 1. Confirm they are.
8931 Then use those values to select the proper code to use. */
8933 if (TREE_CODE (high_result) == INTEGER_CST
8934 && TREE_CODE (equal_result) == INTEGER_CST
8935 && TREE_CODE (low_result) == INTEGER_CST)
8937 /* Make a 3-bit mask with the high-order bit being the
8938 value for `>', the next for '=', and the low for '<'. */
8939 switch ((integer_onep (high_result) * 4)
8940 + (integer_onep (equal_result) * 2)
8941 + integer_onep (low_result))
8945 return omit_one_operand (type, integer_zero_node, arg0);
8966 return omit_one_operand (type, integer_one_node, arg0);
8970 return save_expr (build2 (code, type, cval1, cval2));
8971 return fold_build2 (code, type, cval1, cval2);
8976 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8977 into a single range test. */
8978 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8979 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8980 && TREE_CODE (arg1) == INTEGER_CST
8981 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8982 && !integer_zerop (TREE_OPERAND (arg0, 1))
8983 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8984 && !TREE_OVERFLOW (arg1))
8986 tem = fold_div_compare (code, type, arg0, arg1);
8987 if (tem != NULL_TREE)
8991 /* Fold ~X op ~Y as Y op X. */
8992 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8993 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8995 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8996 return fold_build2 (code, type,
8997 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8998 TREE_OPERAND (arg0, 0));
9001 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9002 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9003 && TREE_CODE (arg1) == INTEGER_CST)
9005 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9006 return fold_build2 (swap_tree_comparison (code), type,
9007 TREE_OPERAND (arg0, 0),
9008 fold_build1 (BIT_NOT_EXPR, cmp_type,
9009 fold_convert (cmp_type, arg1)));
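/* Examples of the two BIT_NOT folds above:  ~x < ~y becomes y < x, and for
   a 32-bit int, ~x == 5 becomes x == -6 (that is, x == ~5).  */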
9016 /* Subroutine of fold_binary. Optimize complex multiplications of the
9017 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9018 argument EXPR represents the expression "z" of type TYPE. */
9021 fold_mult_zconjz (tree type, tree expr)
9023 tree itype = TREE_TYPE (type);
9024 tree rpart, ipart, tem;
9026 if (TREE_CODE (expr) == COMPLEX_EXPR)
9028 rpart = TREE_OPERAND (expr, 0);
9029 ipart = TREE_OPERAND (expr, 1);
9031 else if (TREE_CODE (expr) == COMPLEX_CST)
9033 rpart = TREE_REALPART (expr);
9034 ipart = TREE_IMAGPART (expr);
9038 expr = save_expr (expr);
9039 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9040 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9043 rpart = save_expr (rpart);
9044 ipart = save_expr (ipart);
9045 tem = fold_build2 (PLUS_EXPR, itype,
9046 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9047 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9048 return fold_build2 (COMPLEX_EXPR, type, tem,
9049 fold_convert (itype, integer_zero_node));
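/* A minimal sketch of the identity implemented by fold_mult_zconjz, using
   plain doubles for the real and imaginary parts (the helper is illustrative
   only):  (re + i*im) * (re - i*im) = re*re + im*im, with a zero imaginary
   part.  */
static double
mult_zconjz_sketch (double re, double im)
{
  return re * re + im * im;
}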
9053 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9054 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9055 guarantees that P and N have the same least significant log2(M) bits.
9056 N is not otherwise constrained. In particular, N is not normalized to
9057 0 <= N < M as is common. In general, the precise value of P is unknown.
9058 M is chosen as large as possible such that constant N can be determined.
9060 Returns M and sets *RESIDUE to N. */
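/* For example (illustrative, assuming an 8-byte-aligned declaration D):
   for EXPR == &D the result is M == 8 with *RESIDUE == 0, and for
   EXPR == &D p+ 3 the result is M == 8 with *RESIDUE == 3, i.e. the
   pointer value is known to be 3 more than a multiple of 8.  */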
9062 static unsigned HOST_WIDE_INT
9063 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9065 enum tree_code code;
9069 code = TREE_CODE (expr);
9070 if (code == ADDR_EXPR)
9072 expr = TREE_OPERAND (expr, 0);
9073 if (handled_component_p (expr))
9075 HOST_WIDE_INT bitsize, bitpos;
9077 enum machine_mode mode;
9078 int unsignedp, volatilep;
9080 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9081 &mode, &unsignedp, &volatilep, false);
9082 *residue = bitpos / BITS_PER_UNIT;
9085 if (TREE_CODE (offset) == INTEGER_CST)
9086 *residue += TREE_INT_CST_LOW (offset);
9088 /* We don't handle more complicated offset expressions. */
9094 return DECL_ALIGN_UNIT (expr);
9096 else if (code == POINTER_PLUS_EXPR)
9099 unsigned HOST_WIDE_INT modulus;
9100 enum tree_code inner_code;
9102 op0 = TREE_OPERAND (expr, 0);
9104 modulus = get_pointer_modulus_and_residue (op0, residue);
9106 op1 = TREE_OPERAND (expr, 1);
9108 inner_code = TREE_CODE (op1);
9109 if (inner_code == INTEGER_CST)
9111 *residue += TREE_INT_CST_LOW (op1);
9114 else if (inner_code == MULT_EXPR)
9116 op1 = TREE_OPERAND (op1, 1);
9117 if (TREE_CODE (op1) == INTEGER_CST)
9119 unsigned HOST_WIDE_INT align;
9121 /* Compute the greatest power-of-2 divisor of op1. */
9122 align = TREE_INT_CST_LOW (op1);
9125 /* If align is non-zero and less than *modulus, replace
9126 *modulus with align.  If align is 0, then either op1 is 0
9127 or the greatest power-of-2 divisor of op1 doesn't fit in an
9128 unsigned HOST_WIDE_INT. In either case, no additional
9129 constraint is imposed. */
9131 modulus = MIN (modulus, align);
9138 /* If we get here, we were unable to determine anything useful about the expression.  */
9144 /* Fold a binary expression of code CODE and type TYPE with operands
9145 OP0 and OP1. Return the folded expression if folding is
9146 successful. Otherwise, return NULL_TREE. */
9149 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9151 enum tree_code_class kind = TREE_CODE_CLASS (code);
9152 tree arg0, arg1, tem;
9153 tree t1 = NULL_TREE;
9154 bool strict_overflow_p;
9156 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9157 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9158 && TREE_CODE_LENGTH (code) == 2
9160 && op1 != NULL_TREE);
9165 /* Strip any conversions that don't change the mode. This is
9166 safe for every expression, except for a comparison expression
9167 because its signedness is derived from its operands. So, in
9168 the latter case, only strip conversions that don't change the signedness.
9171 Note that this is done as an internal manipulation within the
9172 constant folder, in order to find the simplest representation
9173 of the arguments so that their form can be studied. In any
9174 cases, the appropriate type conversions should be put back in
9175 the tree that will get out of the constant folder. */
9177 if (kind == tcc_comparison)
9179 STRIP_SIGN_NOPS (arg0);
9180 STRIP_SIGN_NOPS (arg1);
9188 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9189 constant but we can't do arithmetic on them. */
9190 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9191 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9192 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9193 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9194 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9195 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9197 if (kind == tcc_binary)
9199 /* Make sure type and arg0 have the same saturating flag. */
9200 gcc_assert (TYPE_SATURATING (type)
9201 == TYPE_SATURATING (TREE_TYPE (arg0)));
9202 tem = const_binop (code, arg0, arg1, 0);
9204 else if (kind == tcc_comparison)
9205 tem = fold_relational_const (code, type, arg0, arg1);
9209 if (tem != NULL_TREE)
9211 if (TREE_TYPE (tem) != type)
9212 tem = fold_convert (type, tem);
9217 /* If this is a commutative operation, and ARG0 is a constant, move it
9218 to ARG1 to reduce the number of tests below. */
9219 if (commutative_tree_code (code)
9220 && tree_swap_operands_p (arg0, arg1, true))
9221 return fold_build2 (code, type, op1, op0);
9223 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9225 First check for cases where an arithmetic operation is applied to a
9226 compound, conditional, or comparison operation. Push the arithmetic
9227 operation inside the compound or conditional to see if any folding
9228 can then be done. Convert comparison to conditional for this purpose.
9229 This also optimizes non-constant cases that used to be done in expand_expr.
9232 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9233 where one of the operands is a comparison and the other is a comparison, a
9234 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9235 code below would make the expression more complex. Change it to a
9236 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9237 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9239 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9240 || code == EQ_EXPR || code == NE_EXPR)
9241 && ((truth_value_p (TREE_CODE (arg0))
9242 && (truth_value_p (TREE_CODE (arg1))
9243 || (TREE_CODE (arg1) == BIT_AND_EXPR
9244 && integer_onep (TREE_OPERAND (arg1, 1)))))
9245 || (truth_value_p (TREE_CODE (arg1))
9246 && (truth_value_p (TREE_CODE (arg0))
9247 || (TREE_CODE (arg0) == BIT_AND_EXPR
9248 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9250 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9251 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9254 fold_convert (boolean_type_node, arg0),
9255 fold_convert (boolean_type_node, arg1));
9257 if (code == EQ_EXPR)
9258 tem = invert_truthvalue (tem);
9260 return fold_convert (type, tem);
9263 if (TREE_CODE_CLASS (code) == tcc_binary
9264 || TREE_CODE_CLASS (code) == tcc_comparison)
9266 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9267 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9268 fold_build2 (code, type,
9269 fold_convert (TREE_TYPE (op0),
9270 TREE_OPERAND (arg0, 1)),
9272 if (TREE_CODE (arg1) == COMPOUND_EXPR
9273 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9274 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9275 fold_build2 (code, type, op0,
9276 fold_convert (TREE_TYPE (op1),
9277 TREE_OPERAND (arg1, 1))));
9279 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9281 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9283 /*cond_first_p=*/1);
9284 if (tem != NULL_TREE)
9288 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9290 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9292 /*cond_first_p=*/0);
9293 if (tem != NULL_TREE)
9300 case POINTER_PLUS_EXPR:
9301 /* 0 +p index -> (type)index */
9302 if (integer_zerop (arg0))
9303 return non_lvalue (fold_convert (type, arg1));
9305 /* PTR +p 0 -> PTR */
9306 if (integer_zerop (arg1))
9307 return non_lvalue (fold_convert (type, arg0));
9309 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9310 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9311 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9312 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9313 fold_convert (sizetype, arg1),
9314 fold_convert (sizetype, arg0)));
9316 /* index +p PTR -> PTR +p index */
9317 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9318 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9319 return fold_build2 (POINTER_PLUS_EXPR, type,
9320 fold_convert (type, arg1),
9321 fold_convert (sizetype, arg0));
9323 /* (PTR +p B) +p A -> PTR +p (B + A) */
9324 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9327 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9328 tree arg00 = TREE_OPERAND (arg0, 0);
9329 inner = fold_build2 (PLUS_EXPR, sizetype,
9330 arg01, fold_convert (sizetype, arg1));
9331 return fold_convert (type,
9332 fold_build2 (POINTER_PLUS_EXPR,
9333 TREE_TYPE (arg00), arg00, inner));
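/* Example of the reassociation above: (PTR p+ 4) p+ 8 folds to PTR p+ 12,
   the two offsets being combined in sizetype.  */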
9336 /* PTR_CST +p CST -> CST1 */
9337 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9338 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9340 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9341 of the array. The loop optimizer sometimes produces this type of expression.  */
9343 if (TREE_CODE (arg0) == ADDR_EXPR)
9345 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9347 return fold_convert (type, tem);
9353 /* PTR + INT -> (INT)(PTR p+ INT) */
9354 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9355 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9356 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9359 fold_convert (sizetype, arg1)));
9360 /* INT + PTR -> (INT)(PTR p+ INT) */
9361 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9362 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9363 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9366 fold_convert (sizetype, arg0)));
9367 /* A + (-B) -> A - B */
9368 if (TREE_CODE (arg1) == NEGATE_EXPR)
9369 return fold_build2 (MINUS_EXPR, type,
9370 fold_convert (type, arg0),
9371 fold_convert (type, TREE_OPERAND (arg1, 0)));
9372 /* (-A) + B -> B - A */
9373 if (TREE_CODE (arg0) == NEGATE_EXPR
9374 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9375 return fold_build2 (MINUS_EXPR, type,
9376 fold_convert (type, arg1),
9377 fold_convert (type, TREE_OPERAND (arg0, 0)));
9379 if (INTEGRAL_TYPE_P (type))
9381 /* Convert ~A + 1 to -A. */
9382 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9383 && integer_onep (arg1))
9384 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9387 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9388 && !TYPE_OVERFLOW_TRAPS (type))
9390 tree tem = TREE_OPERAND (arg0, 0);
9393 if (operand_equal_p (tem, arg1, 0))
9395 t1 = build_int_cst_type (type, -1);
9396 return omit_one_operand (type, t1, arg1);
9401 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9402 && !TYPE_OVERFLOW_TRAPS (type))
9404 tree tem = TREE_OPERAND (arg1, 0);
9407 if (operand_equal_p (arg0, tem, 0))
9409 t1 = build_int_cst_type (type, -1);
9410 return omit_one_operand (type, t1, arg0);
9414 /* X + (X / CST) * -CST is X % CST. */
9415 if (TREE_CODE (arg1) == MULT_EXPR
9416 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9417 && operand_equal_p (arg0,
9418 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9420 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9421 tree cst1 = TREE_OPERAND (arg1, 1);
9422 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9423 if (sum && integer_zerop (sum))
9424 return fold_convert (type,
9425 fold_build2 (TRUNC_MOD_EXPR,
9426 TREE_TYPE (arg0), arg0, cst0));
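/* Example of the fold above: x + (x / 16) * -16 becomes x % 16, since the
   inner constants 16 and -16 sum to zero; e.g. for x == 37 both forms yield
   5, and for x == -37 both yield -5 with truncating division.  */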
9430 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9431 same or one. Make sure type is not saturating.
9432 fold_plusminus_mult_expr will re-associate. */
9433 if ((TREE_CODE (arg0) == MULT_EXPR
9434 || TREE_CODE (arg1) == MULT_EXPR)
9435 && !TYPE_SATURATING (type)
9436 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9438 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9443 if (! FLOAT_TYPE_P (type))
9445 if (integer_zerop (arg1))
9446 return non_lvalue (fold_convert (type, arg0));
9448 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9449 with a constant, and the two constants have no bits in common,
9450 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
9452 if (TREE_CODE (arg0) == BIT_AND_EXPR
9453 && TREE_CODE (arg1) == BIT_AND_EXPR
9454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9455 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9456 && integer_zerop (const_binop (BIT_AND_EXPR,
9457 TREE_OPERAND (arg0, 1),
9458 TREE_OPERAND (arg1, 1), 0)))
9460 code = BIT_IOR_EXPR;
9464 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9465 (plus (plus (mult) (mult)) (foo)) so that we can
9466 take advantage of the factoring cases below. */
9467 if (((TREE_CODE (arg0) == PLUS_EXPR
9468 || TREE_CODE (arg0) == MINUS_EXPR)
9469 && TREE_CODE (arg1) == MULT_EXPR)
9470 || ((TREE_CODE (arg1) == PLUS_EXPR
9471 || TREE_CODE (arg1) == MINUS_EXPR)
9472 && TREE_CODE (arg0) == MULT_EXPR))
9474 tree parg0, parg1, parg, marg;
9475 enum tree_code pcode;
9477 if (TREE_CODE (arg1) == MULT_EXPR)
9478 parg = arg0, marg = arg1;
9480 parg = arg1, marg = arg0;
9481 pcode = TREE_CODE (parg);
9482 parg0 = TREE_OPERAND (parg, 0);
9483 parg1 = TREE_OPERAND (parg, 1);
9487 if (TREE_CODE (parg0) == MULT_EXPR
9488 && TREE_CODE (parg1) != MULT_EXPR)
9489 return fold_build2 (pcode, type,
9490 fold_build2 (PLUS_EXPR, type,
9491 fold_convert (type, parg0),
9492 fold_convert (type, marg)),
9493 fold_convert (type, parg1));
9494 if (TREE_CODE (parg0) != MULT_EXPR
9495 && TREE_CODE (parg1) == MULT_EXPR)
9496 return fold_build2 (PLUS_EXPR, type,
9497 fold_convert (type, parg0),
9498 fold_build2 (pcode, type,
9499 fold_convert (type, marg),
9506 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9507 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9508 return non_lvalue (fold_convert (type, arg0));
9510 /* Likewise if the operands are reversed. */
9511 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9512 return non_lvalue (fold_convert (type, arg1));
9514 /* Convert X + -C into X - C. */
9515 if (TREE_CODE (arg1) == REAL_CST
9516 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9518 tem = fold_negate_const (arg1, type);
9519 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9520 return fold_build2 (MINUS_EXPR, type,
9521 fold_convert (type, arg0),
9522 fold_convert (type, tem));
9525 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9526 to __complex__ ( x, y ). This is not the same for SNaNs or
9527 if signed zeros are involved. */
9528 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9529 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9530 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9532 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9533 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9534 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9535 bool arg0rz = false, arg0iz = false;
9536 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9537 || (arg0i && (arg0iz = real_zerop (arg0i))))
9539 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9540 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9541 if (arg0rz && arg1i && real_zerop (arg1i))
9543 tree rp = arg1r ? arg1r
9544 : build1 (REALPART_EXPR, rtype, arg1);
9545 tree ip = arg0i ? arg0i
9546 : build1 (IMAGPART_EXPR, rtype, arg0);
9547 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9549 else if (arg0iz && arg1r && real_zerop (arg1r))
9551 tree rp = arg0r ? arg0r
9552 : build1 (REALPART_EXPR, rtype, arg0);
9553 tree ip = arg1i ? arg1i
9554 : build1 (IMAGPART_EXPR, rtype, arg1);
9555 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9560 if (flag_unsafe_math_optimizations
9561 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9562 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9563 && (tem = distribute_real_division (code, type, arg0, arg1)))
9566 /* Convert x+x into x*2.0. */
9567 if (operand_equal_p (arg0, arg1, 0)
9568 && SCALAR_FLOAT_TYPE_P (type))
9569 return fold_build2 (MULT_EXPR, type, arg0,
9570 build_real (type, dconst2));
9572 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9573 We associate floats only if the user has specified
9574 -fassociative-math. */
9575 if (flag_associative_math
9576 && TREE_CODE (arg1) == PLUS_EXPR
9577 && TREE_CODE (arg0) != MULT_EXPR)
9579 tree tree10 = TREE_OPERAND (arg1, 0);
9580 tree tree11 = TREE_OPERAND (arg1, 1);
9581 if (TREE_CODE (tree11) == MULT_EXPR
9582 && TREE_CODE (tree10) == MULT_EXPR)
9585 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9586 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9589 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9590 We associate floats only if the user has specified
9591 -fassociative-math. */
9592 if (flag_associative_math
9593 && TREE_CODE (arg0) == PLUS_EXPR
9594 && TREE_CODE (arg1) != MULT_EXPR)
9596 tree tree00 = TREE_OPERAND (arg0, 0);
9597 tree tree01 = TREE_OPERAND (arg0, 1);
9598 if (TREE_CODE (tree01) == MULT_EXPR
9599 && TREE_CODE (tree00) == MULT_EXPR)
9602 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9603 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9609 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9610 is a rotate of A by C1 bits. */
9611 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9612 is a rotate of A by B bits. */
9614 enum tree_code code0, code1;
9616 code0 = TREE_CODE (arg0);
9617 code1 = TREE_CODE (arg1);
9618 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9619 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9620 && operand_equal_p (TREE_OPERAND (arg0, 0),
9621 TREE_OPERAND (arg1, 0), 0)
9622 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9623 TYPE_UNSIGNED (rtype))
9624 /* Only create rotates in complete modes. Other cases are not
9625 expanded properly. */
9626 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9628 tree tree01, tree11;
9629 enum tree_code code01, code11;
9631 tree01 = TREE_OPERAND (arg0, 1);
9632 tree11 = TREE_OPERAND (arg1, 1);
9633 STRIP_NOPS (tree01);
9634 STRIP_NOPS (tree11);
9635 code01 = TREE_CODE (tree01);
9636 code11 = TREE_CODE (tree11);
9637 if (code01 == INTEGER_CST
9638 && code11 == INTEGER_CST
9639 && TREE_INT_CST_HIGH (tree01) == 0
9640 && TREE_INT_CST_HIGH (tree11) == 0
9641 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9642 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9643 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9644 code0 == LSHIFT_EXPR ? tree01 : tree11);
9645 else if (code11 == MINUS_EXPR)
9647 tree tree110, tree111;
9648 tree110 = TREE_OPERAND (tree11, 0);
9649 tree111 = TREE_OPERAND (tree11, 1);
9650 STRIP_NOPS (tree110);
9651 STRIP_NOPS (tree111);
9652 if (TREE_CODE (tree110) == INTEGER_CST
9653 && 0 == compare_tree_int (tree110,
9655 (TREE_TYPE (TREE_OPERAND
9657 && operand_equal_p (tree01, tree111, 0))
9658 return build2 ((code0 == LSHIFT_EXPR
9661 type, TREE_OPERAND (arg0, 0), tree01);
9663 else if (code01 == MINUS_EXPR)
9665 tree tree010, tree011;
9666 tree010 = TREE_OPERAND (tree01, 0);
9667 tree011 = TREE_OPERAND (tree01, 1);
9668 STRIP_NOPS (tree010);
9669 STRIP_NOPS (tree011);
9670 if (TREE_CODE (tree010) == INTEGER_CST
9671 && 0 == compare_tree_int (tree010,
9673 (TREE_TYPE (TREE_OPERAND
9675 && operand_equal_p (tree11, tree011, 0))
9676 return build2 ((code0 != LSHIFT_EXPR
9679 type, TREE_OPERAND (arg0, 0), tree11);
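/* Example of the rotate recognition above: for a 32-bit unsigned A,
   (A << 5) + (A >> 27) is folded to a left rotate of A by 5 bits; the
   addition acts like a bitwise OR because the two shifted fields cannot
   overlap.  */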
9685 /* In most languages, we can't associate operations on floats through
9686 parentheses. Rather than remember where the parentheses were, we
9687 don't associate floats at all, unless the user has specified -fassociative-math.
9689 And, we need to make sure type is not saturating. */
9691 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9692 && !TYPE_SATURATING (type))
9694 tree var0, con0, lit0, minus_lit0;
9695 tree var1, con1, lit1, minus_lit1;
9698 /* Split both trees into variables, constants, and literals. Then
9699 associate each group together, the constants with literals,
9700 then the result with variables. This increases the chances of
9701 literals being recombined later and of generating relocatable
9702 expressions for the sum of a constant and literal. */
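/* For instance (illustrative): for unsigned x and y, (x + 3) + (y + 5)
   splits into the variables x, y and the literals 3, 5 and is reassociated
   as (x + y) + 8; when signed overflow is undefined, only one variable may
   participate, e.g. (x + 3) + 5 still becomes x + 8.  */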
9703 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9704 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9705 code == MINUS_EXPR);
9707 /* With undefined overflow we can only associate constants
9708 with one variable. */
9709 if ((POINTER_TYPE_P (type)
9710 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9716 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9717 tmp0 = TREE_OPERAND (tmp0, 0);
9718 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9719 tmp1 = TREE_OPERAND (tmp1, 0);
9720 /* The only case we can still associate with two variables
9721 is if they are the same, modulo negation. */
9722 if (!operand_equal_p (tmp0, tmp1, 0))
9726 /* Only do something if we found more than two objects. Otherwise,
9727 nothing has changed and we risk infinite recursion. */
9729 && (2 < ((var0 != 0) + (var1 != 0)
9730 + (con0 != 0) + (con1 != 0)
9731 + (lit0 != 0) + (lit1 != 0)
9732 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9734 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9735 if (code == MINUS_EXPR)
9738 var0 = associate_trees (var0, var1, code, type);
9739 con0 = associate_trees (con0, con1, code, type);
9740 lit0 = associate_trees (lit0, lit1, code, type);
9741 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9743 /* Preserve the MINUS_EXPR if the negative part of the literal is
9744 greater than the positive part. Otherwise, the multiplicative
9745 folding code (i.e. extract_muldiv) may be fooled in case
9746 unsigned constants are subtracted, like in the following
9747 example: ((X*2 + 4) - 8U)/2. */
9748 if (minus_lit0 && lit0)
9750 if (TREE_CODE (lit0) == INTEGER_CST
9751 && TREE_CODE (minus_lit0) == INTEGER_CST
9752 && tree_int_cst_lt (lit0, minus_lit0))
9754 minus_lit0 = associate_trees (minus_lit0, lit0,
9760 lit0 = associate_trees (lit0, minus_lit0,
9768 return fold_convert (type,
9769 associate_trees (var0, minus_lit0,
9773 con0 = associate_trees (con0, minus_lit0,
9775 return fold_convert (type,
9776 associate_trees (var0, con0,
9781 con0 = associate_trees (con0, lit0, code, type);
9782 return fold_convert (type, associate_trees (var0, con0,
9790 /* Pointer simplifications for subtraction, simple reassociations. */
9791 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9793 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9794 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9795 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9797 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9798 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9799 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9800 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9801 return fold_build2 (PLUS_EXPR, type,
9802 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9803 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9805 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9806 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9808 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9809 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9810 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9812 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9815 /* A - (-B) -> A + B */
9816 if (TREE_CODE (arg1) == NEGATE_EXPR)
9817 return fold_build2 (PLUS_EXPR, type, op0,
9818 fold_convert (type, TREE_OPERAND (arg1, 0)));
9819 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9820 if (TREE_CODE (arg0) == NEGATE_EXPR
9821 && (FLOAT_TYPE_P (type)
9822 || INTEGRAL_TYPE_P (type))
9823 && negate_expr_p (arg1)
9824 && reorder_operands_p (arg0, arg1))
9825 return fold_build2 (MINUS_EXPR, type,
9826 fold_convert (type, negate_expr (arg1)),
9827 fold_convert (type, TREE_OPERAND (arg0, 0)));
9828 /* Convert -A - 1 to ~A. */
9829 if (INTEGRAL_TYPE_P (type)
9830 && TREE_CODE (arg0) == NEGATE_EXPR
9831 && integer_onep (arg1)
9832 && !TYPE_OVERFLOW_TRAPS (type))
9833 return fold_build1 (BIT_NOT_EXPR, type,
9834 fold_convert (type, TREE_OPERAND (arg0, 0)));
9836 /* Convert -1 - A to ~A. */
9837 if (INTEGRAL_TYPE_P (type)
9838 && integer_all_onesp (arg0))
9839 return fold_build1 (BIT_NOT_EXPR, type, op1);
9842 /* X - (X / CST) * CST is X % CST. */
9843 if (INTEGRAL_TYPE_P (type)
9844 && TREE_CODE (arg1) == MULT_EXPR
9845 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9846 && operand_equal_p (arg0,
9847 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9848 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9849 TREE_OPERAND (arg1, 1), 0))
9850 return fold_convert (type,
9851 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9852 arg0, TREE_OPERAND (arg1, 1)));
9854 if (! FLOAT_TYPE_P (type))
9856 if (integer_zerop (arg0))
9857 return negate_expr (fold_convert (type, arg1));
9858 if (integer_zerop (arg1))
9859 return non_lvalue (fold_convert (type, arg0));
9861 /* Fold A - (A & B) into ~B & A. */
9862 if (!TREE_SIDE_EFFECTS (arg0)
9863 && TREE_CODE (arg1) == BIT_AND_EXPR)
9865 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9867 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9868 return fold_build2 (BIT_AND_EXPR, type,
9869 fold_build1 (BIT_NOT_EXPR, type, arg10),
9870 fold_convert (type, arg0));
9872 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9874 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9875 return fold_build2 (BIT_AND_EXPR, type,
9876 fold_build1 (BIT_NOT_EXPR, type, arg11),
9877 fold_convert (type, arg0));
9881 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9882 any power of 2 minus 1. */
9883 if (TREE_CODE (arg0) == BIT_AND_EXPR
9884 && TREE_CODE (arg1) == BIT_AND_EXPR
9885 && operand_equal_p (TREE_OPERAND (arg0, 0),
9886 TREE_OPERAND (arg1, 0), 0))
9888 tree mask0 = TREE_OPERAND (arg0, 1);
9889 tree mask1 = TREE_OPERAND (arg1, 1);
9890 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9892 if (operand_equal_p (tem, mask1, 0))
9894 tem = fold_build2 (BIT_XOR_EXPR, type,
9895 TREE_OPERAND (arg0, 0), mask1);
9896 return fold_build2 (MINUS_EXPR, type, tem, mask1);
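/* Worked example with B == 7 (a power of 2 minus 1):
   (A & ~7) - (A & 7) folds to (A ^ 7) - 7.  The two agree because
   A ^ 7 == (A & ~7) + (7 - (A & 7)), so subtracting 7 from it gives
   (A & ~7) - (A & 7) again.  */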
9901 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9902 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9903 return non_lvalue (fold_convert (type, arg0));
9905 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9906 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9907 (-ARG1 + ARG0) reduces to -ARG1. */
9908 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9909 return negate_expr (fold_convert (type, arg1));
9911 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9912 __complex__ ( x, -y ). This is not the same for SNaNs or if
9913 signed zeros are involved. */
9914 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9915 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9916 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9918 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9919 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9920 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9921 bool arg0rz = false, arg0iz = false;
9922 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9923 || (arg0i && (arg0iz = real_zerop (arg0i))))
9925 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9926 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9927 if (arg0rz && arg1i && real_zerop (arg1i))
9929 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9931 : build1 (REALPART_EXPR, rtype, arg1));
9932 tree ip = arg0i ? arg0i
9933 : build1 (IMAGPART_EXPR, rtype, arg0);
9934 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9936 else if (arg0iz && arg1r && real_zerop (arg1r))
9938 tree rp = arg0r ? arg0r
9939 : build1 (REALPART_EXPR, rtype, arg0);
9940 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9942 : build1 (IMAGPART_EXPR, rtype, arg1));
9943 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9948 /* Fold &x - &x. This can happen from &x.foo - &x.
9949 This is unsafe for certain floats even in non-IEEE formats.
9950 In IEEE, it is unsafe because it does wrong for NaNs.
9951 Also note that operand_equal_p is always false if an operand is volatile.  */
9954 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
9955 && operand_equal_p (arg0, arg1, 0))
9956 return fold_convert (type, integer_zero_node);
9958 /* A - B -> A + (-B) if B is easily negatable. */
9959 if (negate_expr_p (arg1)
9960 && ((FLOAT_TYPE_P (type)
9961 /* Avoid this transformation if B is a positive REAL_CST. */
9962 && (TREE_CODE (arg1) != REAL_CST
9963 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9964 || INTEGRAL_TYPE_P (type)))
9965 return fold_build2 (PLUS_EXPR, type,
9966 fold_convert (type, arg0),
9967 fold_convert (type, negate_expr (arg1)));
9969 /* Try folding difference of addresses. */
9973 if ((TREE_CODE (arg0) == ADDR_EXPR
9974 || TREE_CODE (arg1) == ADDR_EXPR)
9975 && ptr_difference_const (arg0, arg1, &diff))
9976 return build_int_cst_type (type, diff);
9979 /* Fold &a[i] - &a[j] to i-j. */
9980 if (TREE_CODE (arg0) == ADDR_EXPR
9981 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9982 && TREE_CODE (arg1) == ADDR_EXPR
9983 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9985 tree aref0 = TREE_OPERAND (arg0, 0);
9986 tree aref1 = TREE_OPERAND (arg1, 0);
9987 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9988 TREE_OPERAND (aref1, 0), 0))
9990 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9991 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9992 tree esz = array_ref_element_size (aref0);
9993 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9994 return fold_build2 (MULT_EXPR, type, diff,
9995 fold_convert (type, esz));
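/* Example of the fold above (assuming 4-byte int): for `int a[10]', the
   byte difference of &a[7] and &a[2], computed as an integer, folds to
   (7 - 2) * 4 == 20 without needing the address of `a' itself.  */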
10000 if (flag_unsafe_math_optimizations
10001 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10002 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10003 && (tem = distribute_real_division (code, type, arg0, arg1)))
10006 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10007 same or one. Make sure type is not saturating.
10008 fold_plusminus_mult_expr will re-associate. */
10009 if ((TREE_CODE (arg0) == MULT_EXPR
10010 || TREE_CODE (arg1) == MULT_EXPR)
10011 && !TYPE_SATURATING (type)
10012 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10014 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10022 /* (-A) * (-B) -> A * B */
10023 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10024 return fold_build2 (MULT_EXPR, type,
10025 fold_convert (type, TREE_OPERAND (arg0, 0)),
10026 fold_convert (type, negate_expr (arg1)));
10027 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10028 return fold_build2 (MULT_EXPR, type,
10029 fold_convert (type, negate_expr (arg0)),
10030 fold_convert (type, TREE_OPERAND (arg1, 0)));
10032 if (! FLOAT_TYPE_P (type))
10034 if (integer_zerop (arg1))
10035 return omit_one_operand (type, arg1, arg0);
10036 if (integer_onep (arg1))
10037 return non_lvalue (fold_convert (type, arg0));
10038 /* Transform x * -1 into -x. Make sure to do the negation
10039 on the original operand with conversions not stripped
10040 because we can only strip non-sign-changing conversions. */
10041 if (integer_all_onesp (arg1))
10042 return fold_convert (type, negate_expr (op0));
10043 /* Transform x * -C into -x * C if x is easily negatable. */
10044 if (TREE_CODE (arg1) == INTEGER_CST
10045 && tree_int_cst_sgn (arg1) == -1
10046 && negate_expr_p (arg0)
10047 && (tem = negate_expr (arg1)) != arg1
10048 && !TREE_OVERFLOW (tem))
10049 return fold_build2 (MULT_EXPR, type,
10050 fold_convert (type, negate_expr (arg0)), tem);
10052 /* (a * (1 << b)) is (a << b) */
10053 if (TREE_CODE (arg1) == LSHIFT_EXPR
10054 && integer_onep (TREE_OPERAND (arg1, 0)))
10055 return fold_build2 (LSHIFT_EXPR, type, op0,
10056 TREE_OPERAND (arg1, 1));
10057 if (TREE_CODE (arg0) == LSHIFT_EXPR
10058 && integer_onep (TREE_OPERAND (arg0, 0)))
10059 return fold_build2 (LSHIFT_EXPR, type, op1,
10060 TREE_OPERAND (arg0, 1));
10062 strict_overflow_p = false;
10063 if (TREE_CODE (arg1) == INTEGER_CST
10064 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10065 &strict_overflow_p)))
10067 if (strict_overflow_p)
10068 fold_overflow_warning (("assuming signed overflow does not "
10069 "occur when simplifying "
10071 WARN_STRICT_OVERFLOW_MISC);
10072 return fold_convert (type, tem);
10075 /* Optimize z * conj(z) for integer complex numbers. */
10076 if (TREE_CODE (arg0) == CONJ_EXPR
10077 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10078 return fold_mult_zconjz (type, arg1);
10079 if (TREE_CODE (arg1) == CONJ_EXPR
10080 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10081 return fold_mult_zconjz (type, arg0);
10085 /* Maybe fold x * 0 to 0. The expressions aren't the same
10086 when x is NaN, since x * 0 is also NaN. Nor are they the
10087 same in modes with signed zeros, since multiplying a
10088 negative value by 0 gives -0, not +0. */
10089 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10090 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10091 && real_zerop (arg1))
10092 return omit_one_operand (type, arg1, arg0);
10093 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10094 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10095 && real_onep (arg1))
10096 return non_lvalue (fold_convert (type, arg0));
10098 /* Transform x * -1.0 into -x. */
10099 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10100 && real_minus_onep (arg1))
10101 return fold_convert (type, negate_expr (arg0));
10103 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10104 the result for floating point types due to rounding so it is applied
10105 only if -fassociative-math was specified.  */
10106 if (flag_associative_math
10107 && TREE_CODE (arg0) == RDIV_EXPR
10108 && TREE_CODE (arg1) == REAL_CST
10109 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10111 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10114 return fold_build2 (RDIV_EXPR, type, tem,
10115 TREE_OPERAND (arg0, 1));
10118 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10119 if (operand_equal_p (arg0, arg1, 0))
10121 tree tem = fold_strip_sign_ops (arg0);
10122 if (tem != NULL_TREE)
10124 tem = fold_convert (type, tem);
10125 return fold_build2 (MULT_EXPR, type, tem, tem);
10129 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10130 This is not the same for NaNs or if signed zeros are involved.  */
10132 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10133 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10134 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10135 && TREE_CODE (arg1) == COMPLEX_CST
10136 && real_zerop (TREE_REALPART (arg1)))
10138 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10139 if (real_onep (TREE_IMAGPART (arg1)))
10140 return fold_build2 (COMPLEX_EXPR, type,
10141 negate_expr (fold_build1 (IMAGPART_EXPR,
10143 fold_build1 (REALPART_EXPR, rtype, arg0));
10144 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10145 return fold_build2 (COMPLEX_EXPR, type,
10146 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10147 negate_expr (fold_build1 (REALPART_EXPR,
10151 /* Optimize z * conj(z) for floating point complex numbers.
10152 Guarded by flag_unsafe_math_optimizations as non-finite
10153 imaginary components don't produce scalar results. */
10154 if (flag_unsafe_math_optimizations
10155 && TREE_CODE (arg0) == CONJ_EXPR
10156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10157 return fold_mult_zconjz (type, arg1);
10158 if (flag_unsafe_math_optimizations
10159 && TREE_CODE (arg1) == CONJ_EXPR
10160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10161 return fold_mult_zconjz (type, arg0);
10163 if (flag_unsafe_math_optimizations)
10165 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10166 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10168 /* Optimizations of root(...)*root(...). */
10169 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10172 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10173 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10175 /* Optimize sqrt(x)*sqrt(x) as x. */
10176 if (BUILTIN_SQRT_P (fcode0)
10177 && operand_equal_p (arg00, arg10, 0)
10178 && ! HONOR_SNANS (TYPE_MODE (type)))
10181 /* Optimize root(x)*root(y) as root(x*y). */
10182 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10183 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10184 return build_call_expr (rootfn, 1, arg);
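/* The flag_unsafe_math_optimizations guard matters here: for negative x,
   sqrt(x) is a NaN, so sqrt(x)*sqrt(x) is a NaN rather than x; likewise
   root(x)*root(y) and root(x*y) can differ in rounding.  */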
10187 /* Optimize expN(x)*expN(y) as expN(x+y). */
10188 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10190 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10191 tree arg = fold_build2 (PLUS_EXPR, type,
10192 CALL_EXPR_ARG (arg0, 0),
10193 CALL_EXPR_ARG (arg1, 0));
10194 return build_call_expr (expfn, 1, arg);
10197 /* Optimizations of pow(...)*pow(...). */
10198 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10199 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10200 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10202 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10203 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10204 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10205 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10207 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10208 if (operand_equal_p (arg01, arg11, 0))
10210 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10211 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10212 return build_call_expr (powfn, 2, arg, arg01);
10215 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10216 if (operand_equal_p (arg00, arg10, 0))
10218 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10219 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10220 return build_call_expr (powfn, 2, arg00, arg);
10224 /* Optimize tan(x)*cos(x) as sin(x). */
10225 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10226 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10227 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10228 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10229 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10230 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10231 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10232 CALL_EXPR_ARG (arg1, 0), 0))
10234 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10236 if (sinfn != NULL_TREE)
10237 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
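/* Since tan (x) == sin (x) / cos (x), the product above collapses
   to sin (x).  The result differs where cos (x) is zero, which is
   why this lives under flag_unsafe_math_optimizations.  */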
10240 /* Optimize x*pow(x,c) as pow(x,c+1). */
10241 if (fcode1 == BUILT_IN_POW
10242 || fcode1 == BUILT_IN_POWF
10243 || fcode1 == BUILT_IN_POWL)
10245 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10246 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10247 if (TREE_CODE (arg11) == REAL_CST
10248 && !TREE_OVERFLOW (arg11)
10249 && operand_equal_p (arg0, arg10, 0))
10251 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10252 REAL_VALUE_TYPE c;
10253 tree arg;
10255 c = TREE_REAL_CST (arg11);
10256 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10257 arg = build_real (type, c);
10258 return build_call_expr (powfn, 2, arg0, arg);
10262 /* Optimize pow(x,c)*x as pow(x,c+1). */
10263 if (fcode0 == BUILT_IN_POW
10264 || fcode0 == BUILT_IN_POWF
10265 || fcode0 == BUILT_IN_POWL)
10267 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10268 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10269 if (TREE_CODE (arg01) == REAL_CST
10270 && !TREE_OVERFLOW (arg01)
10271 && operand_equal_p (arg1, arg00, 0))
10273 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10274 REAL_VALUE_TYPE c;
10275 tree arg;
10277 c = TREE_REAL_CST (arg01);
10278 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10279 arg = build_real (type, c);
10280 return build_call_expr (powfn, 2, arg1, arg);
10284 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10285 if (! optimize_size
10286 && operand_equal_p (arg0, arg1, 0))
10288 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10290 if (powfn)
10292 tree arg = build_real (type, dconst2);
10293 return build_call_expr (powfn, 2, arg0, arg);
10300 case BIT_IOR_EXPR:
10301 bit_ior:
10302 if (integer_all_onesp (arg1))
10303 return omit_one_operand (type, arg1, arg0);
10304 if (integer_zerop (arg1))
10305 return non_lvalue (fold_convert (type, arg0));
10306 if (operand_equal_p (arg0, arg1, 0))
10307 return non_lvalue (fold_convert (type, arg0));
10309 /* ~X | X is -1. */
10310 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10311 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10313 t1 = fold_convert (type, integer_zero_node);
10314 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10315 return omit_one_operand (type, t1, arg1);
10318 /* X | ~X is -1. */
10319 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10320 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10322 t1 = fold_convert (type, integer_zero_node);
10323 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10324 return omit_one_operand (type, t1, arg0);
10327 /* Canonicalize (X & C1) | C2. */
10328 if (TREE_CODE (arg0) == BIT_AND_EXPR
10329 && TREE_CODE (arg1) == INTEGER_CST
10330 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10332 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10333 int width = TYPE_PRECISION (type), w;
10334 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10335 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10336 hi2 = TREE_INT_CST_HIGH (arg1);
10337 lo2 = TREE_INT_CST_LOW (arg1);
10339 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10340 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10341 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
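/* Concrete instance of the test above: in (X & 0x0f) | 0x3f every
   bit of C1 (0x0f) is already set in C2 (0x3f), so the whole
   expression folds to 0x3f with X kept only for its side effects.  */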
10343 if (width > HOST_BITS_PER_WIDE_INT)
10345 mhi = (unsigned HOST_WIDE_INT) -1
10346 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10347 mlo = -1;
10349 else
10351 mhi = 0;
10352 mlo = (unsigned HOST_WIDE_INT) -1
10353 >> (HOST_BITS_PER_WIDE_INT - width);
10356 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10357 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10358 return fold_build2 (BIT_IOR_EXPR, type,
10359 TREE_OPERAND (arg0, 0), arg1);
10361 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10362 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10363 mode which allows further optimizations. */
10368 hi3 = hi1 & ~hi2;
10369 lo3 = lo1 & ~lo2;
10370 for (w = BITS_PER_UNIT;
10371 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10372 w <<= 1)
10374 unsigned HOST_WIDE_INT mask
10375 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10376 if (((lo1 | lo2) & mask) == mask
10377 && (lo1 & ~mask) == 0 && hi1 == 0)
10379 hi3 = 0;
10380 lo3 = mask;
10381 break;
10384 if (hi3 != hi1 || lo3 != lo1)
10385 return fold_build2 (BIT_IOR_EXPR, type,
10386 fold_build2 (BIT_AND_EXPR, type,
10387 TREE_OPERAND (arg0, 0),
10388 build_int_cst_wide (type,
10389 lo3, hi3)),
10390 arg1);
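/* For instance (X & 0x78) | 0x0f becomes (X & 0x70) | 0x0f here,
   while (X & 0xf3) | 0x0f becomes (X & 0xff) | 0x0f instead, because
   0xff is a full mode mask and the wider AND is more likely to
   disappear later.  */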
10393 /* (X & Y) | Y is (X, Y). */
10394 if (TREE_CODE (arg0) == BIT_AND_EXPR
10395 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10396 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10397 /* (X & Y) | X is (Y, X). */
10398 if (TREE_CODE (arg0) == BIT_AND_EXPR
10399 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10400 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10401 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10402 /* X | (X & Y) is (Y, X). */
10403 if (TREE_CODE (arg1) == BIT_AND_EXPR
10404 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10405 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10406 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10407 /* X | (Y & X) is (Y, X). */
10408 if (TREE_CODE (arg1) == BIT_AND_EXPR
10409 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10410 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10411 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10413 t1 = distribute_bit_expr (code, type, arg0, arg1);
10414 if (t1 != NULL_TREE)
10415 return t1;
10417 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10419 This results in more efficient code for machines without a NAND
10420 instruction. Combine will canonicalize to the first form
10421 which will allow use of NAND instructions provided by the
10422 backend if they exist. */
10423 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10424 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10426 return fold_build1 (BIT_NOT_EXPR, type,
10427 build2 (BIT_AND_EXPR, type,
10428 fold_convert (type,
10429 TREE_OPERAND (arg0, 0)),
10430 fold_convert (type,
10431 TREE_OPERAND (arg1, 0))));
10434 /* See if this can be simplified into a rotate first. If that
10435 is unsuccessful continue in the association code. */
10436 goto bit_rotate;
10438 case BIT_XOR_EXPR:
10439 if (integer_zerop (arg1))
10440 return non_lvalue (fold_convert (type, arg0));
10441 if (integer_all_onesp (arg1))
10442 return fold_build1 (BIT_NOT_EXPR, type, op0);
10443 if (operand_equal_p (arg0, arg1, 0))
10444 return omit_one_operand (type, integer_zero_node, arg0);
10446 /* ~X ^ X is -1. */
10447 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10450 t1 = fold_convert (type, integer_zero_node);
10451 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10452 return omit_one_operand (type, t1, arg1);
10455 /* X ^ ~X is -1. */
10456 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10457 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10459 t1 = fold_convert (type, integer_zero_node);
10460 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10461 return omit_one_operand (type, t1, arg0);
10464 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10465 with a constant, and the two constants have no bits in common,
10466 we should treat this as a BIT_IOR_EXPR since this may produce more
10467 simplifications. */
10468 if (TREE_CODE (arg0) == BIT_AND_EXPR
10469 && TREE_CODE (arg1) == BIT_AND_EXPR
10470 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10471 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10472 && integer_zerop (const_binop (BIT_AND_EXPR,
10473 TREE_OPERAND (arg0, 1),
10474 TREE_OPERAND (arg1, 1), 0)))
10476 code = BIT_IOR_EXPR;
10477 goto bit_ior;
10480 /* (X | Y) ^ X -> Y & ~X */
10481 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10482 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10484 tree t2 = TREE_OPERAND (arg0, 1);
10485 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10487 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10488 fold_convert (type, t1));
10492 /* (Y | X) ^ X -> Y & ~X */
10493 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10494 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10496 tree t2 = TREE_OPERAND (arg0, 0);
10497 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10499 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10500 fold_convert (type, t1));
10504 /* X ^ (X | Y) -> Y & ~X */
10505 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10506 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10508 tree t2 = TREE_OPERAND (arg1, 1);
10509 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10511 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10512 fold_convert (type, t1));
10516 /* X ^ (Y | X) -> Y & ~X */
10517 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10518 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10520 tree t2 = TREE_OPERAND (arg1, 0);
10521 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10523 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10524 fold_convert (type, t1));
10528 /* Convert ~X ^ ~Y to X ^ Y. */
10529 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10530 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10531 return fold_build2 (code, type,
10532 fold_convert (type, TREE_OPERAND (arg0, 0)),
10533 fold_convert (type, TREE_OPERAND (arg1, 0)));
10535 /* Convert ~X ^ C to X ^ ~C. */
10536 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10537 && TREE_CODE (arg1) == INTEGER_CST)
10538 return fold_build2 (code, type,
10539 fold_convert (type, TREE_OPERAND (arg0, 0)),
10540 fold_build1 (BIT_NOT_EXPR, type, arg1));
10542 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10543 if (TREE_CODE (arg0) == BIT_AND_EXPR
10544 && integer_onep (TREE_OPERAND (arg0, 1))
10545 && integer_onep (arg1))
10546 return fold_build2 (EQ_EXPR, type, arg0,
10547 build_int_cst (TREE_TYPE (arg0), 0));
10549 /* Fold (X & Y) ^ Y as ~X & Y. */
10550 if (TREE_CODE (arg0) == BIT_AND_EXPR
10551 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10553 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10554 return fold_build2 (BIT_AND_EXPR, type,
10555 fold_build1 (BIT_NOT_EXPR, type, tem),
10556 fold_convert (type, arg1));
10558 /* Fold (X & Y) ^ X as ~Y & X. */
10559 if (TREE_CODE (arg0) == BIT_AND_EXPR
10560 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10561 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10563 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10564 return fold_build2 (BIT_AND_EXPR, type,
10565 fold_build1 (BIT_NOT_EXPR, type, tem),
10566 fold_convert (type, arg1));
10568 /* Fold X ^ (X & Y) as X & ~Y. */
10569 if (TREE_CODE (arg1) == BIT_AND_EXPR
10570 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10572 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10573 return fold_build2 (BIT_AND_EXPR, type,
10574 fold_convert (type, arg0),
10575 fold_build1 (BIT_NOT_EXPR, type, tem));
10577 /* Fold X ^ (Y & X) as ~Y & X. */
10578 if (TREE_CODE (arg1) == BIT_AND_EXPR
10579 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10580 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10582 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10583 return fold_build2 (BIT_AND_EXPR, type,
10584 fold_build1 (BIT_NOT_EXPR, type, tem),
10585 fold_convert (type, arg0));
10588 /* See if this can be simplified into a rotate first. If that
10589 is unsuccessful continue in the association code. */
10590 goto bit_rotate;
10592 case BIT_AND_EXPR:
10593 if (integer_all_onesp (arg1))
10594 return non_lvalue (fold_convert (type, arg0));
10595 if (integer_zerop (arg1))
10596 return omit_one_operand (type, arg1, arg0);
10597 if (operand_equal_p (arg0, arg1, 0))
10598 return non_lvalue (fold_convert (type, arg0));
10600 /* ~X & X is always zero. */
10601 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10603 return omit_one_operand (type, integer_zero_node, arg1);
10605 /* X & ~X is always zero. */
10606 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10607 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10608 return omit_one_operand (type, integer_zero_node, arg0);
10610 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10611 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10612 && TREE_CODE (arg1) == INTEGER_CST
10613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10615 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10616 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10617 TREE_OPERAND (arg0, 0), tmp1);
10618 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10619 TREE_OPERAND (arg0, 1), tmp1);
10620 return fold_convert (type,
10621 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10625 /* (X | Y) & Y is (X, Y). */
10626 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10627 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10628 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10629 /* (X | Y) & X is (Y, X). */
10630 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10632 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10633 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10634 /* X & (X | Y) is (Y, X). */
10635 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10636 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10637 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10638 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10639 /* X & (Y | X) is (Y, X). */
10640 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10641 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10642 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10643 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10645 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10646 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10647 && integer_onep (TREE_OPERAND (arg0, 1))
10648 && integer_onep (arg1))
10650 tem = TREE_OPERAND (arg0, 0);
10651 return fold_build2 (EQ_EXPR, type,
10652 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10653 build_int_cst (TREE_TYPE (tem), 1)),
10654 build_int_cst (TREE_TYPE (tem), 0));
10656 /* Fold ~X & 1 as (X & 1) == 0. */
10657 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10658 && integer_onep (arg1))
10660 tem = TREE_OPERAND (arg0, 0);
10661 return fold_build2 (EQ_EXPR, type,
10662 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10663 build_int_cst (TREE_TYPE (tem), 1)),
10664 build_int_cst (TREE_TYPE (tem), 0));
10667 /* Fold (X ^ Y) & Y as ~X & Y. */
10668 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10669 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10671 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10672 return fold_build2 (BIT_AND_EXPR, type,
10673 fold_build1 (BIT_NOT_EXPR, type, tem),
10674 fold_convert (type, arg1));
10676 /* Fold (X ^ Y) & X as ~Y & X. */
10677 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10678 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10679 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10681 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10682 return fold_build2 (BIT_AND_EXPR, type,
10683 fold_build1 (BIT_NOT_EXPR, type, tem),
10684 fold_convert (type, arg1));
10686 /* Fold X & (X ^ Y) as X & ~Y. */
10687 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10688 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10690 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10691 return fold_build2 (BIT_AND_EXPR, type,
10692 fold_convert (type, arg0),
10693 fold_build1 (BIT_NOT_EXPR, type, tem));
10695 /* Fold X & (Y ^ X) as ~Y & X. */
10696 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10698 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10700 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10701 return fold_build2 (BIT_AND_EXPR, type,
10702 fold_build1 (BIT_NOT_EXPR, type, tem),
10703 fold_convert (type, arg0));
10706 t1 = distribute_bit_expr (code, type, arg0, arg1);
10707 if (t1 != NULL_TREE)
10708 return t1;
10709 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10710 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10711 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10713 unsigned int prec
10714 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10716 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10717 && (~TREE_INT_CST_LOW (arg1)
10718 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10719 return fold_convert (type, TREE_OPERAND (arg0, 0));
10722 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10724 This results in more efficient code for machines without a NOR
10725 instruction. Combine will canonicalize to the first form
10726 which will allow use of NOR instructions provided by the
10727 backend if they exist. */
10728 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10729 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10731 return fold_build1 (BIT_NOT_EXPR, type,
10732 build2 (BIT_IOR_EXPR, type,
10733 fold_convert (type,
10734 TREE_OPERAND (arg0, 0)),
10735 fold_convert (type,
10736 TREE_OPERAND (arg1, 0))));
10739 /* If arg0 is derived from the address of an object or function, we may
10740 be able to fold this expression using the object or function's
10741 alignment. */
10742 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10744 unsigned HOST_WIDE_INT modulus, residue;
10745 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10747 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10749 /* This works because modulus is a power of 2. If this weren't the
10750 case, we'd have to replace it by its greatest power-of-2
10751 divisor: modulus & -modulus. */
10752 if (low < modulus)
10753 return build_int_cst (type, residue & low);
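/* Example: if arg0 is the address of an object with 16-byte
   alignment, modulus is 16 and residue is 0, so &obj & 7 folds to
   the constant 0 without the address ever being computed at run
   time.  */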
10756 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10757 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10758 if the new mask might be further optimized. */
10759 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10760 || TREE_CODE (arg0) == RSHIFT_EXPR)
10761 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10762 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10763 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10764 < TYPE_PRECISION (TREE_TYPE (arg0))
10765 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10766 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10768 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10769 unsigned HOST_WIDE_INT mask
10770 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10771 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10772 tree shift_type = TREE_TYPE (arg0);
10774 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10775 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10776 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10777 && TYPE_PRECISION (TREE_TYPE (arg0))
10778 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10780 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10781 tree arg00 = TREE_OPERAND (arg0, 0);
10782 /* See if more bits can be proven as zero because of
10783 zero extension. */
10784 if (TREE_CODE (arg00) == NOP_EXPR
10785 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10787 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10788 if (TYPE_PRECISION (inner_type)
10789 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10790 && TYPE_PRECISION (inner_type) < prec)
10792 prec = TYPE_PRECISION (inner_type);
10793 /* See if we can shorten the right shift. */
10794 if (shiftc < prec)
10795 shift_type = inner_type;
10798 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10799 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10800 zerobits <<= prec - shiftc;
10801 /* For arithmetic shift if sign bit could be set, zerobits
10802 can actually contain sign bits, so no transformation is
10803 possible, unless MASK masks them all away. In that
10804 case the shift needs to be converted into logical shift. */
10805 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10806 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10808 if ((mask & zerobits) == 0)
10809 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10810 else
10811 zerobits = 0;
10815 /* ((X << 16) & 0xff00) is (X, 0). */
10816 if ((mask & zerobits) == mask)
10817 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10819 newmask = mask | zerobits;
10820 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10822 unsigned int prec;
10824 /* Only do the transformation if NEWMASK is some integer
10825 mode's mask. */
10826 for (prec = BITS_PER_UNIT;
10827 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10828 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10829 break;
10830 if (prec < HOST_BITS_PER_WIDE_INT
10831 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10833 if (shift_type != TREE_TYPE (arg0))
10835 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10836 fold_convert (shift_type,
10837 TREE_OPERAND (arg0, 0)),
10838 TREE_OPERAND (arg0, 1));
10839 tem = fold_convert (type, tem);
10841 else
10842 tem = op0;
10843 return fold_build2 (BIT_AND_EXPR, type, tem,
10844 build_int_cst_type (TREE_TYPE (op1),
10845 newmask));
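/* E.g. for a 32-bit X, (X << 8) & 0xff00 becomes (X << 8) & 0xffff
   here: the shift already guarantees the low 8 bits are zero, and
   the widened constant 0xffff is a 16-bit mode mask, which the test
   above requires.  */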
10852 case RDIV_EXPR:
10853 /* Don't touch a floating-point divide by zero unless the mode
10854 of the constant can represent infinity. */
10855 if (TREE_CODE (arg1) == REAL_CST
10856 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10857 && real_zerop (arg1))
10858 return NULL_TREE;
10860 /* Optimize A / A to 1.0 if we don't care about
10861 NaNs or Infinities. Skip the transformation
10862 for non-real operands. */
10863 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10864 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10865 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10866 && operand_equal_p (arg0, arg1, 0))
10868 tree r = build_real (TREE_TYPE (arg0), dconst1);
10870 return omit_two_operands (type, r, arg0, arg1);
10873 /* The complex version of the above A / A optimization. */
10874 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10875 && operand_equal_p (arg0, arg1, 0))
10877 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10878 if (! HONOR_NANS (TYPE_MODE (elem_type))
10879 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10881 tree r = build_real (elem_type, dconst1);
10882 /* omit_two_operands will call fold_convert for us. */
10883 return omit_two_operands (type, r, arg0, arg1);
10887 /* (-A) / (-B) -> A / B */
10888 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10889 return fold_build2 (RDIV_EXPR, type,
10890 TREE_OPERAND (arg0, 0),
10891 negate_expr (arg1));
10892 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10893 return fold_build2 (RDIV_EXPR, type,
10894 negate_expr (arg0),
10895 TREE_OPERAND (arg1, 0));
10897 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10898 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10899 && real_onep (arg1))
10900 return non_lvalue (fold_convert (type, arg0));
10902 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10903 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10904 && real_minus_onep (arg1))
10905 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10907 /* If ARG1 is a constant, we can convert this to a multiply by the
10908 reciprocal. This does not have the same rounding properties,
10909 so only do this if -freciprocal-math. We can actually
10910 always safely do it if ARG1 is a power of two, but it's hard to
10911 tell if it is or not in a portable manner. */
10912 if (TREE_CODE (arg1) == REAL_CST)
10914 if (flag_reciprocal_math
10915 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10916 arg1, 0)))
10917 return fold_build2 (MULT_EXPR, type, arg0, tem);
10918 /* Find the reciprocal if optimizing and the result is exact. */
10919 if (optimize)
10921 REAL_VALUE_TYPE r;
10922 r = TREE_REAL_CST (arg1);
10923 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10925 tem = build_real (type, r);
10926 return fold_build2 (MULT_EXPR, type,
10927 fold_convert (type, arg0), tem);
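/* So x / 2.0 becomes x * 0.5 whenever we are optimizing, because
   0.5 is the exact reciprocal of 2.0, while x / 3.0 is only turned
   into a multiplication under -freciprocal-math.  */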
10931 /* Convert A/B/C to A/(B*C). */
10932 if (flag_reciprocal_math
10933 && TREE_CODE (arg0) == RDIV_EXPR)
10934 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10935 fold_build2 (MULT_EXPR, type,
10936 TREE_OPERAND (arg0, 1), arg1));
10938 /* Convert A/(B/C) to (A/B)*C. */
10939 if (flag_reciprocal_math
10940 && TREE_CODE (arg1) == RDIV_EXPR)
10941 return fold_build2 (MULT_EXPR, type,
10942 fold_build2 (RDIV_EXPR, type, arg0,
10943 TREE_OPERAND (arg1, 0)),
10944 TREE_OPERAND (arg1, 1));
10946 /* Convert C1/(X*C2) into (C1/C2)/X. */
10947 if (flag_reciprocal_math
10948 && TREE_CODE (arg1) == MULT_EXPR
10949 && TREE_CODE (arg0) == REAL_CST
10950 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10952 tree tem = const_binop (RDIV_EXPR, arg0,
10953 TREE_OPERAND (arg1, 1), 0);
10954 if (tem)
10955 return fold_build2 (RDIV_EXPR, type, tem,
10956 TREE_OPERAND (arg1, 0));
10959 if (flag_unsafe_math_optimizations)
10961 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10962 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10964 /* Optimize sin(x)/cos(x) as tan(x). */
10965 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10966 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10967 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10968 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10969 CALL_EXPR_ARG (arg1, 0), 0))
10971 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10973 if (tanfn != NULL_TREE)
10974 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10977 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10978 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10979 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10980 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10981 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10982 CALL_EXPR_ARG (arg1, 0), 0))
10984 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10986 if (tanfn != NULL_TREE)
10988 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10989 return fold_build2 (RDIV_EXPR, type,
10990 build_real (type, dconst1), tmp);
10994 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10995 NaNs or Infinities. */
10996 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10997 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10998 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11000 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11001 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11003 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11004 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11005 && operand_equal_p (arg00, arg01, 0))
11007 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11009 if (cosfn != NULL_TREE)
11010 return build_call_expr (cosfn, 1, arg00);
11014 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11015 NaNs or Infinities. */
11016 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11017 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11018 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11020 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11021 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11023 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11024 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11025 && operand_equal_p (arg00, arg01, 0))
11027 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11029 if (cosfn != NULL_TREE)
11031 tree tmp = build_call_expr (cosfn, 1, arg00);
11032 return fold_build2 (RDIV_EXPR, type,
11033 build_real (type, dconst1),
11034 tmp);
11039 /* Optimize pow(x,c)/x as pow(x,c-1). */
11040 if (fcode0 == BUILT_IN_POW
11041 || fcode0 == BUILT_IN_POWF
11042 || fcode0 == BUILT_IN_POWL)
11044 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11045 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11046 if (TREE_CODE (arg01) == REAL_CST
11047 && !TREE_OVERFLOW (arg01)
11048 && operand_equal_p (arg1, arg00, 0))
11050 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11051 REAL_VALUE_TYPE c;
11052 tree arg;
11054 c = TREE_REAL_CST (arg01);
11055 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11056 arg = build_real (type, c);
11057 return build_call_expr (powfn, 2, arg1, arg);
11061 /* Optimize a/root(b/c) into a*root(c/b). */
11062 if (BUILTIN_ROOT_P (fcode1))
11064 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11066 if (TREE_CODE (rootarg) == RDIV_EXPR)
11068 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11069 tree b = TREE_OPERAND (rootarg, 0);
11070 tree c = TREE_OPERAND (rootarg, 1);
11072 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11074 tmp = build_call_expr (rootfn, 1, tmp);
11075 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11079 /* Optimize x/expN(y) into x*expN(-y). */
11080 if (BUILTIN_EXPONENT_P (fcode1))
11082 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11083 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11084 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11085 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11088 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11089 if (fcode1 == BUILT_IN_POW
11090 || fcode1 == BUILT_IN_POWF
11091 || fcode1 == BUILT_IN_POWL)
11093 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11094 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11095 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11096 tree neg11 = fold_convert (type, negate_expr (arg11));
11097 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11098 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11103 case TRUNC_DIV_EXPR:
11104 case FLOOR_DIV_EXPR:
11105 /* Simplify A / (B << N) where A and B are positive and B is
11106 a power of 2, to A >> (N + log2(B)). */
11107 strict_overflow_p = false;
11108 if (TREE_CODE (arg1) == LSHIFT_EXPR
11109 && (TYPE_UNSIGNED (type)
11110 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11112 tree sval = TREE_OPERAND (arg1, 0);
11113 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11115 tree sh_cnt = TREE_OPERAND (arg1, 1);
11116 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11118 if (strict_overflow_p)
11119 fold_overflow_warning (("assuming signed overflow does not "
11120 "occur when simplifying A / (B << N)"),
11121 WARN_STRICT_OVERFLOW_MISC);
11123 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11124 sh_cnt, build_int_cst (NULL_TREE, pow2));
11125 return fold_build2 (RSHIFT_EXPR, type,
11126 fold_convert (type, arg0), sh_cnt);
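/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2): the
   constant power of two contributes log2(4) == 2 to the shift
   count.  */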
11130 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11131 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11132 if (INTEGRAL_TYPE_P (type)
11133 && TYPE_UNSIGNED (type)
11134 && code == FLOOR_DIV_EXPR)
11135 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11139 case ROUND_DIV_EXPR:
11140 case CEIL_DIV_EXPR:
11141 case EXACT_DIV_EXPR:
11142 if (integer_onep (arg1))
11143 return non_lvalue (fold_convert (type, arg0));
11144 if (integer_zerop (arg1))
11145 return NULL_TREE;
11146 /* X / -1 is -X. */
11147 if (!TYPE_UNSIGNED (type)
11148 && TREE_CODE (arg1) == INTEGER_CST
11149 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11150 && TREE_INT_CST_HIGH (arg1) == -1)
11151 return fold_convert (type, negate_expr (arg0));
11153 /* Convert -A / -B to A / B when the type is signed and overflow is
11154 undefined. */
11155 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11156 && TREE_CODE (arg0) == NEGATE_EXPR
11157 && negate_expr_p (arg1))
11159 if (INTEGRAL_TYPE_P (type))
11160 fold_overflow_warning (("assuming signed overflow does not occur "
11161 "when distributing negation across "
11163 WARN_STRICT_OVERFLOW_MISC);
11164 return fold_build2 (code, type,
11165 fold_convert (type, TREE_OPERAND (arg0, 0)),
11166 negate_expr (arg1));
11168 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11169 && TREE_CODE (arg1) == NEGATE_EXPR
11170 && negate_expr_p (arg0))
11172 if (INTEGRAL_TYPE_P (type))
11173 fold_overflow_warning (("assuming signed overflow does not occur "
11174 "when distributing negation across "
11176 WARN_STRICT_OVERFLOW_MISC);
11177 return fold_build2 (code, type, negate_expr (arg0),
11178 TREE_OPERAND (arg1, 0));
11181 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11182 operation, EXACT_DIV_EXPR.
11184 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11185 At one time others generated faster code, it's not clear if they do
11186 after the last round of changes to the DIV code in expmed.c. */
11187 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11188 && multiple_of_p (type, arg0, arg1))
11189 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11191 strict_overflow_p = false;
11192 if (TREE_CODE (arg1) == INTEGER_CST
11193 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11194 &strict_overflow_p)))
11196 if (strict_overflow_p)
11197 fold_overflow_warning (("assuming signed overflow does not occur "
11198 "when simplifying division"),
11199 WARN_STRICT_OVERFLOW_MISC);
11200 return fold_convert (type, tem);
11205 case CEIL_MOD_EXPR:
11206 case FLOOR_MOD_EXPR:
11207 case ROUND_MOD_EXPR:
11208 case TRUNC_MOD_EXPR:
11209 /* X % 1 is always zero, but be sure to preserve any side
11210 effects in X. */
11211 if (integer_onep (arg1))
11212 return omit_one_operand (type, integer_zero_node, arg0);
11214 /* X % 0, return X % 0 unchanged so that we can get the
11215 proper warnings and errors. */
11216 if (integer_zerop (arg1))
11217 return NULL_TREE;
11219 /* 0 % X is always zero, but be sure to preserve any side
11220 effects in X. Place this after checking for X == 0. */
11221 if (integer_zerop (arg0))
11222 return omit_one_operand (type, integer_zero_node, arg1);
11224 /* X % -1 is zero. */
11225 if (!TYPE_UNSIGNED (type)
11226 && TREE_CODE (arg1) == INTEGER_CST
11227 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11228 && TREE_INT_CST_HIGH (arg1) == -1)
11229 return omit_one_operand (type, integer_zero_node, arg0);
11231 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11232 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11233 strict_overflow_p = false;
11234 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11235 && (TYPE_UNSIGNED (type)
11236 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11238 tree c = arg1;
11239 /* Also optimize A % (C << N) where C is a power of 2,
11240 to A & ((C << N) - 1). */
11241 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11242 c = TREE_OPERAND (arg1, 0);
11244 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11246 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11247 build_int_cst (TREE_TYPE (arg1), 1));
11248 if (strict_overflow_p)
11249 fold_overflow_warning (("assuming signed overflow does not "
11250 "occur when simplifying "
11251 "X % (power of two)"),
11252 WARN_STRICT_OVERFLOW_MISC);
11253 return fold_build2 (BIT_AND_EXPR, type,
11254 fold_convert (type, arg0),
11255 fold_convert (type, mask));
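/* E.g. for unsigned X, X % 8 becomes X & 7, and X % (2 << N)
   becomes X & ((2 << N) - 1) via the LSHIFT_EXPR handling above.  */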
11259 /* X % -C is the same as X % C. */
11260 if (code == TRUNC_MOD_EXPR
11261 && !TYPE_UNSIGNED (type)
11262 && TREE_CODE (arg1) == INTEGER_CST
11263 && !TREE_OVERFLOW (arg1)
11264 && TREE_INT_CST_HIGH (arg1) < 0
11265 && !TYPE_OVERFLOW_TRAPS (type)
11266 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11267 && !sign_bit_p (arg1, arg1))
11268 return fold_build2 (code, type, fold_convert (type, arg0),
11269 fold_convert (type, negate_expr (arg1)));
11271 /* X % -Y is the same as X % Y. */
11272 if (code == TRUNC_MOD_EXPR
11273 && !TYPE_UNSIGNED (type)
11274 && TREE_CODE (arg1) == NEGATE_EXPR
11275 && !TYPE_OVERFLOW_TRAPS (type))
11276 return fold_build2 (code, type, fold_convert (type, arg0),
11277 fold_convert (type, TREE_OPERAND (arg1, 0)));
11279 if (TREE_CODE (arg1) == INTEGER_CST
11280 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11281 &strict_overflow_p)))
11283 if (strict_overflow_p)
11284 fold_overflow_warning (("assuming signed overflow does not occur "
11285 "when simplifying modulos"),
11286 WARN_STRICT_OVERFLOW_MISC);
11287 return fold_convert (type, tem);
11292 case LROTATE_EXPR:
11293 case RROTATE_EXPR:
11294 if (integer_all_onesp (arg0))
11295 return omit_one_operand (type, arg0, arg1);
11296 goto shift;
11298 case RSHIFT_EXPR:
11299 /* Optimize -1 >> x for arithmetic right shifts. */
11300 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11301 return omit_one_operand (type, arg0, arg1);
11302 /* ... fall through ... */
11304 case LSHIFT_EXPR:
11305 shift:
11306 if (integer_zerop (arg1))
11307 return non_lvalue (fold_convert (type, arg0));
11308 if (integer_zerop (arg0))
11309 return omit_one_operand (type, arg0, arg1);
11311 /* Since negative shift count is not well-defined,
11312 don't try to compute it in the compiler. */
11313 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11316 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11317 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11318 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11319 && host_integerp (TREE_OPERAND (arg0, 1), false)
11320 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11322 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11323 + TREE_INT_CST_LOW (arg1));
11325 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11326 being well defined. */
11327 if (low >= TYPE_PRECISION (type))
11329 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11330 low = low % TYPE_PRECISION (type);
11331 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11332 return build_int_cst (type, 0);
11333 else
11334 low = TYPE_PRECISION (type) - 1;
11337 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11338 build_int_cst (type, low));
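/* E.g. (X << 3) << 5 becomes X << 8; for a 32-bit X,
   (X << 20) << 20 becomes the constant 0, and two left rotates by
   30 and by 10 become a single rotate by (30 + 10) % 32 == 8.  */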
11341 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11342 into x & ((unsigned)-1 >> c) for unsigned types. */
11343 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11344 || (TYPE_UNSIGNED (type)
11345 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11346 && host_integerp (arg1, false)
11347 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11348 && host_integerp (TREE_OPERAND (arg0, 1), false)
11349 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11351 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11352 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11353 tree lshift;
11354 tree arg00;
11356 if (low0 == low1)
11358 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11360 lshift = build_int_cst (type, -1);
11361 lshift = int_const_binop (code, lshift, arg1, 0);
11363 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11367 /* Rewrite an LROTATE_EXPR by a constant into an
11368 RROTATE_EXPR by a new constant. */
11369 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11371 tree tem = build_int_cst (TREE_TYPE (arg1),
11372 TYPE_PRECISION (type));
11373 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11374 return fold_build2 (RROTATE_EXPR, type, op0, tem);
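/* E.g. a 32-bit rotate-left by 8 is canonicalized into a
   rotate-right by 32 - 8 == 24, so only one rotate direction has to
   be handled from here on.  */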
11377 /* If we have a rotate of a bit operation with the rotate count and
11378 the second operand of the bit operation both constant,
11379 permute the two operations. */
11380 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11381 && (TREE_CODE (arg0) == BIT_AND_EXPR
11382 || TREE_CODE (arg0) == BIT_IOR_EXPR
11383 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11385 return fold_build2 (TREE_CODE (arg0), type,
11386 fold_build2 (code, type,
11387 TREE_OPERAND (arg0, 0), arg1),
11388 fold_build2 (code, type,
11389 TREE_OPERAND (arg0, 1), arg1));
11391 /* Two consecutive rotates adding up to the precision of the
11392 type can be ignored. */
11393 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11394 && TREE_CODE (arg0) == RROTATE_EXPR
11395 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11396 && TREE_INT_CST_HIGH (arg1) == 0
11397 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11398 && ((TREE_INT_CST_LOW (arg1)
11399 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11400 == (unsigned int) TYPE_PRECISION (type)))
11401 return TREE_OPERAND (arg0, 0);
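/* E.g. rotating a 32-bit value right by 12 and then right by 20
   moves every bit back to its original position, so the innermost
   operand is returned unchanged.  */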
11403 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11404 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11405 if the latter can be further optimized. */
11406 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11407 && TREE_CODE (arg0) == BIT_AND_EXPR
11408 && TREE_CODE (arg1) == INTEGER_CST
11409 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11411 tree mask = fold_build2 (code, type,
11412 fold_convert (type, TREE_OPERAND (arg0, 1)),
11413 arg1);
11414 tree shift = fold_build2 (code, type,
11415 fold_convert (type, TREE_OPERAND (arg0, 0)),
11416 arg1);
11417 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11418 if (tem)
11419 return tem;
11424 case MIN_EXPR:
11425 if (operand_equal_p (arg0, arg1, 0))
11426 return omit_one_operand (type, arg0, arg1);
11427 if (INTEGRAL_TYPE_P (type)
11428 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11429 return omit_one_operand (type, arg1, arg0);
11430 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11431 if (tem)
11432 return tem;
11433 goto associate;
11435 case MAX_EXPR:
11436 if (operand_equal_p (arg0, arg1, 0))
11437 return omit_one_operand (type, arg0, arg1);
11438 if (INTEGRAL_TYPE_P (type)
11439 && TYPE_MAX_VALUE (type)
11440 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11441 return omit_one_operand (type, arg1, arg0);
11442 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11443 if (tem)
11444 return tem;
11445 goto associate;
11447 case TRUTH_ANDIF_EXPR:
11448 /* Note that the operands of this must be ints
11449 and their values must be 0 or 1.
11450 ("true" is a fixed value perhaps depending on the language.) */
11451 /* If first arg is constant zero, return it. */
11452 if (integer_zerop (arg0))
11453 return fold_convert (type, arg0);
11454 case TRUTH_AND_EXPR:
11455 /* If either arg is constant true, drop it. */
11456 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11457 return non_lvalue (fold_convert (type, arg1));
11458 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11459 /* Preserve sequence points. */
11460 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11461 return non_lvalue (fold_convert (type, arg0));
11462 /* If second arg is constant zero, result is zero, but first arg
11463 must be evaluated. */
11464 if (integer_zerop (arg1))
11465 return omit_one_operand (type, arg1, arg0);
11466 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11467 case will be handled here. */
11468 if (integer_zerop (arg0))
11469 return omit_one_operand (type, arg0, arg1);
11471 /* !X && X is always false. */
11472 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11474 return omit_one_operand (type, integer_zero_node, arg1);
11475 /* X && !X is always false. */
11476 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11478 return omit_one_operand (type, integer_zero_node, arg0);
11480 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11481 means A >= Y && A != MAX, but in this case we know that
11482 A < X <= MAX. */
11484 if (!TREE_SIDE_EFFECTS (arg0)
11485 && !TREE_SIDE_EFFECTS (arg1))
11487 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11488 if (tem && !operand_equal_p (tem, arg0, 0))
11489 return fold_build2 (code, type, tem, arg1);
11491 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11492 if (tem && !operand_equal_p (tem, arg1, 0))
11493 return fold_build2 (code, type, arg0, tem);
11497 /* We only do these simplifications if we are optimizing. */
11498 if (!optimize)
11499 return NULL_TREE;
11501 /* Check for things like (A || B) && (A || C). We can convert this
11502 to A || (B && C). Note that either operator can be any of the four
11503 truth and/or operations and the transformation will still be
11504 valid. Also note that we only care about order for the
11505 ANDIF and ORIF operators. If B contains side effects, this
11506 might change the truth-value of A. */
11507 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11508 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11509 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11510 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11511 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11512 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11514 tree a00 = TREE_OPERAND (arg0, 0);
11515 tree a01 = TREE_OPERAND (arg0, 1);
11516 tree a10 = TREE_OPERAND (arg1, 0);
11517 tree a11 = TREE_OPERAND (arg1, 1);
11518 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11519 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11520 && (code == TRUTH_AND_EXPR
11521 || code == TRUTH_OR_EXPR));
11523 if (operand_equal_p (a00, a10, 0))
11524 return fold_build2 (TREE_CODE (arg0), type, a00,
11525 fold_build2 (code, type, a01, a11));
11526 else if (commutative && operand_equal_p (a00, a11, 0))
11527 return fold_build2 (TREE_CODE (arg0), type, a00,
11528 fold_build2 (code, type, a01, a10));
11529 else if (commutative && operand_equal_p (a01, a10, 0))
11530 return fold_build2 (TREE_CODE (arg0), type, a01,
11531 fold_build2 (code, type, a00, a11));
11533 /* This case is tricky because we must either have commutative
11534 operators or else A10 must not have side-effects. */
11536 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11537 && operand_equal_p (a01, a11, 0))
11538 return fold_build2 (TREE_CODE (arg0), type,
11539 fold_build2 (code, type, a00, a10),
11540 a01);
11543 /* See if we can build a range comparison. */
11544 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11545 return tem;
11547 /* Check for the possibility of merging component references. If our
11548 lhs is another similar operation, try to merge its rhs with our
11549 rhs. Then try to merge our lhs and rhs. */
11550 if (TREE_CODE (arg0) == code
11551 && 0 != (tem = fold_truthop (code, type,
11552 TREE_OPERAND (arg0, 1), arg1)))
11553 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11555 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11556 return tem;
11560 case TRUTH_ORIF_EXPR:
11561 /* Note that the operands of this must be ints
11562 and their values must be 0 or true.
11563 ("true" is a fixed value perhaps depending on the language.) */
11564 /* If first arg is constant true, return it. */
11565 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11566 return fold_convert (type, arg0);
11567 case TRUTH_OR_EXPR:
11568 /* If either arg is constant zero, drop it. */
11569 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11570 return non_lvalue (fold_convert (type, arg1));
11571 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11572 /* Preserve sequence points. */
11573 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11574 return non_lvalue (fold_convert (type, arg0));
11575 /* If second arg is constant true, result is true, but we must
11576 evaluate first arg. */
11577 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11578 return omit_one_operand (type, arg1, arg0);
11579 /* Likewise for first arg, but note this only occurs here for
11580 TRUTH_OR_EXPR. */
11581 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11582 return omit_one_operand (type, arg0, arg1);
11584 /* !X || X is always true. */
11585 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11586 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11587 return omit_one_operand (type, integer_one_node, arg1);
11588 /* X || !X is always true. */
11589 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11590 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11591 return omit_one_operand (type, integer_one_node, arg0);
11595 case TRUTH_XOR_EXPR:
11596 /* If the second arg is constant zero, drop it. */
11597 if (integer_zerop (arg1))
11598 return non_lvalue (fold_convert (type, arg0));
11599 /* If the second arg is constant true, this is a logical inversion. */
11600 if (integer_onep (arg1))
11602 /* Only call invert_truthvalue if operand is a truth value. */
11603 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11604 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11605 else
11606 tem = invert_truthvalue (arg0);
11607 return non_lvalue (fold_convert (type, tem));
11609 /* Identical arguments cancel to zero. */
11610 if (operand_equal_p (arg0, arg1, 0))
11611 return omit_one_operand (type, integer_zero_node, arg0);
11613 /* !X ^ X is always true. */
11614 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11615 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11616 return omit_one_operand (type, integer_one_node, arg1);
11618 /* X ^ !X is always true. */
11619 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11620 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11621 return omit_one_operand (type, integer_one_node, arg0);
11625 case EQ_EXPR:
11626 case NE_EXPR:
11627 tem = fold_comparison (code, type, op0, op1);
11628 if (tem != NULL_TREE)
11629 return tem;
11631 /* bool_var != 0 becomes bool_var. */
11632 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11633 && code == NE_EXPR)
11634 return non_lvalue (fold_convert (type, arg0));
11636 /* bool_var == 1 becomes bool_var. */
11637 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11638 && code == EQ_EXPR)
11639 return non_lvalue (fold_convert (type, arg0));
11641 /* bool_var != 1 becomes !bool_var. */
11642 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11643 && code == NE_EXPR)
11644 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11646 /* bool_var == 0 becomes !bool_var. */
11647 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11648 && code == EQ_EXPR)
11649 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11651 /* If this is an equality comparison of the address of two non-weak,
11652 unaliased symbols neither of which are extern (since we do not
11653 have access to attributes for externs), then we know the result. */
11654 if (TREE_CODE (arg0) == ADDR_EXPR
11655 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11656 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11657 && ! lookup_attribute ("alias",
11658 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11659 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11660 && TREE_CODE (arg1) == ADDR_EXPR
11661 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11662 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11663 && ! lookup_attribute ("alias",
11664 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11665 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11667 /* We know that we're looking at the address of two
11668 non-weak, unaliased, static _DECL nodes.
11670 It is both wasteful and incorrect to call operand_equal_p
11671 to compare the two ADDR_EXPR nodes. It is wasteful in that
11672 all we need to do is test pointer equality for the arguments
11673 to the two ADDR_EXPR nodes. It is incorrect to use
11674 operand_equal_p as that function is NOT equivalent to a
11675 C equality test. It can in fact return false for two
11676 objects which would test as equal using the C equality
11677 operator. */
11678 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11679 return constant_boolean_node (equal
11680 ? code == EQ_EXPR : code != EQ_EXPR,
11681 type);
11684 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11685 a MINUS_EXPR of a constant, we can convert it into a comparison with
11686 a revised constant as long as no overflow occurs. */
11687 if (TREE_CODE (arg1) == INTEGER_CST
11688 && (TREE_CODE (arg0) == PLUS_EXPR
11689 || TREE_CODE (arg0) == MINUS_EXPR)
11690 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11691 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11692 ? MINUS_EXPR : PLUS_EXPR,
11693 fold_convert (TREE_TYPE (arg0), arg1),
11694 TREE_OPERAND (arg0, 1), 0))
11695 && !TREE_OVERFLOW (tem))
11696 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11698 /* Similarly for a NEGATE_EXPR. */
11699 if (TREE_CODE (arg0) == NEGATE_EXPR
11700 && TREE_CODE (arg1) == INTEGER_CST
11701 && 0 != (tem = negate_expr (arg1))
11702 && TREE_CODE (tem) == INTEGER_CST
11703 && !TREE_OVERFLOW (tem))
11704 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11706 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11707 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11708 && TREE_CODE (arg1) == INTEGER_CST
11709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11710 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11711 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11712 fold_convert (TREE_TYPE (arg0), arg1),
11713 TREE_OPERAND (arg0, 1)));
11715 /* Transform comparisons of the form X +- C CMP X. */
11716 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11717 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11719 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11720 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11722 tree cst = TREE_OPERAND (arg0, 1);
11724 if (code == EQ_EXPR
11725 && !integer_zerop (cst))
11726 return omit_two_operands (type, boolean_false_node,
11727 TREE_OPERAND (arg0, 0), arg1);
11728 else
11729 return omit_two_operands (type, boolean_true_node,
11730 TREE_OPERAND (arg0, 0), arg1);
11733 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11734 for !=. Don't do this for ordered comparisons due to overflow. */
11735 if (TREE_CODE (arg0) == MINUS_EXPR
11736 && integer_zerop (arg1))
11737 return fold_build2 (code, type,
11738 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11740 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11741 if (TREE_CODE (arg0) == ABS_EXPR
11742 && (integer_zerop (arg1) || real_zerop (arg1)))
11743 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11745 /* If this is an EQ or NE comparison with zero and ARG0 is
11746 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11747 two operations, but the latter can be done in one less insn
11748 on machines that have only two-operand insns or on which a
11749 constant cannot be the first operand. */
11750 if (TREE_CODE (arg0) == BIT_AND_EXPR
11751 && integer_zerop (arg1))
11753 tree arg00 = TREE_OPERAND (arg0, 0);
11754 tree arg01 = TREE_OPERAND (arg0, 1);
11755 if (TREE_CODE (arg00) == LSHIFT_EXPR
11756 && integer_onep (TREE_OPERAND (arg00, 0)))
11758 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11759 arg01, TREE_OPERAND (arg00, 1));
11760 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11761 build_int_cst (TREE_TYPE (arg0), 1));
11762 return fold_build2 (code, type,
11763 fold_convert (TREE_TYPE (arg1), tem), arg1);
11765 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11766 && integer_onep (TREE_OPERAND (arg01, 0)))
11768 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11769 arg00, TREE_OPERAND (arg01, 1));
11770 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11771 build_int_cst (TREE_TYPE (arg0), 1));
11772 return fold_build2 (code, type,
11773 fold_convert (TREE_TYPE (arg1), tem), arg1);
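/* E.g. ((1 << N) & flags) != 0 becomes ((flags >> N) & 1) != 0,
   which tests the same bit but avoids putting a constant in the
   shifted position, per the rationale above.  */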
11777 /* If this is an NE or EQ comparison of zero against the result of a
11778 signed MOD operation whose second operand is a power of 2, make
11779 the MOD operation unsigned since it is simpler and equivalent. */
11780 if (integer_zerop (arg1)
11781 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11782 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11783 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11784 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11785 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11786 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11788 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11789 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11790 fold_convert (newtype,
11791 TREE_OPERAND (arg0, 0)),
11792 fold_convert (newtype,
11793 TREE_OPERAND (arg0, 1)));
11795 return fold_build2 (code, type, newmod,
11796 fold_convert (newtype, arg1));
11799 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11800 C1 is a valid shift constant, and C2 is a power of two, i.e.
11801 single bit. */
11802 if (TREE_CODE (arg0) == BIT_AND_EXPR
11803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11804 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11805 == INTEGER_CST
11806 && integer_pow2p (TREE_OPERAND (arg0, 1))
11807 && integer_zerop (arg1))
11809 tree itype = TREE_TYPE (arg0);
11810 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11811 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11813 /* Check for a valid shift count. */
11814 if (TREE_INT_CST_HIGH (arg001) == 0
11815 && TREE_INT_CST_LOW (arg001) < prec)
11817 tree arg01 = TREE_OPERAND (arg0, 1);
11818 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11819 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11820 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11821 can be rewritten as (X & (C2 << C1)) != 0. */
11822 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11824 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11825 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11826 return fold_build2 (code, type, tem, arg1);
11828 /* Otherwise, for signed (arithmetic) shifts,
11829 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11830 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11831 else if (!TYPE_UNSIGNED (itype))
11832 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11833 arg000, build_int_cst (itype, 0));
11834 /* Otherwise, for unsigned (logical) shifts,
11835 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11836 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11837 else
11838 return omit_one_operand (type,
11839 code == EQ_EXPR ? integer_one_node
11840 : integer_zero_node,
11841 arg000);
11845 /* If this is an NE comparison of zero with an AND of one, remove the
11846 comparison since the AND will give the correct value. */
11847 if (code == NE_EXPR
11848 && integer_zerop (arg1)
11849 && TREE_CODE (arg0) == BIT_AND_EXPR
11850 && integer_onep (TREE_OPERAND (arg0, 1)))
11851 return fold_convert (type, arg0);
11853 /* If we have (A & C) == C where C is a power of 2, convert this into
11854 (A & C) != 0. Similarly for NE_EXPR. */
11855 if (TREE_CODE (arg0) == BIT_AND_EXPR
11856 && integer_pow2p (TREE_OPERAND (arg0, 1))
11857 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11858 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11859 arg0, fold_convert (TREE_TYPE (arg0),
11860 integer_zero_node));
11862 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11863 bit, then fold the expression into A < 0 or A >= 0. */
11864 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11868 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11869 Similarly for NE_EXPR. */
11870 if (TREE_CODE (arg0) == BIT_AND_EXPR
11871 && TREE_CODE (arg1) == INTEGER_CST
11872 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11874 tree notc = fold_build1 (BIT_NOT_EXPR,
11875 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11876 TREE_OPERAND (arg0, 1));
11877 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11879 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11880 if (integer_nonzerop (dandnotc))
11881 return omit_one_operand (type, rslt, arg0);
11884 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11885 Similarly for NE_EXPR. */
11886 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11887 && TREE_CODE (arg1) == INTEGER_CST
11888 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11890 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11891 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11892 TREE_OPERAND (arg0, 1), notd);
11893 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11894 if (integer_nonzerop (candnotd))
11895 return omit_one_operand (type, rslt, arg0);
11898 /* Optimize comparisons of strlen vs zero to a compare of the
11899 first character of the string vs zero. To wit,
11900 strlen(ptr) == 0 => *ptr == 0
11901 strlen(ptr) != 0 => *ptr != 0
11902 Other cases should reduce to one of these two (or a constant)
11903 due to the return value of strlen being unsigned. */
11904 if (TREE_CODE (arg0) == CALL_EXPR
11905 && integer_zerop (arg1))
11907 tree fndecl = get_callee_fndecl (arg0);
11910 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11911 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11912 && call_expr_nargs (arg0) == 1
11913 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11915 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11916 return fold_build2 (code, type, iref,
11917 build_int_cst (TREE_TYPE (iref), 0));
11921 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11922 of X. Similarly fold (X >> C) == 0 into X >= 0. */
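/* For example, with a 32-bit X,

       (X >> 31) != 0    becomes    X < 0
       (X >> 31) == 0    becomes    X >= 0

   converting X to the corresponding signed type first when it is
   unsigned.  */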
11923 if (TREE_CODE (arg0) == RSHIFT_EXPR
11924 && integer_zerop (arg1)
11925 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11927 tree arg00 = TREE_OPERAND (arg0, 0);
11928 tree arg01 = TREE_OPERAND (arg0, 1);
11929 tree itype = TREE_TYPE (arg00);
11930 if (TREE_INT_CST_HIGH (arg01) == 0
11931 && TREE_INT_CST_LOW (arg01)
11932 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11934 if (TYPE_UNSIGNED (itype))
11936 itype = signed_type_for (itype);
11937 arg00 = fold_convert (itype, arg00);
11939 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11940 type, arg00, build_int_cst (itype, 0));
11944 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11945 if (integer_zerop (arg1)
11946 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11947 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11948 TREE_OPERAND (arg0, 1));
11950 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11951 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11952 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11953 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11954 build_int_cst (TREE_TYPE (arg1), 0));
11955 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11956 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11957 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11958 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11959 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11960 build_int_cst (TREE_TYPE (arg1), 0));
11962 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11963 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11964 && TREE_CODE (arg1) == INTEGER_CST
11965 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11966 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11967 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11968 TREE_OPERAND (arg0, 1), arg1));
11970 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11971 (X & C) == 0 when C is a single bit. */
11972 if (TREE_CODE (arg0) == BIT_AND_EXPR
11973 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11974 && integer_zerop (arg1)
11975 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11977 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11978 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11979 TREE_OPERAND (arg0, 1));
11980 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11984 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11985 constant C is a power of two, i.e. a single bit. */
11986 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11987 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11988 && integer_zerop (arg1)
11989 && integer_pow2p (TREE_OPERAND (arg0, 1))
11990 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11991 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11993 tree arg00 = TREE_OPERAND (arg0, 0);
11994 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11995 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11998 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11999 when C is a power of two, i.e. a single bit. */
12000 if (TREE_CODE (arg0) == BIT_AND_EXPR
12001 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12002 && integer_zerop (arg1)
12003 && integer_pow2p (TREE_OPERAND (arg0, 1))
12004 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12005 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12007 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12008 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12009 arg000, TREE_OPERAND (arg0, 1));
12010 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12011 tem, build_int_cst (TREE_TYPE (tem), 0));
12014 if (integer_zerop (arg1)
12015 && tree_expr_nonzero_p (arg0))
12017 tree res = constant_boolean_node (code==NE_EXPR, type);
12018 return omit_one_operand (type, res, arg0);
12021 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12022 if (TREE_CODE (arg0) == NEGATE_EXPR
12023 && TREE_CODE (arg1) == NEGATE_EXPR)
12024 return fold_build2 (code, type,
12025 TREE_OPERAND (arg0, 0),
12026 TREE_OPERAND (arg1, 0));
12028 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12029 if (TREE_CODE (arg0) == BIT_AND_EXPR
12030 && TREE_CODE (arg1) == BIT_AND_EXPR)
12032 tree arg00 = TREE_OPERAND (arg0, 0);
12033 tree arg01 = TREE_OPERAND (arg0, 1);
12034 tree arg10 = TREE_OPERAND (arg1, 0);
12035 tree arg11 = TREE_OPERAND (arg1, 1);
12036 tree itype = TREE_TYPE (arg0);
12038 if (operand_equal_p (arg01, arg11, 0))
12039 return fold_build2 (code, type,
12040 fold_build2 (BIT_AND_EXPR, itype,
12041 fold_build2 (BIT_XOR_EXPR, itype,
12044 build_int_cst (itype, 0));
12046 if (operand_equal_p (arg01, arg10, 0))
12047 return fold_build2 (code, type,
12048 fold_build2 (BIT_AND_EXPR, itype,
12049 fold_build2 (BIT_XOR_EXPR, itype,
12052 build_int_cst (itype, 0));
12054 if (operand_equal_p (arg00, arg11, 0))
12055 return fold_build2 (code, type,
12056 fold_build2 (BIT_AND_EXPR, itype,
12057 fold_build2 (BIT_XOR_EXPR, itype,
12060 build_int_cst (itype, 0));
12062 if (operand_equal_p (arg00, arg10, 0))
12063 return fold_build2 (code, type,
12064 fold_build2 (BIT_AND_EXPR, itype,
12065 fold_build2 (BIT_XOR_EXPR, itype,
12068 build_int_cst (itype, 0));
12071 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12072 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12074 tree arg00 = TREE_OPERAND (arg0, 0);
12075 tree arg01 = TREE_OPERAND (arg0, 1);
12076 tree arg10 = TREE_OPERAND (arg1, 0);
12077 tree arg11 = TREE_OPERAND (arg1, 1);
12078 tree itype = TREE_TYPE (arg0);
12080 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12081 operand_equal_p guarantees no side-effects so we don't need
12082 to use omit_one_operand on Z. */
12083 if (operand_equal_p (arg01, arg11, 0))
12084 return fold_build2 (code, type, arg00, arg10);
12085 if (operand_equal_p (arg01, arg10, 0))
12086 return fold_build2 (code, type, arg00, arg11);
12087 if (operand_equal_p (arg00, arg11, 0))
12088 return fold_build2 (code, type, arg01, arg10);
12089 if (operand_equal_p (arg00, arg10, 0))
12090 return fold_build2 (code, type, arg01, arg11);
12092 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12093 if (TREE_CODE (arg01) == INTEGER_CST
12094 && TREE_CODE (arg11) == INTEGER_CST)
12095 return fold_build2 (code, type,
12096 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12097 fold_build2 (BIT_XOR_EXPR, itype,
12102 /* Attempt to simplify equality/inequality comparisons of complex
12103 values. Only lower the comparison if the result is known or
12104 can be simplified to a single scalar comparison. */
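/* For example,

       COMPLEX_EXPR <1.0, X> == COMPLEX_EXPR <2.0, Y>

   folds to false (X and Y are kept only for their side effects),
   while

       COMPLEX_EXPR <1.0, X> == COMPLEX_EXPR <1.0, Y>

   reduces to the single scalar comparison X == Y.  */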
12105 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12106 || TREE_CODE (arg0) == COMPLEX_CST)
12107 && (TREE_CODE (arg1) == COMPLEX_EXPR
12108 || TREE_CODE (arg1) == COMPLEX_CST))
12110 tree real0, imag0, real1, imag1;
12113 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12115 real0 = TREE_OPERAND (arg0, 0);
12116 imag0 = TREE_OPERAND (arg0, 1);
12120 real0 = TREE_REALPART (arg0);
12121 imag0 = TREE_IMAGPART (arg0);
12124 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12126 real1 = TREE_OPERAND (arg1, 0);
12127 imag1 = TREE_OPERAND (arg1, 1);
12131 real1 = TREE_REALPART (arg1);
12132 imag1 = TREE_IMAGPART (arg1);
12135 rcond = fold_binary (code, type, real0, real1);
12136 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12138 if (integer_zerop (rcond))
12140 if (code == EQ_EXPR)
12141 return omit_two_operands (type, boolean_false_node,
12143 return fold_build2 (NE_EXPR, type, imag0, imag1);
12147 if (code == NE_EXPR)
12148 return omit_two_operands (type, boolean_true_node,
12150 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12154 icond = fold_binary (code, type, imag0, imag1);
12155 if (icond && TREE_CODE (icond) == INTEGER_CST)
12157 if (integer_zerop (icond))
12159 if (code == EQ_EXPR)
12160 return omit_two_operands (type, boolean_false_node,
12162 return fold_build2 (NE_EXPR, type, real0, real1);
12166 if (code == NE_EXPR)
12167 return omit_two_operands (type, boolean_true_node,
12169 return fold_build2 (EQ_EXPR, type, real0, real1);
12180 tem = fold_comparison (code, type, op0, op1);
12181 if (tem != NULL_TREE)
12184 /* Transform comparisons of the form X +- C CMP X. */
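/* For example, when signed overflow is undefined, X + 1 > X folds to
   true and X - 1 > X folds to false; a -Wstrict-overflow warning may
   be emitted because the result relies on that assumption.  */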
12185 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12186 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12187 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12188 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12189 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12190 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12192 tree arg01 = TREE_OPERAND (arg0, 1);
12193 enum tree_code code0 = TREE_CODE (arg0);
12196 if (TREE_CODE (arg01) == REAL_CST)
12197 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12199 is_positive = tree_int_cst_sgn (arg01);
12201 /* (X - c) > X becomes false. */
12202 if (code == GT_EXPR
12203 && ((code0 == MINUS_EXPR && is_positive >= 0)
12204 || (code0 == PLUS_EXPR && is_positive <= 0)))
12206 if (TREE_CODE (arg01) == INTEGER_CST
12207 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12208 fold_overflow_warning (("assuming signed overflow does not "
12209 "occur when assuming that (X - c) > X "
12210 "is always false"),
12211 WARN_STRICT_OVERFLOW_ALL);
12212 return constant_boolean_node (0, type);
12215 /* Likewise (X + c) < X becomes false. */
12216 if (code == LT_EXPR
12217 && ((code0 == PLUS_EXPR && is_positive >= 0)
12218 || (code0 == MINUS_EXPR && is_positive <= 0)))
12220 if (TREE_CODE (arg01) == INTEGER_CST
12221 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12222 fold_overflow_warning (("assuming signed overflow does not "
12223 "occur when assuming that "
12224 "(X + c) < X is always false"),
12225 WARN_STRICT_OVERFLOW_ALL);
12226 return constant_boolean_node (0, type);
12229 /* Convert (X - c) <= X to true. */
12230 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12232 && ((code0 == MINUS_EXPR && is_positive >= 0)
12233 || (code0 == PLUS_EXPR && is_positive <= 0)))
12235 if (TREE_CODE (arg01) == INTEGER_CST
12236 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12237 fold_overflow_warning (("assuming signed overflow does not "
12238 "occur when assuming that "
12239 "(X - c) <= X is always true"),
12240 WARN_STRICT_OVERFLOW_ALL);
12241 return constant_boolean_node (1, type);
12244 /* Convert (X + c) >= X to true. */
12245 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12247 && ((code0 == PLUS_EXPR && is_positive >= 0)
12248 || (code0 == MINUS_EXPR && is_positive <= 0)))
12250 if (TREE_CODE (arg01) == INTEGER_CST
12251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12252 fold_overflow_warning (("assuming signed overflow does not "
12253 "occur when assuming that "
12254 "(X + c) >= X is always true"),
12255 WARN_STRICT_OVERFLOW_ALL);
12256 return constant_boolean_node (1, type);
12259 if (TREE_CODE (arg01) == INTEGER_CST)
12261 /* Convert X + c > X and X - c < X to true for integers. */
12262 if (code == GT_EXPR
12263 && ((code0 == PLUS_EXPR && is_positive > 0)
12264 || (code0 == MINUS_EXPR && is_positive < 0)))
12266 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12267 fold_overflow_warning (("assuming signed overflow does "
12268 "not occur when assuming that "
12269 "(X + c) > X is always true"),
12270 WARN_STRICT_OVERFLOW_ALL);
12271 return constant_boolean_node (1, type);
12274 if (code == LT_EXPR
12275 && ((code0 == MINUS_EXPR && is_positive > 0)
12276 || (code0 == PLUS_EXPR && is_positive < 0)))
12278 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12279 fold_overflow_warning (("assuming signed overflow does "
12280 "not occur when assuming that "
12281 "(X - c) < X is always true"),
12282 WARN_STRICT_OVERFLOW_ALL);
12283 return constant_boolean_node (1, type);
12286 /* Convert X + c <= X and X - c >= X to false for integers. */
12287 if (code == LE_EXPR
12288 && ((code0 == PLUS_EXPR && is_positive > 0)
12289 || (code0 == MINUS_EXPR && is_positive < 0)))
12291 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12292 fold_overflow_warning (("assuming signed overflow does "
12293 "not occur when assuming that "
12294 "(X + c) <= X is always false"),
12295 WARN_STRICT_OVERFLOW_ALL);
12296 return constant_boolean_node (0, type);
12299 if (code == GE_EXPR
12300 && ((code0 == MINUS_EXPR && is_positive > 0)
12301 || (code0 == PLUS_EXPR && is_positive < 0)))
12303 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12304 fold_overflow_warning (("assuming signed overflow does "
12305 "not occur when assuming that "
12306 "(X - c) >= X is always false"),
12307 WARN_STRICT_OVERFLOW_ALL);
12308 return constant_boolean_node (0, type);
12313 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12314 This transformation affects the cases which are handled in later
12315 optimizations involving comparisons with non-negative constants. */
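/* For example, X >= 5 becomes X > 4 and X < 5 becomes X <= 4, which
   is why the GE_EXPR and LT_EXPR arms of the extreme-value folds
   below are rarely reached.  */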
12316 if (TREE_CODE (arg1) == INTEGER_CST
12317 && TREE_CODE (arg0) != INTEGER_CST
12318 && tree_int_cst_sgn (arg1) > 0)
12320 if (code == GE_EXPR)
12322 arg1 = const_binop (MINUS_EXPR, arg1,
12323 build_int_cst (TREE_TYPE (arg1), 1), 0);
12324 return fold_build2 (GT_EXPR, type, arg0,
12325 fold_convert (TREE_TYPE (arg0), arg1));
12327 if (code == LT_EXPR)
12329 arg1 = const_binop (MINUS_EXPR, arg1,
12330 build_int_cst (TREE_TYPE (arg1), 1), 0);
12331 return fold_build2 (LE_EXPR, type, arg0,
12332 fold_convert (TREE_TYPE (arg0), arg1));
12336 /* Comparisons with the highest or lowest possible integer of
12337 the specified precision will have known values. */
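/* For example, if X has a 32-bit unsigned type, then at the tree level

       X >  4294967295U    folds to 0
       X <= 4294967295U    folds to 1
       X >= 4294967295U    becomes X == 4294967295U
       X >  4294967294U    becomes X == 4294967295U

   and the minimum value of the type is handled symmetrically.  */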
12339 tree arg1_type = TREE_TYPE (arg1);
12340 unsigned int width = TYPE_PRECISION (arg1_type);
12342 if (TREE_CODE (arg1) == INTEGER_CST
12343 && !TREE_OVERFLOW (arg1)
12344 && width <= 2 * HOST_BITS_PER_WIDE_INT
12345 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12347 HOST_WIDE_INT signed_max_hi;
12348 unsigned HOST_WIDE_INT signed_max_lo;
12349 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12351 if (width <= HOST_BITS_PER_WIDE_INT)
12353 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12358 if (TYPE_UNSIGNED (arg1_type))
12360 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12366 max_lo = signed_max_lo;
12367 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12373 width -= HOST_BITS_PER_WIDE_INT;
12374 signed_max_lo = -1;
12375 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12380 if (TYPE_UNSIGNED (arg1_type))
12382 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12387 max_hi = signed_max_hi;
12388 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12392 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12393 && TREE_INT_CST_LOW (arg1) == max_lo)
12397 return omit_one_operand (type, integer_zero_node, arg0);
12400 return fold_build2 (EQ_EXPR, type, op0, op1);
12403 return omit_one_operand (type, integer_one_node, arg0);
12406 return fold_build2 (NE_EXPR, type, op0, op1);
12408 /* The GE_EXPR and LT_EXPR cases above are not normally
12409 reached because of previous transformations. */
12414 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12416 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12420 arg1 = const_binop (PLUS_EXPR, arg1,
12421 build_int_cst (TREE_TYPE (arg1), 1), 0);
12422 return fold_build2 (EQ_EXPR, type,
12423 fold_convert (TREE_TYPE (arg1), arg0),
12426 arg1 = const_binop (PLUS_EXPR, arg1,
12427 build_int_cst (TREE_TYPE (arg1), 1), 0);
12428 return fold_build2 (NE_EXPR, type,
12429 fold_convert (TREE_TYPE (arg1), arg0),
12434 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12436 && TREE_INT_CST_LOW (arg1) == min_lo)
12440 return omit_one_operand (type, integer_zero_node, arg0);
12443 return fold_build2 (EQ_EXPR, type, op0, op1);
12446 return omit_one_operand (type, integer_one_node, arg0);
12449 return fold_build2 (NE_EXPR, type, op0, op1);
12454 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12456 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12460 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12461 return fold_build2 (NE_EXPR, type,
12462 fold_convert (TREE_TYPE (arg1), arg0),
12465 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12466 return fold_build2 (EQ_EXPR, type,
12467 fold_convert (TREE_TYPE (arg1), arg0),
12473 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12474 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12475 && TYPE_UNSIGNED (arg1_type)
12476 /* We will flip the signedness of the comparison operator
12477 associated with the mode of arg1, so the sign bit is
12478 specified by this mode. Check that arg1 is the signed
12479 max associated with this sign bit. */
12480 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12481 /* signed_type does not work on pointer types. */
12482 && INTEGRAL_TYPE_P (arg1_type))
12484 /* The following case also applies to X < signed_max+1
12485 and X >= signed_max+1 because of previous transformations. */
12486 if (code == LE_EXPR || code == GT_EXPR)
12489 st = signed_type_for (TREE_TYPE (arg1));
12490 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12491 type, fold_convert (st, arg0),
12492 build_int_cst (st, 0));
12498 /* If we are comparing an ABS_EXPR with a constant, we can
12499 convert all the cases into explicit comparisons, but they may
12500 well not be faster than doing the ABS and one comparison.
12501 But ABS (X) <= C is a range comparison, which becomes a subtraction
12502 and a comparison, and is probably faster. */
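/* For example, ABS (X) <= 7 becomes X >= -7 && X <= 7, which the
   range-test machinery can then turn into a single unsigned
   comparison such as (unsigned) X + 7 <= 14.  */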
12503 if (code == LE_EXPR
12504 && TREE_CODE (arg1) == INTEGER_CST
12505 && TREE_CODE (arg0) == ABS_EXPR
12506 && ! TREE_SIDE_EFFECTS (arg0)
12507 && (0 != (tem = negate_expr (arg1)))
12508 && TREE_CODE (tem) == INTEGER_CST
12509 && !TREE_OVERFLOW (tem))
12510 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12511 build2 (GE_EXPR, type,
12512 TREE_OPERAND (arg0, 0), tem),
12513 build2 (LE_EXPR, type,
12514 TREE_OPERAND (arg0, 0), arg1));
12516 /* Convert ABS_EXPR<x> >= 0 to true. */
12517 strict_overflow_p = false;
12518 if (code == GE_EXPR
12519 && (integer_zerop (arg1)
12520 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12521 && real_zerop (arg1)))
12522 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12524 if (strict_overflow_p)
12525 fold_overflow_warning (("assuming signed overflow does not occur "
12526 "when simplifying comparison of "
12527 "absolute value and zero"),
12528 WARN_STRICT_OVERFLOW_CONDITIONAL);
12529 return omit_one_operand (type, integer_one_node, arg0);
12532 /* Convert ABS_EXPR<x> < 0 to false. */
12533 strict_overflow_p = false;
12534 if (code == LT_EXPR
12535 && (integer_zerop (arg1) || real_zerop (arg1))
12536 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12538 if (strict_overflow_p)
12539 fold_overflow_warning (("assuming signed overflow does not occur "
12540 "when simplifying comparison of "
12541 "absolute value and zero"),
12542 WARN_STRICT_OVERFLOW_CONDITIONAL);
12543 return omit_one_operand (type, integer_zero_node, arg0);
12546 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12547 and similarly for >= into !=. */
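/* For example, for unsigned X,

       X <  (1 << Y)    becomes    (X >> Y) == 0
       X >= (1 << Y)    becomes    (X >> Y) != 0

   trading the variable left shift of the constant 1 for a right
   shift of X and a test against zero.  */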
12548 if ((code == LT_EXPR || code == GE_EXPR)
12549 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12550 && TREE_CODE (arg1) == LSHIFT_EXPR
12551 && integer_onep (TREE_OPERAND (arg1, 0)))
12552 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12553 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12554 TREE_OPERAND (arg1, 1)),
12555 build_int_cst (TREE_TYPE (arg0), 0));
12557 if ((code == LT_EXPR || code == GE_EXPR)
12558 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12559 && (TREE_CODE (arg1) == NOP_EXPR
12560 || TREE_CODE (arg1) == CONVERT_EXPR)
12561 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12562 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12564 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12565 fold_convert (TREE_TYPE (arg0),
12566 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12567 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12569 build_int_cst (TREE_TYPE (arg0), 0));
12573 case UNORDERED_EXPR:
12581 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12583 t1 = fold_relational_const (code, type, arg0, arg1);
12584 if (t1 != NULL_TREE)
12588 /* If the first operand is NaN, the result is constant. */
12589 if (TREE_CODE (arg0) == REAL_CST
12590 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12591 && (code != LTGT_EXPR || ! flag_trapping_math))
12593 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12594 ? integer_zero_node
12595 : integer_one_node;
12596 return omit_one_operand (type, t1, arg1);
12599 /* If the second operand is NaN, the result is constant. */
12600 if (TREE_CODE (arg1) == REAL_CST
12601 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12602 && (code != LTGT_EXPR || ! flag_trapping_math))
12604 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12605 ? integer_zero_node
12606 : integer_one_node;
12607 return omit_one_operand (type, t1, arg0);
12610 /* Simplify unordered comparison of something with itself. */
12611 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12612 && operand_equal_p (arg0, arg1, 0))
12613 return constant_boolean_node (1, type);
12615 if (code == LTGT_EXPR
12616 && !flag_trapping_math
12617 && operand_equal_p (arg0, arg1, 0))
12618 return constant_boolean_node (0, type);
12620 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12622 tree targ0 = strip_float_extensions (arg0);
12623 tree targ1 = strip_float_extensions (arg1);
12624 tree newtype = TREE_TYPE (targ0);
12626 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12627 newtype = TREE_TYPE (targ1);
12629 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12630 return fold_build2 (code, type, fold_convert (newtype, targ0),
12631 fold_convert (newtype, targ1));
12636 case COMPOUND_EXPR:
12637 /* When pedantic, a compound expression can be neither an lvalue
12638 nor an integer constant expression. */
12639 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12642 /* Don't let (0, 0) be a null pointer constant. */
12642 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12643 : fold_convert (type, arg1);
12644 return pedantic_non_lvalue (tem);
12647 if ((TREE_CODE (arg0) == REAL_CST
12648 && TREE_CODE (arg1) == REAL_CST)
12649 || (TREE_CODE (arg0) == INTEGER_CST
12650 && TREE_CODE (arg1) == INTEGER_CST))
12651 return build_complex (type, arg0, arg1);
12655 /* An ASSERT_EXPR should never be passed to fold_binary. */
12656 gcc_unreachable ();
12660 } /* switch (code) */
12663 /* Callback for walk_tree, looking for LABEL_EXPR.
12664 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
12665 Do not check the sub-tree of GOTO_EXPR. */
12668 contains_label_1 (tree *tp,
12669 int *walk_subtrees,
12670 void *data ATTRIBUTE_UNUSED)
12672 switch (TREE_CODE (*tp))
12677 *walk_subtrees = 0;
12684 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12685 accessible from outside the sub-tree. Returns false if no
12686 addressable label is found. */
12689 contains_label_p (tree st)
12691 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12694 /* Fold a ternary expression of code CODE and type TYPE with operands
12695 OP0, OP1, and OP2. Return the folded expression if folding is
12696 successful. Otherwise, return NULL_TREE. */
12699 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12702 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12703 enum tree_code_class kind = TREE_CODE_CLASS (code);
12705 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12706 && TREE_CODE_LENGTH (code) == 3);
12708 /* Strip any conversions that don't change the mode. This is safe
12709 for every expression, except for a comparison expression because
12710 its signedness is derived from its operands. So, in the latter
12711 case, only strip conversions that don't change the signedness.
12713 Note that this is done as an internal manipulation within the
12714 constant folder, in order to find the simplest representation of
12715 the arguments so that their form can be studied. In any case,
12716 the appropriate type conversions should be put back in the tree
12717 that will get out of the constant folder. */
12732 case COMPONENT_REF:
12733 if (TREE_CODE (arg0) == CONSTRUCTOR
12734 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12736 unsigned HOST_WIDE_INT idx;
12738 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12745 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12746 so all simple results must be passed through pedantic_non_lvalue. */
12747 if (TREE_CODE (arg0) == INTEGER_CST)
12749 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12750 tem = integer_zerop (arg0) ? op2 : op1;
12751 /* Only optimize constant conditions when the selected branch
12752 has the same type as the COND_EXPR. This avoids optimizing
12753 away "c ? x : throw", where the throw has a void type.
12754 Avoid throwing away the operand which contains a label. */
12755 if ((!TREE_SIDE_EFFECTS (unused_op)
12756 || !contains_label_p (unused_op))
12757 && (! VOID_TYPE_P (TREE_TYPE (tem))
12758 || VOID_TYPE_P (type)))
12759 return pedantic_non_lvalue (tem);
12762 if (operand_equal_p (arg1, op2, 0))
12763 return pedantic_omit_one_operand (type, arg1, arg0);
12765 /* If we have A op B ? A : C, we may be able to convert this to a
12766 simpler expression, depending on the operation and the values
12767 of B and C. Signed zeros prevent all of these transformations,
12768 for reasons given above each one.
12770 Also try swapping the arguments and inverting the conditional. */
12771 if (COMPARISON_CLASS_P (arg0)
12772 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12773 arg1, TREE_OPERAND (arg0, 1))
12774 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12776 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12781 if (COMPARISON_CLASS_P (arg0)
12782 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12784 TREE_OPERAND (arg0, 1))
12785 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12787 tem = fold_truth_not_expr (arg0);
12788 if (tem && COMPARISON_CLASS_P (tem))
12790 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12796 /* If the second operand is simpler than the third, swap them
12797 since that produces better jump optimization results. */
12798 if (truth_value_p (TREE_CODE (arg0))
12799 && tree_swap_operands_p (op1, op2, false))
12801 /* See if this can be inverted. If it can't, possibly because
12802 it was a floating-point inequality comparison, don't do anything. */
12804 tem = fold_truth_not_expr (arg0);
12806 return fold_build3 (code, type, tem, op2, op1);
12809 /* Convert A ? 1 : 0 to simply A. */
12810 if (integer_onep (op1)
12811 && integer_zerop (op2)
12812 /* If we try to convert OP0 to our type, the
12813 call to fold will try to move the conversion inside
12814 a COND, which will recurse. In that case, the COND_EXPR
12815 is probably the best choice, so leave it alone. */
12816 && type == TREE_TYPE (arg0))
12817 return pedantic_non_lvalue (arg0);
12819 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12820 over COND_EXPR in cases such as floating point comparisons. */
12821 if (integer_zerop (op1)
12822 && integer_onep (op2)
12823 && truth_value_p (TREE_CODE (arg0)))
12824 return pedantic_non_lvalue (fold_convert (type,
12825 invert_truthvalue (arg0)));
12827 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12828 if (TREE_CODE (arg0) == LT_EXPR
12829 && integer_zerop (TREE_OPERAND (arg0, 1))
12830 && integer_zerop (op2)
12831 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12833 /* sign_bit_p only checks ARG1 bits within A's precision.
12834 If <sign bit of A> has wider type than A, bits outside
12835 of A's precision in <sign bit of A> need to be checked.
12836 If they are all 0, this optimization needs to be done
12837 in unsigned A's type; if they are all 1, in signed A's type;
12838 otherwise this can't be done. */
12839 if (TYPE_PRECISION (TREE_TYPE (tem))
12840 < TYPE_PRECISION (TREE_TYPE (arg1))
12841 && TYPE_PRECISION (TREE_TYPE (tem))
12842 < TYPE_PRECISION (type))
12844 unsigned HOST_WIDE_INT mask_lo;
12845 HOST_WIDE_INT mask_hi;
12846 int inner_width, outer_width;
12849 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12850 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12851 if (outer_width > TYPE_PRECISION (type))
12852 outer_width = TYPE_PRECISION (type);
12854 if (outer_width > HOST_BITS_PER_WIDE_INT)
12856 mask_hi = ((unsigned HOST_WIDE_INT) -1
12857 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12863 mask_lo = ((unsigned HOST_WIDE_INT) -1
12864 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12866 if (inner_width > HOST_BITS_PER_WIDE_INT)
12868 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12869 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12873 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12874 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12876 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12877 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12879 tem_type = signed_type_for (TREE_TYPE (tem));
12880 tem = fold_convert (tem_type, tem);
12882 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12883 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12885 tem_type = unsigned_type_for (TREE_TYPE (tem));
12886 tem = fold_convert (tem_type, tem);
12893 return fold_convert (type,
12894 fold_build2 (BIT_AND_EXPR,
12895 TREE_TYPE (tem), tem,
12896 fold_convert (TREE_TYPE (tem),
12900 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12901 already handled above. */
12902 if (TREE_CODE (arg0) == BIT_AND_EXPR
12903 && integer_onep (TREE_OPERAND (arg0, 1))
12904 && integer_zerop (op2)
12905 && integer_pow2p (arg1))
12907 tree tem = TREE_OPERAND (arg0, 0);
12909 if (TREE_CODE (tem) == RSHIFT_EXPR
12910 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12911 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12912 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12913 return fold_build2 (BIT_AND_EXPR, type,
12914 TREE_OPERAND (tem, 0), arg1);
12917 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12918 is probably obsolete because the first operand should be a
12919 truth value (that's why we have the two cases above), but let's
12920 leave it in until we can confirm this for all front-ends. */
12921 if (integer_zerop (op2)
12922 && TREE_CODE (arg0) == NE_EXPR
12923 && integer_zerop (TREE_OPERAND (arg0, 1))
12924 && integer_pow2p (arg1)
12925 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12926 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12927 arg1, OEP_ONLY_CONST))
12928 return pedantic_non_lvalue (fold_convert (type,
12929 TREE_OPERAND (arg0, 0)));
12931 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12932 if (integer_zerop (op2)
12933 && truth_value_p (TREE_CODE (arg0))
12934 && truth_value_p (TREE_CODE (arg1)))
12935 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12936 fold_convert (type, arg0),
12939 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12940 if (integer_onep (op2)
12941 && truth_value_p (TREE_CODE (arg0))
12942 && truth_value_p (TREE_CODE (arg1)))
12944 /* Only perform transformation if ARG0 is easily inverted. */
12945 tem = fold_truth_not_expr (arg0);
12947 return fold_build2 (TRUTH_ORIF_EXPR, type,
12948 fold_convert (type, tem),
12952 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12953 if (integer_zerop (arg1)
12954 && truth_value_p (TREE_CODE (arg0))
12955 && truth_value_p (TREE_CODE (op2)))
12957 /* Only perform transformation if ARG0 is easily inverted. */
12958 tem = fold_truth_not_expr (arg0);
12960 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12961 fold_convert (type, tem),
12965 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12966 if (integer_onep (arg1)
12967 && truth_value_p (TREE_CODE (arg0))
12968 && truth_value_p (TREE_CODE (op2)))
12969 return fold_build2 (TRUTH_ORIF_EXPR, type,
12970 fold_convert (type, arg0),
12976 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12977 of fold_ternary on them. */
12978 gcc_unreachable ();
12980 case BIT_FIELD_REF:
12981 if ((TREE_CODE (arg0) == VECTOR_CST
12982 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12983 && type == TREE_TYPE (TREE_TYPE (arg0)))
12985 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12986 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12989 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12990 && (idx % width) == 0
12991 && (idx = idx / width)
12992 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12994 tree elements = NULL_TREE;
12996 if (TREE_CODE (arg0) == VECTOR_CST)
12997 elements = TREE_VECTOR_CST_ELTS (arg0);
13000 unsigned HOST_WIDE_INT idx;
13003 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13004 elements = tree_cons (NULL_TREE, value, elements);
13006 while (idx-- > 0 && elements)
13007 elements = TREE_CHAIN (elements);
13009 return TREE_VALUE (elements);
13011 return fold_convert (type, integer_zero_node);
13018 } /* switch (code) */
13021 /* Perform constant folding and related simplification of EXPR.
13022 The related simplifications include x*1 => x, x*0 => 0, etc.,
13023 and application of the associative law.
13024 NOP_EXPR conversions may be removed freely (as long as we
13025 are careful not to change the type of the overall expression).
13026 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13027 but we can constant-fold them if they have constant operands. */
13029 #ifdef ENABLE_FOLD_CHECKING
13030 # define fold(x) fold_1 (x)
13031 static tree fold_1 (tree);
13037 const tree t = expr;
13038 enum tree_code code = TREE_CODE (t);
13039 enum tree_code_class kind = TREE_CODE_CLASS (code);
13042 /* Return right away if a constant. */
13043 if (kind == tcc_constant)
13046 /* CALL_EXPR-like objects with variable numbers of operands are
13047 treated specially. */
13048 if (kind == tcc_vl_exp)
13050 if (code == CALL_EXPR)
13052 tem = fold_call_expr (expr, false);
13053 return tem ? tem : expr;
13058 if (IS_EXPR_CODE_CLASS (kind)
13059 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13061 tree type = TREE_TYPE (t);
13062 tree op0, op1, op2;
13064 switch (TREE_CODE_LENGTH (code))
13067 op0 = TREE_OPERAND (t, 0);
13068 tem = fold_unary (code, type, op0);
13069 return tem ? tem : expr;
13071 op0 = TREE_OPERAND (t, 0);
13072 op1 = TREE_OPERAND (t, 1);
13073 tem = fold_binary (code, type, op0, op1);
13074 return tem ? tem : expr;
13076 op0 = TREE_OPERAND (t, 0);
13077 op1 = TREE_OPERAND (t, 1);
13078 op2 = TREE_OPERAND (t, 2);
13079 tem = fold_ternary (code, type, op0, op1, op2);
13080 return tem ? tem : expr;
13089 return fold (DECL_INITIAL (t));
13093 } /* switch (code) */
13096 #ifdef ENABLE_FOLD_CHECKING
13099 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13100 static void fold_check_failed (const_tree, const_tree);
13101 void print_fold_checksum (const_tree);
13103 /* When --enable-checking=fold, compute a digest of expr before
13104 and after the actual fold call, to verify that fold did not accidentally
13105 change the original expr. */
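/* A sketch of the intended use: configure with --enable-checking=fold
   and rebuild; any fold that modifies its input tree in place is then
   caught because the MD5 digest taken before the call no longer
   matches the one taken afterwards, and fold_check_failed reports an
   internal compiler error.  */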
13111 struct md5_ctx ctx;
13112 unsigned char checksum_before[16], checksum_after[16];
13115 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13116 md5_init_ctx (&ctx);
13117 fold_checksum_tree (expr, &ctx, ht);
13118 md5_finish_ctx (&ctx, checksum_before);
13121 ret = fold_1 (expr);
13123 md5_init_ctx (&ctx);
13124 fold_checksum_tree (expr, &ctx, ht);
13125 md5_finish_ctx (&ctx, checksum_after);
13128 if (memcmp (checksum_before, checksum_after, 16))
13129 fold_check_failed (expr, ret);
13135 print_fold_checksum (const_tree expr)
13137 struct md5_ctx ctx;
13138 unsigned char checksum[16], cnt;
13141 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13142 md5_init_ctx (&ctx);
13143 fold_checksum_tree (expr, &ctx, ht);
13144 md5_finish_ctx (&ctx, checksum);
13146 for (cnt = 0; cnt < 16; ++cnt)
13147 fprintf (stderr, "%02x", checksum[cnt]);
13148 putc ('\n', stderr);
13152 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13154 internal_error ("fold check: original tree changed by fold");
13158 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13161 enum tree_code code;
13162 struct tree_function_decl buf;
13167 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13168 <= sizeof (struct tree_function_decl))
13169 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13172 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13176 code = TREE_CODE (expr);
13177 if (TREE_CODE_CLASS (code) == tcc_declaration
13178 && DECL_ASSEMBLER_NAME_SET_P (expr))
13180 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13181 memcpy ((char *) &buf, expr, tree_size (expr));
13182 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13183 expr = (tree) &buf;
13185 else if (TREE_CODE_CLASS (code) == tcc_type
13186 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13187 || TYPE_CACHED_VALUES_P (expr)
13188 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13190 /* Allow these fields to be modified. */
13192 memcpy ((char *) &buf, expr, tree_size (expr));
13193 expr = tmp = (tree) &buf;
13194 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13195 TYPE_POINTER_TO (tmp) = NULL;
13196 TYPE_REFERENCE_TO (tmp) = NULL;
13197 if (TYPE_CACHED_VALUES_P (tmp))
13199 TYPE_CACHED_VALUES_P (tmp) = 0;
13200 TYPE_CACHED_VALUES (tmp) = NULL;
13203 md5_process_bytes (expr, tree_size (expr), ctx);
13204 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13205 if (TREE_CODE_CLASS (code) != tcc_type
13206 && TREE_CODE_CLASS (code) != tcc_declaration
13207 && code != TREE_LIST
13208 && code != SSA_NAME)
13209 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13210 switch (TREE_CODE_CLASS (code))
13216 md5_process_bytes (TREE_STRING_POINTER (expr),
13217 TREE_STRING_LENGTH (expr), ctx);
13220 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13221 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13224 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13230 case tcc_exceptional:
13234 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13235 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13236 expr = TREE_CHAIN (expr);
13237 goto recursive_label;
13240 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13241 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13247 case tcc_expression:
13248 case tcc_reference:
13249 case tcc_comparison:
13252 case tcc_statement:
13254 len = TREE_OPERAND_LENGTH (expr);
13255 for (i = 0; i < len; ++i)
13256 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13258 case tcc_declaration:
13259 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13260 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13261 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13263 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13264 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13265 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13266 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13267 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13269 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13270 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13272 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13274 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13275 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13276 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13280 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13281 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13282 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13283 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13284 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13285 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13286 if (INTEGRAL_TYPE_P (expr)
13287 || SCALAR_FLOAT_TYPE_P (expr))
13289 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13290 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13292 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13293 if (TREE_CODE (expr) == RECORD_TYPE
13294 || TREE_CODE (expr) == UNION_TYPE
13295 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13296 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13297 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13304 /* Helper function for outputting the checksum of a tree T. When
13305 debugging with gdb, you can "define mynext" to be "next" followed
13306 by "call debug_fold_checksum (op0)", then just trace down till the
13310 debug_fold_checksum (const_tree t)
13313 unsigned char checksum[16];
13314 struct md5_ctx ctx;
13315 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13317 md5_init_ctx (&ctx);
13318 fold_checksum_tree (t, &ctx, ht);
13319 md5_finish_ctx (&ctx, checksum);
13322 for (i = 0; i < 16; i++)
13323 fprintf (stderr, "%d ", checksum[i]);
13325 fprintf (stderr, "\n");
13330 /* Fold a unary tree expression with code CODE of type TYPE with an
13331 operand OP0. Return a folded expression if successful. Otherwise,
13332 return a tree expression with code CODE of type TYPE with an operand OP0. */
13336 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13339 #ifdef ENABLE_FOLD_CHECKING
13340 unsigned char checksum_before[16], checksum_after[16];
13341 struct md5_ctx ctx;
13344 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13345 md5_init_ctx (&ctx);
13346 fold_checksum_tree (op0, &ctx, ht);
13347 md5_finish_ctx (&ctx, checksum_before);
13351 tem = fold_unary (code, type, op0);
13353 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13355 #ifdef ENABLE_FOLD_CHECKING
13356 md5_init_ctx (&ctx);
13357 fold_checksum_tree (op0, &ctx, ht);
13358 md5_finish_ctx (&ctx, checksum_after);
13361 if (memcmp (checksum_before, checksum_after, 16))
13362 fold_check_failed (op0, tem);
13367 /* Fold a binary tree expression with code CODE of type TYPE with
13368 operands OP0 and OP1. Return a folded expression if successful.
13369 Otherwise, return a tree expression with code CODE of type TYPE
13370 with operands OP0 and OP1. */
13373 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13377 #ifdef ENABLE_FOLD_CHECKING
13378 unsigned char checksum_before_op0[16],
13379 checksum_before_op1[16],
13380 checksum_after_op0[16],
13381 checksum_after_op1[16];
13382 struct md5_ctx ctx;
13385 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13386 md5_init_ctx (&ctx);
13387 fold_checksum_tree (op0, &ctx, ht);
13388 md5_finish_ctx (&ctx, checksum_before_op0);
13391 md5_init_ctx (&ctx);
13392 fold_checksum_tree (op1, &ctx, ht);
13393 md5_finish_ctx (&ctx, checksum_before_op1);
13397 tem = fold_binary (code, type, op0, op1);
13399 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13401 #ifdef ENABLE_FOLD_CHECKING
13402 md5_init_ctx (&ctx);
13403 fold_checksum_tree (op0, &ctx, ht);
13404 md5_finish_ctx (&ctx, checksum_after_op0);
13407 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13408 fold_check_failed (op0, tem);
13410 md5_init_ctx (&ctx);
13411 fold_checksum_tree (op1, &ctx, ht);
13412 md5_finish_ctx (&ctx, checksum_after_op1);
13415 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13416 fold_check_failed (op1, tem);
13421 /* Fold a ternary tree expression with code CODE of type TYPE with
13422 operands OP0, OP1, and OP2. Return a folded expression if
13423 successful. Otherwise, return a tree expression with code CODE of
13424 type TYPE with operands OP0, OP1, and OP2. */
13427 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13431 #ifdef ENABLE_FOLD_CHECKING
13432 unsigned char checksum_before_op0[16],
13433 checksum_before_op1[16],
13434 checksum_before_op2[16],
13435 checksum_after_op0[16],
13436 checksum_after_op1[16],
13437 checksum_after_op2[16];
13438 struct md5_ctx ctx;
13441 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13442 md5_init_ctx (&ctx);
13443 fold_checksum_tree (op0, &ctx, ht);
13444 md5_finish_ctx (&ctx, checksum_before_op0);
13447 md5_init_ctx (&ctx);
13448 fold_checksum_tree (op1, &ctx, ht);
13449 md5_finish_ctx (&ctx, checksum_before_op1);
13452 md5_init_ctx (&ctx);
13453 fold_checksum_tree (op2, &ctx, ht);
13454 md5_finish_ctx (&ctx, checksum_before_op2);
13458 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13459 tem = fold_ternary (code, type, op0, op1, op2);
13461 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13463 #ifdef ENABLE_FOLD_CHECKING
13464 md5_init_ctx (&ctx);
13465 fold_checksum_tree (op0, &ctx, ht);
13466 md5_finish_ctx (&ctx, checksum_after_op0);
13469 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13470 fold_check_failed (op0, tem);
13472 md5_init_ctx (&ctx);
13473 fold_checksum_tree (op1, &ctx, ht);
13474 md5_finish_ctx (&ctx, checksum_after_op1);
13477 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13478 fold_check_failed (op1, tem);
13480 md5_init_ctx (&ctx);
13481 fold_checksum_tree (op2, &ctx, ht);
13482 md5_finish_ctx (&ctx, checksum_after_op2);
13485 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13486 fold_check_failed (op2, tem);
13491 /* Fold a CALL_EXPR expression of type TYPE with function FN, the
13492 NARGS arguments in ARGARRAY, and a null static chain.
13493 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13494 of type TYPE from the given operands as constructed by build_call_array. */
13497 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13500 #ifdef ENABLE_FOLD_CHECKING
13501 unsigned char checksum_before_fn[16],
13502 checksum_before_arglist[16],
13503 checksum_after_fn[16],
13504 checksum_after_arglist[16];
13505 struct md5_ctx ctx;
13509 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13510 md5_init_ctx (&ctx);
13511 fold_checksum_tree (fn, &ctx, ht);
13512 md5_finish_ctx (&ctx, checksum_before_fn);
13515 md5_init_ctx (&ctx);
13516 for (i = 0; i < nargs; i++)
13517 fold_checksum_tree (argarray[i], &ctx, ht);
13518 md5_finish_ctx (&ctx, checksum_before_arglist);
13522 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13524 #ifdef ENABLE_FOLD_CHECKING
13525 md5_init_ctx (&ctx);
13526 fold_checksum_tree (fn, &ctx, ht);
13527 md5_finish_ctx (&ctx, checksum_after_fn);
13530 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13531 fold_check_failed (fn, tem);
13533 md5_init_ctx (&ctx);
13534 for (i = 0; i < nargs; i++)
13535 fold_checksum_tree (argarray[i], &ctx, ht);
13536 md5_finish_ctx (&ctx, checksum_after_arglist);
13539 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13540 fold_check_failed (NULL_TREE, tem);
13545 /* Perform constant folding and related simplification of initializer
13546 expression EXPR. These behave identically to "fold_buildN" but ignore
13547 potential run-time traps and exceptions that fold must preserve. */
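/* For example, a static initializer such as

       static double d = 0.1 + 0.2;

   can still be evaluated at compile time even under -frounding-math
   or -ftrapping-math, because the wrappers below temporarily clear
   flag_rounding_math, flag_trapping_math and related flags around
   the fold_buildN call.  */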
13549 #define START_FOLD_INIT \
13550 int saved_signaling_nans = flag_signaling_nans;\
13551 int saved_trapping_math = flag_trapping_math;\
13552 int saved_rounding_math = flag_rounding_math;\
13553 int saved_trapv = flag_trapv;\
13554 int saved_folding_initializer = folding_initializer;\
13555 flag_signaling_nans = 0;\
13556 flag_trapping_math = 0;\
13557 flag_rounding_math = 0;\
13559 folding_initializer = 1;
13561 #define END_FOLD_INIT \
13562 flag_signaling_nans = saved_signaling_nans;\
13563 flag_trapping_math = saved_trapping_math;\
13564 flag_rounding_math = saved_rounding_math;\
13565 flag_trapv = saved_trapv;\
13566 folding_initializer = saved_folding_initializer;
13569 fold_build1_initializer (enum tree_code code, tree type, tree op)
13574 result = fold_build1 (code, type, op);
13581 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13586 result = fold_build2 (code, type, op0, op1);
13593 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13599 result = fold_build3 (code, type, op0, op1, op2);
13606 fold_build_call_array_initializer (tree type, tree fn,
13607 int nargs, tree *argarray)
13612 result = fold_build_call_array (type, fn, nargs, argarray);
13618 #undef START_FOLD_INIT
13619 #undef END_FOLD_INIT
13621 /* Determine if first argument is a multiple of second argument. Return 0 if
13622 it is not, or we cannot easily determine it to be.
13624 An example of the sort of thing we care about (at this point; this routine
13625 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13626 fold cases do now) is discovering that
13628 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13634 is a multiple of SAVE_EXPR (J * 8), when we know that the two
SAVE_EXPR (J * 8) nodes are the same node.
13636 This code also handles discovering that
13638 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13640 is a multiple of 8 so we don't have to worry about dealing with a
13641 possible remainder.
13643 Note that we *look* inside a SAVE_EXPR only to determine how it was
13644 calculated; it is not safe for fold to do much of anything else with the
13645 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13646 at run time. For example, the latter example above *cannot* be implemented
13647 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13648 evaluation time of the original SAVE_EXPR is not necessarily the same at
13649 the time the new expression is evaluated. The only optimization of this
13650 sort that would be valid is changing
13652 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13656 SAVE_EXPR (I) * SAVE_EXPR (J)
13658 (where the same SAVE_EXPR (J) is used in the original and the
13659 transformed version). */
13662 multiple_of_p (tree type, const_tree top, const_tree bottom)
13664 if (operand_equal_p (top, bottom, 0))
13667 if (TREE_CODE (type) != INTEGER_TYPE)
13670 switch (TREE_CODE (top))
13673 /* Bitwise and provides a power of two multiple. If the mask is
13674 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13675 if (!integer_pow2p (bottom))
13680 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13681 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13685 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13686 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13689 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13693 op1 = TREE_OPERAND (top, 1);
13694 /* const_binop may not detect overflow correctly,
13695 so check for it explicitly here. */
13696 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13697 > TREE_INT_CST_LOW (op1)
13698 && TREE_INT_CST_HIGH (op1) == 0
13699 && 0 != (t1 = fold_convert (type,
13700 const_binop (LSHIFT_EXPR,
13703 && !TREE_OVERFLOW (t1))
13704 return multiple_of_p (type, t1, bottom);
13709 /* Can't handle conversions from non-integral or wider integral type. */
13710 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13711 || (TYPE_PRECISION (type)
13712 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13715 /* .. fall through ... */
13718 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13721 if (TREE_CODE (bottom) != INTEGER_CST
13722 || integer_zerop (bottom)
13723 || (TYPE_UNSIGNED (type)
13724 && (tree_int_cst_sgn (top) < 0
13725 || tree_int_cst_sgn (bottom) < 0)))
13727 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13735 /* Return true if CODE or TYPE is known to be non-negative. */
13738 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13740 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13741 && truth_value_p (code))
13742 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13743 have a signed:1 type (where the values are -1 and 0). */
13748 /* Return true if (CODE OP0) is known to be non-negative. If the return
13749 value is based on the assumption that signed overflow is undefined,
13750 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13751 *STRICT_OVERFLOW_P. */
13754 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13755 bool *strict_overflow_p)
13757 if (TYPE_UNSIGNED (type))
13763 /* We can't return 1 if flag_wrapv is set because
13764 ABS_EXPR<INT_MIN> = INT_MIN. */
13765 if (!INTEGRAL_TYPE_P (type))
13767 if (TYPE_OVERFLOW_UNDEFINED (type))
13769 *strict_overflow_p = true;
13774 case NON_LVALUE_EXPR:
13776 case FIX_TRUNC_EXPR:
13777 return tree_expr_nonnegative_warnv_p (op0,
13778 strict_overflow_p);
13782 tree inner_type = TREE_TYPE (op0);
13783 tree outer_type = type;
13785 if (TREE_CODE (outer_type) == REAL_TYPE)
13787 if (TREE_CODE (inner_type) == REAL_TYPE)
13788 return tree_expr_nonnegative_warnv_p (op0,
13789 strict_overflow_p);
13790 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13792 if (TYPE_UNSIGNED (inner_type))
13794 return tree_expr_nonnegative_warnv_p (op0,
13795 strict_overflow_p);
13798 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13800 if (TREE_CODE (inner_type) == REAL_TYPE)
13801 return tree_expr_nonnegative_warnv_p (op0,
13802 strict_overflow_p);
13803 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13804 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13805 && TYPE_UNSIGNED (inner_type);
13811 return tree_simple_nonnegative_warnv_p (code, type);
13814 /* We don't know sign of `t', so be conservative and return false. */
13818 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13819 value is based on the assumption that signed overflow is undefined,
13820 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13821 *STRICT_OVERFLOW_P. */
13824 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13825 tree op1, bool *strict_overflow_p)
13827 if (TYPE_UNSIGNED (type))
13832 case POINTER_PLUS_EXPR:
13834 if (FLOAT_TYPE_P (type))
13835 return (tree_expr_nonnegative_warnv_p (op0,
13837 && tree_expr_nonnegative_warnv_p (op1,
13838 strict_overflow_p));
13840 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13841 both unsigned and at least 2 bits shorter than the result. */
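/* For example, two 16-bit unsigned operands widened to a 32-bit
   signed result sum to at most 65535 + 65535 = 131070, well below
   2^31, so the result cannot be negative; here prec is
   MAX (16, 16) + 1 = 17, which is less than 32.  */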
13842 if (TREE_CODE (type) == INTEGER_TYPE
13843 && TREE_CODE (op0) == NOP_EXPR
13844 && TREE_CODE (op1) == NOP_EXPR)
13846 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13847 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13848 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13849 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13851 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13852 TYPE_PRECISION (inner2)) + 1;
13853 return prec < TYPE_PRECISION (type);
13859 if (FLOAT_TYPE_P (type))
13861 /* x * x for floating point x is always non-negative. */
13862 if (operand_equal_p (op0, op1, 0))
13864 return (tree_expr_nonnegative_warnv_p (op0,
13866 && tree_expr_nonnegative_warnv_p (op1,
13867 strict_overflow_p));
13870 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
13871 unsigned and their combined precision is less than that of the result. */
13872 if (TREE_CODE (type) == INTEGER_TYPE
13873 && TREE_CODE (op0) == NOP_EXPR
13874 && TREE_CODE (op1) == NOP_EXPR)
13876 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13877 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13878 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13879 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13880 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13881 < TYPE_PRECISION (type);
13887 return (tree_expr_nonnegative_warnv_p (op0,
13889 || tree_expr_nonnegative_warnv_p (op1,
13890 strict_overflow_p));
13896 case TRUNC_DIV_EXPR:
13897 case CEIL_DIV_EXPR:
13898 case FLOOR_DIV_EXPR:
13899 case ROUND_DIV_EXPR:
13900 return (tree_expr_nonnegative_warnv_p (op0,
13902 && tree_expr_nonnegative_warnv_p (op1,
13903 strict_overflow_p));
13905 case TRUNC_MOD_EXPR:
13906 case CEIL_MOD_EXPR:
13907 case FLOOR_MOD_EXPR:
13908 case ROUND_MOD_EXPR:
13909 return tree_expr_nonnegative_warnv_p (op0,
13910 strict_overflow_p);
13912 return tree_simple_nonnegative_warnv_p (code, type);
13915 /* We don't know the sign of `t', so be conservative and return false. */
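/* Illustrative sketch, an assumption not in the original source,
   taking 8-bit chars and 32-bit ints:

       unsigned char a, b;
       int s = (int) a + (int) b;    -- at most 255 + 255 = 510
       int p = (int) a * (int) b;    -- at most 255 * 255 = 65025

   For the sum, prec = MAX (8, 8) + 1 = 9 < 32; for the product,
   8 + 8 = 16 < 32.  Both values stay well below the sign bit of the
   result type, so the PLUS_EXPR and MULT_EXPR cases above report them
   as non-negative.  */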
13919 /* Return true if T is known to be non-negative. If the return
13920 value is based on the assumption that signed overflow is undefined,
13921 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13922 *STRICT_OVERFLOW_P. */
13925 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13927 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13930 switch (TREE_CODE (t))
13933 /* Query VRP to see if it has recorded any information about
13934 the range of this object. */
13935 return ssa_name_nonnegative_p (t);
13938 return tree_int_cst_sgn (t) >= 0;
13941 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13944 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13947 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13949 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13950 strict_overflow_p));
13952 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13955 /* We don't know the sign of `t', so be conservative and return false. */
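/* Illustrative sketch, not part of the original source: single nodes
   are decided from their own value, or from both arms of a choice:

       build_int_cst (integer_type_node, 7)     -- non-negative
       build_int_cst (integer_type_node, -1)    -- not known non-negative
       c ? 3 : 5                                -- non-negative, since
                                                   both arms are

   while an SSA_NAME defers to whatever range VRP has recorded.  */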
13959 /* Return true if T is known to be non-negative. If the return
13960 value is based on the assumption that signed overflow is undefined,
13961 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13962 *STRICT_OVERFLOW_P. */
13965 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13967 enum tree_code code = TREE_CODE (t);
13968 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13975 tree temp = TARGET_EXPR_SLOT (t);
13976 t = TARGET_EXPR_INITIAL (t);
13978 /* If the initializer is non-void, then it's a normal expression
13979 that will be assigned to the slot. */
13980 if (!VOID_TYPE_P (t))
13981 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13983 /* Otherwise, the initializer sets the slot in some way. One common
13984 way is an assignment statement at the end of the initializer. */
13987 if (TREE_CODE (t) == BIND_EXPR)
13988 t = expr_last (BIND_EXPR_BODY (t));
13989 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13990 || TREE_CODE (t) == TRY_CATCH_EXPR)
13991 t = expr_last (TREE_OPERAND (t, 0));
13992 else if (TREE_CODE (t) == STATEMENT_LIST)
13997 if ((TREE_CODE (t) == MODIFY_EXPR
13998 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13999 && GENERIC_TREE_OPERAND (t, 0) == temp)
14000 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14001 strict_overflow_p);
14008 tree fndecl = get_callee_fndecl (t);
14009 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14010 switch (DECL_FUNCTION_CODE (fndecl))
14012 CASE_FLT_FN (BUILT_IN_ACOS):
14013 CASE_FLT_FN (BUILT_IN_ACOSH):
14014 CASE_FLT_FN (BUILT_IN_CABS):
14015 CASE_FLT_FN (BUILT_IN_COSH):
14016 CASE_FLT_FN (BUILT_IN_ERFC):
14017 CASE_FLT_FN (BUILT_IN_EXP):
14018 CASE_FLT_FN (BUILT_IN_EXP10):
14019 CASE_FLT_FN (BUILT_IN_EXP2):
14020 CASE_FLT_FN (BUILT_IN_FABS):
14021 CASE_FLT_FN (BUILT_IN_FDIM):
14022 CASE_FLT_FN (BUILT_IN_HYPOT):
14023 CASE_FLT_FN (BUILT_IN_POW10):
14024 CASE_INT_FN (BUILT_IN_FFS):
14025 CASE_INT_FN (BUILT_IN_PARITY):
14026 CASE_INT_FN (BUILT_IN_POPCOUNT):
14027 case BUILT_IN_BSWAP32:
14028 case BUILT_IN_BSWAP64:
14032 CASE_FLT_FN (BUILT_IN_SQRT):
14033 /* sqrt(-0.0) is -0.0. */
14034 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
14036 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14037 strict_overflow_p);
14039 CASE_FLT_FN (BUILT_IN_ASINH):
14040 CASE_FLT_FN (BUILT_IN_ATAN):
14041 CASE_FLT_FN (BUILT_IN_ATANH):
14042 CASE_FLT_FN (BUILT_IN_CBRT):
14043 CASE_FLT_FN (BUILT_IN_CEIL):
14044 CASE_FLT_FN (BUILT_IN_ERF):
14045 CASE_FLT_FN (BUILT_IN_EXPM1):
14046 CASE_FLT_FN (BUILT_IN_FLOOR):
14047 CASE_FLT_FN (BUILT_IN_FMOD):
14048 CASE_FLT_FN (BUILT_IN_FREXP):
14049 CASE_FLT_FN (BUILT_IN_LCEIL):
14050 CASE_FLT_FN (BUILT_IN_LDEXP):
14051 CASE_FLT_FN (BUILT_IN_LFLOOR):
14052 CASE_FLT_FN (BUILT_IN_LLCEIL):
14053 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14054 CASE_FLT_FN (BUILT_IN_LLRINT):
14055 CASE_FLT_FN (BUILT_IN_LLROUND):
14056 CASE_FLT_FN (BUILT_IN_LRINT):
14057 CASE_FLT_FN (BUILT_IN_LROUND):
14058 CASE_FLT_FN (BUILT_IN_MODF):
14059 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14060 CASE_FLT_FN (BUILT_IN_RINT):
14061 CASE_FLT_FN (BUILT_IN_ROUND):
14062 CASE_FLT_FN (BUILT_IN_SCALB):
14063 CASE_FLT_FN (BUILT_IN_SCALBLN):
14064 CASE_FLT_FN (BUILT_IN_SCALBN):
14065 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14066 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14067 CASE_FLT_FN (BUILT_IN_SINH):
14068 CASE_FLT_FN (BUILT_IN_TANH):
14069 CASE_FLT_FN (BUILT_IN_TRUNC):
14070 /* True if the 1st argument is nonnegative. */
14071 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14072 strict_overflow_p);
14074 CASE_FLT_FN (BUILT_IN_FMAX):
14075 /* True if the 1st OR 2nd arguments are nonnegative. */
14076 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14078 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14079 strict_overflow_p)));
14081 CASE_FLT_FN (BUILT_IN_FMIN):
14082 /* True if the 1st AND 2nd arguments are nonnegative. */
14083 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14085 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14086 strict_overflow_p)));
14088 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14089 /* True if the 2nd argument is nonnegative. */
14090 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14091 strict_overflow_p);
14093 CASE_FLT_FN (BUILT_IN_POWI):
14094 /* True if the 1st argument is nonnegative or the second
14095 argument is an even integer. */
14096 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14098 tree arg1 = CALL_EXPR_ARG (t, 1);
14099 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14102 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14103 strict_overflow_p);
14105 CASE_FLT_FN (BUILT_IN_POW):
14106 /* True if the 1st argument is nonnegative or the second
14107 argument is an even integer-valued real. */
14108 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14113 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14114 n = real_to_integer (&c);
14117 REAL_VALUE_TYPE cint;
14118 real_from_integer (&cint, VOIDmode, n,
14119 n < 0 ? -1 : 0, 0);
14120 if (real_identical (&c, &cint))
14124 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14125 strict_overflow_p);
14130 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14135 case COMPOUND_EXPR:
14137 case GIMPLE_MODIFY_STMT:
14138 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14139 strict_overflow_p);
14141 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14142 strict_overflow_p);
14144 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14145 strict_overflow_p);
14148 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14152 /* We don't know the sign of `t', so be conservative and return false. */
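/* Illustrative sketch, not part of the original source: for calls to
   the common math built-ins the sign of the result can often be
   decided from the callee alone or from a single argument, e.g.

       fabs (x)         -- always non-negative
       exp (x)          -- always non-negative
       sqrt (x)         -- non-negative when x is; sqrt (-0.0) is -0.0,
                           hence the HONOR_SIGNED_ZEROS check above
       pow (x, 4.0)     -- non-negative, the exponent being an even
                           integer-valued real constant
       copysign (x, y)  -- non-negative exactly when y is  */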
14156 /* Return true if T is known to be non-negative. If the return
14157 value is based on the assumption that signed overflow is undefined,
14158 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14159 *STRICT_OVERFLOW_P. */
14162 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14164 enum tree_code code;
14165 if (t == error_mark_node)
14168 code = TREE_CODE (t);
14169 switch (TREE_CODE_CLASS (code))
14172 case tcc_comparison:
14173 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14175 TREE_OPERAND (t, 0),
14176 TREE_OPERAND (t, 1),
14177 strict_overflow_p);
14180 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14182 TREE_OPERAND (t, 0),
14183 strict_overflow_p);
14186 case tcc_declaration:
14187 case tcc_reference:
14188 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14196 case TRUTH_AND_EXPR:
14197 case TRUTH_OR_EXPR:
14198 case TRUTH_XOR_EXPR:
14199 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14201 TREE_OPERAND (t, 0),
14202 TREE_OPERAND (t, 1),
14203 strict_overflow_p);
14204 case TRUTH_NOT_EXPR:
14205 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14207 TREE_OPERAND (t, 0),
14208 strict_overflow_p);
14215 case WITH_SIZE_EXPR:
14219 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14222 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14226 /* Return true if `t' is known to be non-negative. Handle warnings
14227 about undefined signed overflow. */
14230 tree_expr_nonnegative_p (tree t)
14232 bool ret, strict_overflow_p;
14234 strict_overflow_p = false;
14235 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14236 if (strict_overflow_p)
14237 fold_overflow_warning (("assuming signed overflow does not occur when "
14238 "determining that expression is always "
14240 WARN_STRICT_OVERFLOW_MISC);
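/* Illustrative sketch of a hypothetical caller, not in the original
   source: a transformation that is only valid for a non-negative
   operand would typically be guarded like

       if (tree_expr_nonnegative_p (arg0))
         ...apply the simplification...

   with any "assuming signed overflow does not occur" warning emitted
   on the caller's behalf by fold_overflow_warning above.  */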
14245 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14246 For floating point we further ensure that T is not denormal.
14247 Similar logic is present in nonzero_address in rtlanal.h.
14249 If the return value is based on the assumption that signed overflow
14250 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14251 change *STRICT_OVERFLOW_P. */
14254 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14255 bool *strict_overflow_p)
14260 return tree_expr_nonzero_warnv_p (op0,
14261 strict_overflow_p);
14265 tree inner_type = TREE_TYPE (op0);
14266 tree outer_type = type;
14268 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14269 && tree_expr_nonzero_warnv_p (op0,
14270 strict_overflow_p));
14274 case NON_LVALUE_EXPR:
14275 return tree_expr_nonzero_warnv_p (op0,
14276 strict_overflow_p);
14285 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14286 For floating point we further ensure that T is not denormal.
14287 Similar logic is present in nonzero_address in rtlanal.h.
14289 If the return value is based on the assumption that signed overflow
14290 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14291 change *STRICT_OVERFLOW_P. */
14294 tree_binary_nonzero_warnv_p (enum tree_code code,
14297 tree op1, bool *strict_overflow_p)
14299 bool sub_strict_overflow_p;
14302 case POINTER_PLUS_EXPR:
14304 if (TYPE_OVERFLOW_UNDEFINED (type))
14306 /* In the presence of negative values it is hard to say
14307 anything definite. */
14308 sub_strict_overflow_p = false;
14309 if (!tree_expr_nonnegative_warnv_p (op0,
14310 &sub_strict_overflow_p)
14311 || !tree_expr_nonnegative_warnv_p (op1,
14312 &sub_strict_overflow_p))
14314 /* One of the operands must be positive and the other non-negative. */
14315 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14316 overflows, on a twos-complement machine the sum of two
14317 nonnegative numbers can never be zero. */
14318 return (tree_expr_nonzero_warnv_p (op0,
14320 || tree_expr_nonzero_warnv_p (op1,
14321 strict_overflow_p));
14326 if (TYPE_OVERFLOW_UNDEFINED (type))
14328 if (tree_expr_nonzero_warnv_p (op0,
14330 && tree_expr_nonzero_warnv_p (op1,
14331 strict_overflow_p))
14333 *strict_overflow_p = true;
14340 sub_strict_overflow_p = false;
14341 if (tree_expr_nonzero_warnv_p (op0,
14342 &sub_strict_overflow_p)
14343 && tree_expr_nonzero_warnv_p (op1,
14344 &sub_strict_overflow_p))
14346 if (sub_strict_overflow_p)
14347 *strict_overflow_p = true;
14352 sub_strict_overflow_p = false;
14353 if (tree_expr_nonzero_warnv_p (op0,
14354 &sub_strict_overflow_p))
14356 if (sub_strict_overflow_p)
14357 *strict_overflow_p = true;
14359 /* When both operands are nonzero, MAX must be too. */
14360 if (tree_expr_nonzero_warnv_p (op1,
14361 strict_overflow_p))
14364 /* MAX where operand 0 is positive is positive. */
14365 return tree_expr_nonnegative_warnv_p (op0,
14366 strict_overflow_p);
14368 /* MAX where operand 1 is positive is positive. */
14369 else if (tree_expr_nonzero_warnv_p (op1,
14370 &sub_strict_overflow_p)
14371 && tree_expr_nonnegative_warnv_p (op1,
14372 &sub_strict_overflow_p))
14374 if (sub_strict_overflow_p)
14375 *strict_overflow_p = true;
14381 return (tree_expr_nonzero_warnv_p (op1,
14383 || tree_expr_nonzero_warnv_p (op0,
14384 strict_overflow_p));
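/* Illustrative sketch, an assumption not in the original source, for
   signed types whose overflow is treated as undefined:

       x * y        -- nonzero when both factors are known nonzero;
                       this relies on undefined overflow, so
                       *STRICT_OVERFLOW_P is set
       MAX (a, b)   -- nonzero when either operand is known positive
       a + b        -- nonzero when both are non-negative and at least
                       one of them is nonzero  */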
14393 /* Return true when T is an address and is known to be nonzero.
14394 For floating point we further ensure that T is not denormal.
14395 Similar logic is present in nonzero_address in rtlanal.h.
14397 If the return value is based on the assumption that signed overflow
14398 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14399 change *STRICT_OVERFLOW_P. */
14402 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14404 bool sub_strict_overflow_p;
14405 switch (TREE_CODE (t))
14408 /* Query VRP to see if it has recorded any information about
14409 the range of this object. */
14410 return ssa_name_nonzero_p (t);
14413 return !integer_zerop (t);
14417 tree base = get_base_address (TREE_OPERAND (t, 0));
14422 /* Weak declarations may link to NULL. */
14423 if (VAR_OR_FUNCTION_DECL_P (base))
14424 return !DECL_WEAK (base);
14426 /* Constants are never weak. */
14427 if (CONSTANT_CLASS_P (base))
14434 sub_strict_overflow_p = false;
14435 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14436 &sub_strict_overflow_p)
14437 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14438 &sub_strict_overflow_p))
14440 if (sub_strict_overflow_p)
14441 *strict_overflow_p = true;
14452 /* Return true when T is an address and is known to be nonzero.
14453 For floating point we further ensure that T is not denormal.
14454 Similar logic is present in nonzero_address in rtlanal.h.
14456 If the return value is based on the assumption that signed overflow
14457 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14458 change *STRICT_OVERFLOW_P. */
14461 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14463 tree type = TREE_TYPE (t);
14464 enum tree_code code;
14466 /* Doing something useful for floating point would need more work. */
14467 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14470 code = TREE_CODE (t);
14471 switch (TREE_CODE_CLASS (code))
14474 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14475 strict_overflow_p);
14477 case tcc_comparison:
14478 return tree_binary_nonzero_warnv_p (code, type,
14479 TREE_OPERAND (t, 0),
14480 TREE_OPERAND (t, 1),
14481 strict_overflow_p);
14483 case tcc_declaration:
14484 case tcc_reference:
14485 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14493 case TRUTH_NOT_EXPR:
14494 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14495 strict_overflow_p);
14497 case TRUTH_AND_EXPR:
14498 case TRUTH_OR_EXPR:
14499 case TRUTH_XOR_EXPR:
14500 return tree_binary_nonzero_warnv_p (code, type,
14501 TREE_OPERAND (t, 0),
14502 TREE_OPERAND (t, 1),
14503 strict_overflow_p);
14510 case WITH_SIZE_EXPR:
14514 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14516 case COMPOUND_EXPR:
14518 case GIMPLE_MODIFY_STMT:
14520 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14521 strict_overflow_p);
14524 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14525 strict_overflow_p);
14528 return alloca_call_p (t);
14536 /* Return true when T is an address and is known to be nonzero.
14537 Handle warnings about undefined signed overflow. */
14540 tree_expr_nonzero_p (tree t)
14542 bool ret, strict_overflow_p;
14544 strict_overflow_p = false;
14545 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14546 if (strict_overflow_p)
14547 fold_overflow_warning (("assuming signed overflow does not occur when "
14548 "determining that expression is always "
14550 WARN_STRICT_OVERFLOW_MISC);
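/* Illustrative sketch of a hypothetical caller, not in the original
   source: this wrapper is what a simplification of a test against
   zero would use, e.g.

       if (tree_expr_nonzero_p (arg0))
         return constant_boolean_node (1, type);   -- fold "arg0 != 0"

   with the strict-overflow warning, if any, issued here rather than
   at every call site.  */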
14554 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14555 attempt to fold the expression to a constant without modifying TYPE,
14558 If the expression could be simplified to a constant, then return
14559 the constant. If the expression would not be simplified to a
14560 constant, then return NULL_TREE. */
14563 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14565 tree tem = fold_binary (code, type, op0, op1);
14566 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14569 /* Given the components of a unary expression CODE, TYPE and OP0,
14570 attempt to fold the expression to a constant without modifying
14573 If the expression could be simplified to a constant, then return
14574 the constant. If the expression would not be simplified to a
14575 constant, then return NULL_TREE. */
14578 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14580 tree tem = fold_unary (code, type, op0);
14581 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
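/* Illustrative sketch, not part of the original source: both helpers
   simply fold and keep the result only when it is constant, e.g.

       fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                build_int_cst (integer_type_node, 2),
                                build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, whereas folding 2 + x yields NULL_TREE
   because the simplified tree still mentions the variable.  */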
14584 /* If EXP represents referencing an element in a constant string
14585 (either via pointer arithmetic or array indexing), return the
14586 tree representing the value accessed, otherwise return NULL. */
14589 fold_read_from_constant_string (tree exp)
14591 if ((TREE_CODE (exp) == INDIRECT_REF
14592 || TREE_CODE (exp) == ARRAY_REF)
14593 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14595 tree exp1 = TREE_OPERAND (exp, 0);
14599 if (TREE_CODE (exp) == INDIRECT_REF)
14600 string = string_constant (exp1, &index);
14603 tree low_bound = array_ref_low_bound (exp);
14604 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14606 /* Optimize the special-case of a zero lower bound.
14608 We convert the low_bound to sizetype to avoid some problems
14609 with constant folding. (E.g. suppose the lower bound is 1,
14610 and its mode is QI. Without the conversion, (ARRAY
14611 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14612 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14613 if (! integer_zerop (low_bound))
14614 index = size_diffop (index, fold_convert (sizetype, low_bound));
14620 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14621 && TREE_CODE (string) == STRING_CST
14622 && TREE_CODE (index) == INTEGER_CST
14623 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14624 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14626 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14627 return build_int_cst_type (TREE_TYPE (exp),
14628 (TREE_STRING_POINTER (string)
14629 [TREE_INT_CST_LOW (index)]));
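/* Illustrative sketch, an addition not in the original source: a
   constant-subscripted access into a string literal such as

       "abc"[1]       -- or, equivalently, *("abc" + 1)

   is folded to the character constant 'b', provided the index is an
   INTEGER_CST inside the string bounds and the element type has a
   single-byte integer mode, as checked above.  */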
14634 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14635 an integer constant, real, or fixed-point constant.
14637 TYPE is the type of the result. */
14640 fold_negate_const (tree arg0, tree type)
14642 tree t = NULL_TREE;
14644 switch (TREE_CODE (arg0))
14648 unsigned HOST_WIDE_INT low;
14649 HOST_WIDE_INT high;
14650 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14651 TREE_INT_CST_HIGH (arg0),
14653 t = force_fit_type_double (type, low, high, 1,
14654 (overflow | TREE_OVERFLOW (arg0))
14655 && !TYPE_UNSIGNED (type));
14660 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14665 FIXED_VALUE_TYPE f;
14666 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14667 &(TREE_FIXED_CST (arg0)), NULL,
14668 TYPE_SATURATING (type));
14669 t = build_fixed (type, f);
14670 /* Propagate overflow flags. */
14671 if (overflow_p | TREE_OVERFLOW (arg0))
14673 TREE_OVERFLOW (t) = 1;
14674 TREE_CONSTANT_OVERFLOW (t) = 1;
14676 else if (TREE_CONSTANT_OVERFLOW (arg0))
14677 TREE_CONSTANT_OVERFLOW (t) = 1;
14682 gcc_unreachable ();
14688 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14689 an integer constant or real constant.
14691 TYPE is the type of the result. */
14694 fold_abs_const (tree arg0, tree type)
14696 tree t = NULL_TREE;
14698 switch (TREE_CODE (arg0))
14701 /* If the value is unsigned, then the absolute value is
14702 the same as the ordinary value. */
14703 if (TYPE_UNSIGNED (type))
14705 /* Similarly, if the value is non-negative. */
14706 else if (INT_CST_LT (integer_minus_one_node, arg0))
14708 /* If the value is negative, then the absolute value is
14712 unsigned HOST_WIDE_INT low;
14713 HOST_WIDE_INT high;
14714 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14715 TREE_INT_CST_HIGH (arg0),
14717 t = force_fit_type_double (type, low, high, -1,
14718 overflow | TREE_OVERFLOW (arg0));
14723 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14724 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14730 gcc_unreachable ();
14736 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14737 constant. TYPE is the type of the result. */
14740 fold_not_const (tree arg0, tree type)
14742 tree t = NULL_TREE;
14744 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14746 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14747 ~TREE_INT_CST_HIGH (arg0), 0,
14748 TREE_OVERFLOW (arg0));
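/* Illustrative sketch, not part of the original source (arguments are
   shown as plain numbers; the real arguments are INTEGER_CST trees of
   a 32-bit int type):

       fold_negate_const (7)        => -7
       fold_negate_const (INT_MIN)  => INT_MIN, with TREE_OVERFLOW set
       fold_abs_const (-7)          => 7
       fold_not_const (0)           => -1  (all bits set)

   The overflow flag threaded through force_fit_type_double records
   that the constant wrapped.  */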
14753 /* Given CODE, a relational operator, the target type, TYPE and two
14754 constant operands OP0 and OP1, return the result of the
14755 relational operation. If the result is not a compile time
14756 constant, then return NULL_TREE. */
14759 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14761 int result, invert;
14763 /* From here on, the only cases we handle are when the result is
14764 known to be a constant. */
14766 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14768 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14769 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14771 /* Handle the cases where either operand is a NaN. */
14772 if (real_isnan (c0) || real_isnan (c1))
14782 case UNORDERED_EXPR:
14796 if (flag_trapping_math)
14802 gcc_unreachable ();
14805 return constant_boolean_node (result, type);
14808 return constant_boolean_node (real_compare (code, c0, c1), type);
14811 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14813 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14814 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14815 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14818 /* Handle equality/inequality of complex constants. */
14819 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14821 tree rcond = fold_relational_const (code, type,
14822 TREE_REALPART (op0),
14823 TREE_REALPART (op1));
14824 tree icond = fold_relational_const (code, type,
14825 TREE_IMAGPART (op0),
14826 TREE_IMAGPART (op1));
14827 if (code == EQ_EXPR)
14828 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14829 else if (code == NE_EXPR)
14830 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14835 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14837 To compute GT, swap the arguments and do LT.
14838 To compute GE, do LT and invert the result.
14839 To compute LE, swap the arguments, do LT and invert the result.
14840 To compute NE, do EQ and invert the result.
14842 Therefore, the code below must handle only EQ and LT. */
14844 if (code == LE_EXPR || code == GT_EXPR)
14849 code = swap_tree_comparison (code);
14852 /* Note that it is safe to invert for real values here because we
14853 have already handled the one case where it matters. */
14856 if (code == NE_EXPR || code == GE_EXPR)
14859 code = invert_tree_comparison (code, false);
14862 /* Compute a result for LT or EQ if args permit;
14863 otherwise return T. */
14864 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14866 if (code == EQ_EXPR)
14867 result = tree_int_cst_equal (op0, op1);
14868 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14869 result = INT_CST_LT_UNSIGNED (op0, op1);
14871 result = INT_CST_LT (op0, op1);
14878 return constant_boolean_node (result, type);
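/* Illustrative sketch, an assumption not in the original source:

       fold_relational_const (GT_EXPR, boolean_type_node,
                              build_int_cst (integer_type_node, 5),
                              build_int_cst (integer_type_node, 3))

   swaps the operands and evaluates LT (3 < 5), giving true, while GE
   is computed as LT with the result inverted.  For REAL_CST operands
   a NaN makes LT/LE/GT/GE fold to false (or, under -ftrapping-math,
   leaves them unfolded), while NE and the unordered variants fold to
   true.  */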
14881 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14882 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14886 fold_build_cleanup_point_expr (tree type, tree expr)
14888 /* If the expression does not have side effects then we don't have to wrap
14889 it with a cleanup point expression. */
14890 if (!TREE_SIDE_EFFECTS (expr))
14893 /* If the expression is a return, check whether the expression inside the
14894 return, or the right-hand side of the modify expression inside the
14895 return, has side effects. If either of them has none, we don't need to
14896 wrap the expression in a cleanup point expression. Note we don't check
14897 the left-hand side of the modify because it should always be a return decl. */
14898 if (TREE_CODE (expr) == RETURN_EXPR)
14900 tree op = TREE_OPERAND (expr, 0);
14901 if (!op || !TREE_SIDE_EFFECTS (op))
14903 op = TREE_OPERAND (op, 1);
14904 if (!TREE_SIDE_EFFECTS (op))
14908 return build1 (CLEANUP_POINT_EXPR, type, expr);
14911 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14912 of an indirection through OP0, or NULL_TREE if no simplification is
14916 fold_indirect_ref_1 (tree type, tree op0)
14922 subtype = TREE_TYPE (sub);
14923 if (!POINTER_TYPE_P (subtype))
14926 if (TREE_CODE (sub) == ADDR_EXPR)
14928 tree op = TREE_OPERAND (sub, 0);
14929 tree optype = TREE_TYPE (op);
14930 /* *&CONST_DECL -> to the value of the const decl. */
14931 if (TREE_CODE (op) == CONST_DECL)
14932 return DECL_INITIAL (op);
14933 /* *&p => p; make sure to handle *&"str"[cst] here. */
14934 if (type == optype)
14936 tree fop = fold_read_from_constant_string (op);
14942 /* *(foo *)&fooarray => fooarray[0] */
14943 else if (TREE_CODE (optype) == ARRAY_TYPE
14944 && type == TREE_TYPE (optype))
14946 tree type_domain = TYPE_DOMAIN (optype);
14947 tree min_val = size_zero_node;
14948 if (type_domain && TYPE_MIN_VALUE (type_domain))
14949 min_val = TYPE_MIN_VALUE (type_domain);
14950 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14952 /* *(foo *)&complexfoo => __real__ complexfoo */
14953 else if (TREE_CODE (optype) == COMPLEX_TYPE
14954 && type == TREE_TYPE (optype))
14955 return fold_build1 (REALPART_EXPR, type, op);
14956 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14957 else if (TREE_CODE (optype) == VECTOR_TYPE
14958 && type == TREE_TYPE (optype))
14960 tree part_width = TYPE_SIZE (type);
14961 tree index = bitsize_int (0);
14962 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14966 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14967 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14968 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14970 tree op00 = TREE_OPERAND (sub, 0);
14971 tree op01 = TREE_OPERAND (sub, 1);
14975 op00type = TREE_TYPE (op00);
14976 if (TREE_CODE (op00) == ADDR_EXPR
14977 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14978 && type == TREE_TYPE (TREE_TYPE (op00type)))
14980 tree size = TYPE_SIZE_UNIT (type);
14981 if (tree_int_cst_equal (size, op01))
14982 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14986 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14987 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14988 && type == TREE_TYPE (TREE_TYPE (subtype)))
14991 tree min_val = size_zero_node;
14992 sub = build_fold_indirect_ref (sub);
14993 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14994 if (type_domain && TYPE_MIN_VALUE (type_domain))
14995 min_val = TYPE_MIN_VALUE (type_domain);
14996 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
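/* Illustrative sketch, not part of the original source, restating the
   simplifications above as source-level expressions:

       *&x                        => x
       *(foo *) &fooarray         => fooarray[0]
       *(foo *) &complexfoo       => __real__ complexfoo
       ((foo *) &complexfoo)[1]   => __imag__ complexfoo
       *(foo *) fooarrptr         => (*fooarrptr)[0]

   each of which avoids materializing an INDIRECT_REF.  */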
15002 /* Builds an expression for an indirection through T, simplifying some
15006 build_fold_indirect_ref (tree t)
15008 tree type = TREE_TYPE (TREE_TYPE (t));
15009 tree sub = fold_indirect_ref_1 (type, t);
15014 return build1 (INDIRECT_REF, type, t);
15017 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15020 fold_indirect_ref (tree t)
15022 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15030 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15031 whose result is ignored. The type of the returned tree need not be
15032 the same as the original expression. */
15035 fold_ignored_result (tree t)
15037 if (!TREE_SIDE_EFFECTS (t))
15038 return integer_zero_node;
15041 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15044 t = TREE_OPERAND (t, 0);
15048 case tcc_comparison:
15049 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15050 t = TREE_OPERAND (t, 0);
15051 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15052 t = TREE_OPERAND (t, 1);
15057 case tcc_expression:
15058 switch (TREE_CODE (t))
15060 case COMPOUND_EXPR:
15061 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15063 t = TREE_OPERAND (t, 0);
15067 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15068 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15070 t = TREE_OPERAND (t, 0);
15083 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15084 This can only be applied to objects of a sizetype. */
15087 round_up (tree value, int divisor)
15089 tree div = NULL_TREE;
15091 gcc_assert (divisor > 0);
15095 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15096 have to do anything. Only do this when we are not given a const,
15097 because in that case, this check is more expensive than just
15099 if (TREE_CODE (value) != INTEGER_CST)
15101 div = build_int_cst (TREE_TYPE (value), divisor);
15103 if (multiple_of_p (TREE_TYPE (value), value, div))
15107 /* If divisor is a power of two, simplify this to bit manipulation. */
15108 if (divisor == (divisor & -divisor))
15110 if (TREE_CODE (value) == INTEGER_CST)
15112 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15113 unsigned HOST_WIDE_INT high;
15116 if ((low & (divisor - 1)) == 0)
15119 overflow_p = TREE_OVERFLOW (value);
15120 high = TREE_INT_CST_HIGH (value);
15121 low &= ~(divisor - 1);
15130 return force_fit_type_double (TREE_TYPE (value), low, high,
15137 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15138 value = size_binop (PLUS_EXPR, value, t);
15139 t = build_int_cst (TREE_TYPE (value), -divisor);
15140 value = size_binop (BIT_AND_EXPR, value, t);
15146 div = build_int_cst (TREE_TYPE (value), divisor);
15147 value = size_binop (CEIL_DIV_EXPR, value, div);
15148 value = size_binop (MULT_EXPR, value, div);
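/* Illustrative sketch, an assumption not in the original source: with
   a power-of-two divisor the rounding is pure bit manipulation, e.g.

       round_up (size_int (13), 8)   => 16,  via (13 + 7) & -8
       round_up (size_int (16), 8)   => 16   (already a multiple)

   whereas a divisor that is not a power of two falls back to the
   CEIL_DIV_EXPR followed by MULT_EXPR sequence above.  */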
15154 /* Likewise, but round down. */
15157 round_down (tree value, int divisor)
15159 tree div = NULL_TREE;
15161 gcc_assert (divisor > 0);
15165 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15166 have to do anything. Only do this when we are not given a const,
15167 because in that case, this check is more expensive than just
15169 if (TREE_CODE (value) != INTEGER_CST)
15171 div = build_int_cst (TREE_TYPE (value), divisor);
15173 if (multiple_of_p (TREE_TYPE (value), value, div))
15177 /* If divisor is a power of two, simplify this to bit manipulation. */
15178 if (divisor == (divisor & -divisor))
15182 t = build_int_cst (TREE_TYPE (value), -divisor);
15183 value = size_binop (BIT_AND_EXPR, value, t);
15188 div = build_int_cst (TREE_TYPE (value), divisor);
15189 value = size_binop (FLOOR_DIV_EXPR, value, div);
15190 value = size_binop (MULT_EXPR, value, div);
15196 /* Returns the pointer to the base of the object addressed by EXP and
15197 extracts the information about the offset of the access, storing it
15198 in PBITPOS and POFFSET. */
15201 split_address_to_core_and_offset (tree exp,
15202 HOST_WIDE_INT *pbitpos, tree *poffset)
15205 enum machine_mode mode;
15206 int unsignedp, volatilep;
15207 HOST_WIDE_INT bitsize;
15209 if (TREE_CODE (exp) == ADDR_EXPR)
15211 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15212 poffset, &mode, &unsignedp, &volatilep,
15214 core = fold_addr_expr (core);
15220 *poffset = NULL_TREE;
15226 /* Returns true if addresses of E1 and E2 differ by a constant, false
15227 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15230 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15233 HOST_WIDE_INT bitpos1, bitpos2;
15234 tree toffset1, toffset2, tdiff, type;
15236 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15237 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15239 if (bitpos1 % BITS_PER_UNIT != 0
15240 || bitpos2 % BITS_PER_UNIT != 0
15241 || !operand_equal_p (core1, core2, 0))
15244 if (toffset1 && toffset2)
15246 type = TREE_TYPE (toffset1);
15247 if (type != TREE_TYPE (toffset2))
15248 toffset2 = fold_convert (type, toffset2);
15250 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15251 if (!cst_and_fits_in_hwi (tdiff))
15254 *diff = int_cst_value (tdiff);
15256 else if (toffset1 || toffset2)
15258 /* If only one of the offsets is non-constant, the difference cannot
15265 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
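/* Illustrative sketch, an assumption not in the original source:
   given ADDR_EXPRs for two references into the same array, say &a[5]
   and &a[2] with a 4-byte element type, both cores resolve to `a',
   the offsets are constants, and *DIFF is set to 12.  Addresses of
   unrelated objects, or offsets that are not compile-time constants,
   make the function return false.  */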
15269 /* Simplify the floating point expression EXP when the sign of the
15270 result is not significant. Return NULL_TREE if no simplification
15274 fold_strip_sign_ops (tree exp)
15278 switch (TREE_CODE (exp))
15282 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15283 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15287 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15289 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15290 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15291 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15292 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15293 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15294 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15297 case COMPOUND_EXPR:
15298 arg0 = TREE_OPERAND (exp, 0);
15299 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15301 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15305 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15306 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15308 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15309 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15310 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15315 const enum built_in_function fcode = builtin_mathfn_code (exp);
15318 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15319 /* Strip copysign function call, return the 1st argument. */
15320 arg0 = CALL_EXPR_ARG (exp, 0);
15321 arg1 = CALL_EXPR_ARG (exp, 1);
15322 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15325 /* Strip sign ops from the argument of "odd" math functions. */
15326 if (negate_mathfn_p (fcode))
15328 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15330 return build_call_expr (get_callee_fndecl (exp), 1, arg0);