1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
69 /* Nonzero if we are folding constants inside an initializer; zero
71 int folding_initializer = 0;
73 /* The following constants represent a bit based encoding of GCC's
74 comparison operators. This encoding simplifies transformations
75 on relational comparison operators, such as AND and OR. */
76 enum comparison_code {
95 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
96 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
97 static bool negate_mathfn_p (enum built_in_function);
98 static bool negate_expr_p (tree);
99 static tree negate_expr (tree);
100 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
101 static tree associate_trees (tree, tree, enum tree_code, tree);
102 static tree const_binop (enum tree_code, tree, tree, int);
103 static enum comparison_code comparison_to_compcode (enum tree_code);
104 static enum tree_code compcode_to_comparison (enum comparison_code);
105 static tree combine_comparisons (enum tree_code, enum tree_code,
106 enum tree_code, tree, tree, tree);
107 static int truth_value_p (enum tree_code);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static tree sign_bit_p (tree, const_tree);
117 static int simple_operand_p (const_tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *, bool *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (const_tree, const_tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
191 /* Force the double-word integer L1, H1 to be within the range of the
192 integer type TYPE. Stores the properly truncated and sign-extended
193 double-word integer in *LV, *HV. Returns true if the operation
194 overflows, that is, argument and result are different. */
197 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
198 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
200 unsigned HOST_WIDE_INT low0 = l1;
201 HOST_WIDE_INT high0 = h1;
203 int sign_extended_type;
205 if (POINTER_TYPE_P (type)
206 || TREE_CODE (type) == OFFSET_TYPE)
209 prec = TYPE_PRECISION (type);
211 /* Size types *are* sign extended. */
212 sign_extended_type = (!TYPE_UNSIGNED (type)
213 || (TREE_CODE (type) == INTEGER_TYPE
214 && TYPE_IS_SIZETYPE (type)));
216 /* First clear all bits that are beyond the type's precision. */
217 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219 else if (prec > HOST_BITS_PER_WIDE_INT)
220 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 if (prec < HOST_BITS_PER_WIDE_INT)
225 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
228 /* Then do sign extension if necessary. */
229 if (!sign_extended_type)
230 /* No sign extension */;
231 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
232 /* Correct width already. */;
233 else if (prec > HOST_BITS_PER_WIDE_INT)
235 /* Sign extend top half? */
236 if (h1 & ((unsigned HOST_WIDE_INT)1
237 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
238 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240 else if (prec == HOST_BITS_PER_WIDE_INT)
242 if ((HOST_WIDE_INT)l1 < 0)
247 /* Sign extend bottom half? */
248 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
251 l1 |= (HOST_WIDE_INT)(-1) << prec;
258 /* If the value didn't fit, signal overflow. */
259 return l1 != low0 || h1 != high0;
262 /* We force the double-int HIGH:LOW to the range of the type TYPE by
263 sign or zero extending it.
264 OVERFLOWABLE indicates if we are interested
265 in overflow of the value, when >0 we are only interested in signed
266 overflow, for <0 we are interested in any overflow. OVERFLOWED
267 indicates whether overflow has already occurred. CONST_OVERFLOWED
268 indicates whether constant overflow has already occurred. We force
269 T's value to be within range of T's type (by setting to 0 or 1 all
270 the bits outside the type's range). We set TREE_OVERFLOWED if,
271 OVERFLOWED is nonzero,
272 or OVERFLOWABLE is >0 and signed overflow occurs
273 or OVERFLOWABLE is <0 and any overflow occurs
274 We return a new tree node for the extended double-int. The node
275 is shared if no overflow flags are set. */
278 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
279 HOST_WIDE_INT high, int overflowable,
282 int sign_extended_type;
285 /* Size types *are* sign extended. */
286 sign_extended_type = (!TYPE_UNSIGNED (type)
287 || (TREE_CODE (type) == INTEGER_TYPE
288 && TYPE_IS_SIZETYPE (type)));
290 overflow = fit_double_type (low, high, &low, &high, type);
292 /* If we need to set overflow flags, return a new unshared node. */
293 if (overflowed || overflow)
297 || (overflowable > 0 && sign_extended_type))
299 tree t = make_node (INTEGER_CST);
300 TREE_INT_CST_LOW (t) = low;
301 TREE_INT_CST_HIGH (t) = high;
302 TREE_TYPE (t) = type;
303 TREE_OVERFLOW (t) = 1;
308 /* Else build a shared node. */
309 return build_int_cst_wide (type, low, high);
312 /* Add two doubleword integers with doubleword result.
313 Return nonzero if the operation overflows according to UNSIGNED_P.
314 Each argument is given as two `HOST_WIDE_INT' pieces.
315 One argument is L1 and H1; the other, L2 and H2.
316 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
319 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
320 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
324 unsigned HOST_WIDE_INT l;
328 h = h1 + h2 + (l < l1);
334 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
336 return OVERFLOW_SUM_SIGN (h1, h2, h);
339 /* Negate a doubleword integer with doubleword result.
340 Return nonzero if the operation overflows, assuming it's signed.
341 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
342 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
345 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
352 return (*hv & h1) < 0;
362 /* Multiply two doubleword integers with doubleword result.
363 Return nonzero if the operation overflows according to UNSIGNED_P.
364 Each argument is given as two `HOST_WIDE_INT' pieces.
365 One argument is L1 and H1; the other, L2 and H2.
366 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
369 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
370 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
371 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
374 HOST_WIDE_INT arg1[4];
375 HOST_WIDE_INT arg2[4];
376 HOST_WIDE_INT prod[4 * 2];
377 unsigned HOST_WIDE_INT carry;
379 unsigned HOST_WIDE_INT toplow, neglow;
380 HOST_WIDE_INT tophigh, neghigh;
382 encode (arg1, l1, h1);
383 encode (arg2, l2, h2);
385 memset (prod, 0, sizeof prod);
387 for (i = 0; i < 4; i++)
390 for (j = 0; j < 4; j++)
393 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
394 carry += arg1[i] * arg2[j];
395 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
397 prod[k] = LOWPART (carry);
398 carry = HIGHPART (carry);
403 decode (prod, lv, hv);
404 decode (prod + 4, &toplow, &tophigh);
406 /* Unsigned overflow is immediate. */
408 return (toplow | tophigh) != 0;
410 /* Check for signed overflow by calculating the signed representation of the
411 top half of the result; it should agree with the low half's sign bit. */
414 neg_double (l2, h2, &neglow, &neghigh);
415 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
419 neg_double (l1, h1, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
425 /* Shift the doubleword integer in L1, H1 left by COUNT places
426 keeping only PREC bits of result.
427 Shift right if COUNT is negative.
428 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
429 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
432 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
433 HOST_WIDE_INT count, unsigned int prec,
434 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
436 unsigned HOST_WIDE_INT signmask;
440 rshift_double (l1, h1, -count, prec, lv, hv, arith);
444 if (SHIFT_COUNT_TRUNCATED)
447 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
449 /* Shifting by the host word size is undefined according to the
450 ANSI standard, so we must handle this as a special case. */
454 else if (count >= HOST_BITS_PER_WIDE_INT)
456 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
461 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
462 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
466 /* Sign extend all bits that are beyond the precision. */
468 signmask = -((prec > HOST_BITS_PER_WIDE_INT
469 ? ((unsigned HOST_WIDE_INT) *hv
470 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
471 : (*lv >> (prec - 1))) & 1);
473 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
475 else if (prec >= HOST_BITS_PER_WIDE_INT)
477 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
478 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
483 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
484 *lv |= signmask << prec;
488 /* Shift the doubleword integer in L1, H1 right by COUNT places
489 keeping only PREC bits of result. COUNT must be positive.
490 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
491 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
494 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
495 HOST_WIDE_INT count, unsigned int prec,
496 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
499 unsigned HOST_WIDE_INT signmask;
502 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
505 if (SHIFT_COUNT_TRUNCATED)
508 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
510 /* Shifting by the host word size is undefined according to the
511 ANSI standard, so we must handle this as a special case. */
515 else if (count >= HOST_BITS_PER_WIDE_INT)
518 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
522 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
524 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
527 /* Zero / sign extend all bits that are beyond the precision. */
529 if (count >= (HOST_WIDE_INT)prec)
534 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
536 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
538 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
539 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
544 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
545 *lv |= signmask << (prec - count);
549 /* Rotate the doubleword integer in L1, H1 left by COUNT places
550 keeping only PREC bits of result.
551 Rotate right if COUNT is negative.
552 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
555 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
556 HOST_WIDE_INT count, unsigned int prec,
557 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
559 unsigned HOST_WIDE_INT s1l, s2l;
560 HOST_WIDE_INT s1h, s2h;
566 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
567 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
572 /* Rotate the doubleword integer in L1, H1 left by COUNT places
573 keeping only PREC bits of result. COUNT must be positive.
574 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
577 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
578 HOST_WIDE_INT count, unsigned int prec,
579 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
581 unsigned HOST_WIDE_INT s1l, s2l;
582 HOST_WIDE_INT s1h, s2h;
588 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
589 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
594 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
595 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
596 CODE is a tree code for a kind of division, one of
597 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
599 It controls how the quotient is rounded to an integer.
600 Return nonzero if the operation overflows.
601 UNS nonzero says do unsigned division. */
604 div_and_round_double (enum tree_code code, int uns,
605 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
606 HOST_WIDE_INT hnum_orig,
607 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
608 HOST_WIDE_INT hden_orig,
609 unsigned HOST_WIDE_INT *lquo,
610 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
614 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
615 HOST_WIDE_INT den[4], quo[4];
617 unsigned HOST_WIDE_INT work;
618 unsigned HOST_WIDE_INT carry = 0;
619 unsigned HOST_WIDE_INT lnum = lnum_orig;
620 HOST_WIDE_INT hnum = hnum_orig;
621 unsigned HOST_WIDE_INT lden = lden_orig;
622 HOST_WIDE_INT hden = hden_orig;
625 if (hden == 0 && lden == 0)
626 overflow = 1, lden = 1;
628 /* Calculate quotient sign and convert operands to unsigned. */
634 /* (minimum integer) / (-1) is the only overflow case. */
635 if (neg_double (lnum, hnum, &lnum, &hnum)
636 && ((HOST_WIDE_INT) lden & hden) == -1)
642 neg_double (lden, hden, &lden, &hden);
646 if (hnum == 0 && hden == 0)
647 { /* single precision */
649 /* This unsigned division rounds toward zero. */
655 { /* trivial case: dividend < divisor */
656 /* hden != 0 already checked. */
663 memset (quo, 0, sizeof quo);
665 memset (num, 0, sizeof num); /* to zero 9th element */
666 memset (den, 0, sizeof den);
668 encode (num, lnum, hnum);
669 encode (den, lden, hden);
671 /* Special code for when the divisor < BASE. */
672 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
674 /* hnum != 0 already checked. */
675 for (i = 4 - 1; i >= 0; i--)
677 work = num[i] + carry * BASE;
678 quo[i] = work / lden;
684 /* Full double precision division,
685 with thanks to Don Knuth's "Seminumerical Algorithms". */
686 int num_hi_sig, den_hi_sig;
687 unsigned HOST_WIDE_INT quo_est, scale;
689 /* Find the highest nonzero divisor digit. */
690 for (i = 4 - 1;; i--)
697 /* Insure that the first digit of the divisor is at least BASE/2.
698 This is required by the quotient digit estimation algorithm. */
700 scale = BASE / (den[den_hi_sig] + 1);
702 { /* scale divisor and dividend */
704 for (i = 0; i <= 4 - 1; i++)
706 work = (num[i] * scale) + carry;
707 num[i] = LOWPART (work);
708 carry = HIGHPART (work);
713 for (i = 0; i <= 4 - 1; i++)
715 work = (den[i] * scale) + carry;
716 den[i] = LOWPART (work);
717 carry = HIGHPART (work);
718 if (den[i] != 0) den_hi_sig = i;
725 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
727 /* Guess the next quotient digit, quo_est, by dividing the first
728 two remaining dividend digits by the high order quotient digit.
729 quo_est is never low and is at most 2 high. */
730 unsigned HOST_WIDE_INT tmp;
732 num_hi_sig = i + den_hi_sig + 1;
733 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
734 if (num[num_hi_sig] != den[den_hi_sig])
735 quo_est = work / den[den_hi_sig];
739 /* Refine quo_est so it's usually correct, and at most one high. */
740 tmp = work - quo_est * den[den_hi_sig];
742 && (den[den_hi_sig - 1] * quo_est
743 > (tmp * BASE + num[num_hi_sig - 2])))
746 /* Try QUO_EST as the quotient digit, by multiplying the
747 divisor by QUO_EST and subtracting from the remaining dividend.
748 Keep in mind that QUO_EST is the I - 1st digit. */
751 for (j = 0; j <= den_hi_sig; j++)
753 work = quo_est * den[j] + carry;
754 carry = HIGHPART (work);
755 work = num[i + j] - LOWPART (work);
756 num[i + j] = LOWPART (work);
757 carry += HIGHPART (work) != 0;
760 /* If quo_est was high by one, then num[i] went negative and
761 we need to correct things. */
762 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
765 carry = 0; /* add divisor back in */
766 for (j = 0; j <= den_hi_sig; j++)
768 work = num[i + j] + den[j] + carry;
769 carry = HIGHPART (work);
770 num[i + j] = LOWPART (work);
773 num [num_hi_sig] += carry;
776 /* Store the quotient digit. */
781 decode (quo, lquo, hquo);
784 /* If result is negative, make it so. */
786 neg_double (*lquo, *hquo, lquo, hquo);
788 /* Compute trial remainder: rem = num - (quo * den) */
789 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
790 neg_double (*lrem, *hrem, lrem, hrem);
791 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
796 case TRUNC_MOD_EXPR: /* round toward zero */
797 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
801 case FLOOR_MOD_EXPR: /* round toward negative infinity */
802 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
805 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
813 case CEIL_MOD_EXPR: /* round toward positive infinity */
814 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
816 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
824 case ROUND_MOD_EXPR: /* round to closest integer */
826 unsigned HOST_WIDE_INT labs_rem = *lrem;
827 HOST_WIDE_INT habs_rem = *hrem;
828 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
829 HOST_WIDE_INT habs_den = hden, htwice;
831 /* Get absolute values. */
833 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
835 neg_double (lden, hden, &labs_den, &habs_den);
837 /* If (2 * abs (lrem) >= abs (lden)) */
838 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
839 labs_rem, habs_rem, <wice, &htwice);
841 if (((unsigned HOST_WIDE_INT) habs_den
842 < (unsigned HOST_WIDE_INT) htwice)
843 || (((unsigned HOST_WIDE_INT) habs_den
844 == (unsigned HOST_WIDE_INT) htwice)
845 && (labs_den < ltwice)))
849 add_double (*lquo, *hquo,
850 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
853 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
865 /* Compute true remainder: rem = num - (quo * den) */
866 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
867 neg_double (*lrem, *hrem, lrem, hrem);
868 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
872 /* If ARG2 divides ARG1 with zero remainder, carries out the division
873 of type CODE and returns the quotient.
874 Otherwise returns NULL_TREE. */
877 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
879 unsigned HOST_WIDE_INT int1l, int2l;
880 HOST_WIDE_INT int1h, int2h;
881 unsigned HOST_WIDE_INT quol, reml;
882 HOST_WIDE_INT quoh, remh;
883 tree type = TREE_TYPE (arg1);
884 int uns = TYPE_UNSIGNED (type);
886 int1l = TREE_INT_CST_LOW (arg1);
887 int1h = TREE_INT_CST_HIGH (arg1);
888 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
889 &obj[some_exotic_number]. */
890 if (POINTER_TYPE_P (type))
893 type = signed_type_for (type);
894 fit_double_type (int1l, int1h, &int1l, &int1h,
898 fit_double_type (int1l, int1h, &int1l, &int1h, type);
899 int2l = TREE_INT_CST_LOW (arg2);
900 int2h = TREE_INT_CST_HIGH (arg2);
902 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
903 &quol, &quoh, &reml, &remh);
904 if (remh != 0 || reml != 0)
907 return build_int_cst_wide (type, quol, quoh);
910 /* This is nonzero if we should defer warnings about undefined
911 overflow. This facility exists because these warnings are a
912 special case. The code to estimate loop iterations does not want
913 to issue any warnings, since it works with expressions which do not
914 occur in user code. Various bits of cleanup code call fold(), but
915 only use the result if it has certain characteristics (e.g., is a
916 constant); that code only wants to issue a warning if the result is
919 static int fold_deferring_overflow_warnings;
921 /* If a warning about undefined overflow is deferred, this is the
922 warning. Note that this may cause us to turn two warnings into
923 one, but that is fine since it is sufficient to only give one
924 warning per expression. */
926 static const char* fold_deferred_overflow_warning;
928 /* If a warning about undefined overflow is deferred, this is the
929 level at which the warning should be emitted. */
931 static enum warn_strict_overflow_code fold_deferred_overflow_code;
933 /* Start deferring overflow warnings. We could use a stack here to
934 permit nested calls, but at present it is not necessary. */
937 fold_defer_overflow_warnings (void)
939 ++fold_deferring_overflow_warnings;
942 /* Stop deferring overflow warnings. If there is a pending warning,
943 and ISSUE is true, then issue the warning if appropriate. STMT is
944 the statement with which the warning should be associated (used for
945 location information); STMT may be NULL. CODE is the level of the
946 warning--a warn_strict_overflow_code value. This function will use
947 the smaller of CODE and the deferred code when deciding whether to
948 issue the warning. CODE may be zero to mean to always use the
952 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
957 gcc_assert (fold_deferring_overflow_warnings > 0);
958 --fold_deferring_overflow_warnings;
959 if (fold_deferring_overflow_warnings > 0)
961 if (fold_deferred_overflow_warning != NULL
963 && code < (int) fold_deferred_overflow_code)
964 fold_deferred_overflow_code = code;
968 warnmsg = fold_deferred_overflow_warning;
969 fold_deferred_overflow_warning = NULL;
971 if (!issue || warnmsg == NULL)
974 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
977 /* Use the smallest code level when deciding to issue the
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
982 if (!issue_strict_overflow_warning (code))
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 /* Stop deferring overflow warnings, ignoring any deferred
996 fold_undefer_and_ignore_overflow_warnings (void)
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1001 /* Whether we are deferring overflow warnings. */
1004 fold_deferring_overflow_warnings_p (void)
1006 return fold_deferring_overflow_warnings > 0;
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 if (fold_deferring_overflow_warnings > 0)
1017 if (fold_deferred_overflow_warning == NULL
1018 || wc < fold_deferred_overflow_code)
1020 fold_deferred_overflow_warning = gmsgid;
1021 fold_deferred_overflow_code = wc;
1024 else if (issue_strict_overflow_warning (wc))
1025 warning (OPT_Wstrict_overflow, gmsgid);
1028 /* Return true if the built-in mathematical function specified by CODE
1029 is odd, i.e. -f(x) == f(-x). */
1032 negate_mathfn_p (enum built_in_function code)
1036 CASE_FLT_FN (BUILT_IN_ASIN):
1037 CASE_FLT_FN (BUILT_IN_ASINH):
1038 CASE_FLT_FN (BUILT_IN_ATAN):
1039 CASE_FLT_FN (BUILT_IN_ATANH):
1040 CASE_FLT_FN (BUILT_IN_CASIN):
1041 CASE_FLT_FN (BUILT_IN_CASINH):
1042 CASE_FLT_FN (BUILT_IN_CATAN):
1043 CASE_FLT_FN (BUILT_IN_CATANH):
1044 CASE_FLT_FN (BUILT_IN_CBRT):
1045 CASE_FLT_FN (BUILT_IN_CPROJ):
1046 CASE_FLT_FN (BUILT_IN_CSIN):
1047 CASE_FLT_FN (BUILT_IN_CSINH):
1048 CASE_FLT_FN (BUILT_IN_CTAN):
1049 CASE_FLT_FN (BUILT_IN_CTANH):
1050 CASE_FLT_FN (BUILT_IN_ERF):
1051 CASE_FLT_FN (BUILT_IN_LLROUND):
1052 CASE_FLT_FN (BUILT_IN_LROUND):
1053 CASE_FLT_FN (BUILT_IN_ROUND):
1054 CASE_FLT_FN (BUILT_IN_SIN):
1055 CASE_FLT_FN (BUILT_IN_SINH):
1056 CASE_FLT_FN (BUILT_IN_TAN):
1057 CASE_FLT_FN (BUILT_IN_TANH):
1058 CASE_FLT_FN (BUILT_IN_TRUNC):
1061 CASE_FLT_FN (BUILT_IN_LLRINT):
1062 CASE_FLT_FN (BUILT_IN_LRINT):
1063 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1064 CASE_FLT_FN (BUILT_IN_RINT):
1065 return !flag_rounding_math;
1073 /* Check whether we may negate an integer constant T without causing
1077 may_negate_without_overflow_p (const_tree t)
1079 unsigned HOST_WIDE_INT val;
1083 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085 type = TREE_TYPE (t);
1086 if (TYPE_UNSIGNED (type))
1089 prec = TYPE_PRECISION (type);
1090 if (prec > HOST_BITS_PER_WIDE_INT)
1092 if (TREE_INT_CST_LOW (t) != 0)
1094 prec -= HOST_BITS_PER_WIDE_INT;
1095 val = TREE_INT_CST_HIGH (t);
1098 val = TREE_INT_CST_LOW (t);
1099 if (prec < HOST_BITS_PER_WIDE_INT)
1100 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1101 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1104 /* Determine whether an expression T can be cheaply negated using
1105 the function negate_expr without introducing undefined overflow. */
1108 negate_expr_p (tree t)
1115 type = TREE_TYPE (t);
1117 STRIP_SIGN_NOPS (t);
1118 switch (TREE_CODE (t))
1121 if (TYPE_OVERFLOW_WRAPS (type))
1124 /* Check that -CST will not overflow type. */
1125 return may_negate_without_overflow_p (t);
1127 return (INTEGRAL_TYPE_P (type)
1128 && TYPE_OVERFLOW_WRAPS (type));
1136 return negate_expr_p (TREE_REALPART (t))
1137 && negate_expr_p (TREE_IMAGPART (t));
1140 return negate_expr_p (TREE_OPERAND (t, 0))
1141 && negate_expr_p (TREE_OPERAND (t, 1));
1144 return negate_expr_p (TREE_OPERAND (t, 0));
1147 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1148 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 /* -(A + B) -> (-B) - A. */
1151 if (negate_expr_p (TREE_OPERAND (t, 1))
1152 && reorder_operands_p (TREE_OPERAND (t, 0),
1153 TREE_OPERAND (t, 1)))
1155 /* -(A + B) -> (-A) - B. */
1156 return negate_expr_p (TREE_OPERAND (t, 0));
1159 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1160 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1162 && reorder_operands_p (TREE_OPERAND (t, 0),
1163 TREE_OPERAND (t, 1));
1166 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1172 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1173 return negate_expr_p (TREE_OPERAND (t, 1))
1174 || negate_expr_p (TREE_OPERAND (t, 0));
1177 case TRUNC_DIV_EXPR:
1178 case ROUND_DIV_EXPR:
1179 case FLOOR_DIV_EXPR:
1181 case EXACT_DIV_EXPR:
1182 /* In general we can't negate A / B, because if A is INT_MIN and
1183 B is 1, we may turn this into INT_MIN / -1 which is undefined
1184 and actually traps on some architectures. But if overflow is
1185 undefined, we can negate, because - (INT_MIN / 1) is an
1187 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1188 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 return negate_expr_p (TREE_OPERAND (t, 1))
1191 || negate_expr_p (TREE_OPERAND (t, 0));
1194 /* Negate -((double)float) as (double)(-float). */
1195 if (TREE_CODE (type) == REAL_TYPE)
1197 tree tem = strip_float_extensions (t);
1199 return negate_expr_p (tem);
1204 /* Negate -f(x) as f(-x). */
1205 if (negate_mathfn_p (builtin_mathfn_code (t)))
1206 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1210 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1211 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 tree op1 = TREE_OPERAND (t, 1);
1214 if (TREE_INT_CST_HIGH (op1) == 0
1215 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1216 == TREE_INT_CST_LOW (op1))
1227 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1228 simplification is possible.
1229 If negate_expr_p would return true for T, NULL_TREE will never be
1233 fold_negate_expr (tree t)
1235 tree type = TREE_TYPE (t);
1238 switch (TREE_CODE (t))
1240 /* Convert - (~A) to A + 1. */
1242 if (INTEGRAL_TYPE_P (type))
1243 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1244 build_int_cst (type, 1));
1248 tem = fold_negate_const (t, type);
1249 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1250 || !TYPE_OVERFLOW_TRAPS (type))
1255 tem = fold_negate_const (t, type);
1256 /* Two's complement FP formats, such as c4x, may overflow. */
1257 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1262 tem = fold_negate_const (t, type);
1267 tree rpart = negate_expr (TREE_REALPART (t));
1268 tree ipart = negate_expr (TREE_IMAGPART (t));
1270 if ((TREE_CODE (rpart) == REAL_CST
1271 && TREE_CODE (ipart) == REAL_CST)
1272 || (TREE_CODE (rpart) == INTEGER_CST
1273 && TREE_CODE (ipart) == INTEGER_CST))
1274 return build_complex (type, rpart, ipart);
1279 if (negate_expr_p (t))
1280 return fold_build2 (COMPLEX_EXPR, type,
1281 fold_negate_expr (TREE_OPERAND (t, 0)),
1282 fold_negate_expr (TREE_OPERAND (t, 1)));
1286 if (negate_expr_p (t))
1287 return fold_build1 (CONJ_EXPR, type,
1288 fold_negate_expr (TREE_OPERAND (t, 0)));
1292 return TREE_OPERAND (t, 0);
1295 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1296 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1298 /* -(A + B) -> (-B) - A. */
1299 if (negate_expr_p (TREE_OPERAND (t, 1))
1300 && reorder_operands_p (TREE_OPERAND (t, 0),
1301 TREE_OPERAND (t, 1)))
1303 tem = negate_expr (TREE_OPERAND (t, 1));
1304 return fold_build2 (MINUS_EXPR, type,
1305 tem, TREE_OPERAND (t, 0));
1308 /* -(A + B) -> (-A) - B. */
1309 if (negate_expr_p (TREE_OPERAND (t, 0)))
1311 tem = negate_expr (TREE_OPERAND (t, 0));
1312 return fold_build2 (MINUS_EXPR, type,
1313 tem, TREE_OPERAND (t, 1));
1319 /* - (A - B) -> B - A */
1320 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1321 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1322 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1323 return fold_build2 (MINUS_EXPR, type,
1324 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1328 if (TYPE_UNSIGNED (type))
1334 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1336 tem = TREE_OPERAND (t, 1);
1337 if (negate_expr_p (tem))
1338 return fold_build2 (TREE_CODE (t), type,
1339 TREE_OPERAND (t, 0), negate_expr (tem));
1340 tem = TREE_OPERAND (t, 0);
1341 if (negate_expr_p (tem))
1342 return fold_build2 (TREE_CODE (t), type,
1343 negate_expr (tem), TREE_OPERAND (t, 1));
1347 case TRUNC_DIV_EXPR:
1348 case ROUND_DIV_EXPR:
1349 case FLOOR_DIV_EXPR:
1351 case EXACT_DIV_EXPR:
1352 /* In general we can't negate A / B, because if A is INT_MIN and
1353 B is 1, we may turn this into INT_MIN / -1 which is undefined
1354 and actually traps on some architectures. But if overflow is
1355 undefined, we can negate, because - (INT_MIN / 1) is an
1357 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1359 const char * const warnmsg = G_("assuming signed overflow does not "
1360 "occur when negating a division");
1361 tem = TREE_OPERAND (t, 1);
1362 if (negate_expr_p (tem))
1364 if (INTEGRAL_TYPE_P (type)
1365 && (TREE_CODE (tem) != INTEGER_CST
1366 || integer_onep (tem)))
1367 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1368 return fold_build2 (TREE_CODE (t), type,
1369 TREE_OPERAND (t, 0), negate_expr (tem));
1371 tem = TREE_OPERAND (t, 0);
1372 if (negate_expr_p (tem))
1374 if (INTEGRAL_TYPE_P (type)
1375 && (TREE_CODE (tem) != INTEGER_CST
1376 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1377 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1378 return fold_build2 (TREE_CODE (t), type,
1379 negate_expr (tem), TREE_OPERAND (t, 1));
1385 /* Convert -((double)float) into (double)(-float). */
1386 if (TREE_CODE (type) == REAL_TYPE)
1388 tem = strip_float_extensions (t);
1389 if (tem != t && negate_expr_p (tem))
1390 return fold_convert (type, negate_expr (tem));
1395 /* Negate -f(x) as f(-x). */
1396 if (negate_mathfn_p (builtin_mathfn_code (t))
1397 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1401 fndecl = get_callee_fndecl (t);
1402 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1403 return build_call_expr (fndecl, 1, arg);
1408 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1409 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1411 tree op1 = TREE_OPERAND (t, 1);
1412 if (TREE_INT_CST_HIGH (op1) == 0
1413 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1414 == TREE_INT_CST_LOW (op1))
1416 tree ntype = TYPE_UNSIGNED (type)
1417 ? signed_type_for (type)
1418 : unsigned_type_for (type);
1419 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1420 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1421 return fold_convert (type, temp);
1433 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1434 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1435 return NULL_TREE. */
1438 negate_expr (tree t)
1445 type = TREE_TYPE (t);
1446 STRIP_SIGN_NOPS (t);
1448 tem = fold_negate_expr (t);
1450 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1451 return fold_convert (type, tem);
1454 /* Split a tree IN into a constant, literal and variable parts that could be
1455 combined with CODE to make IN. "constant" means an expression with
1456 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1457 commutative arithmetic operation. Store the constant part into *CONP,
1458 the literal in *LITP and return the variable part. If a part isn't
1459 present, set it to null. If the tree does not decompose in this way,
1460 return the entire tree as the variable part and the other parts as null.
1462 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1463 case, we negate an operand that was subtracted. Except if it is a
1464 literal for which we use *MINUS_LITP instead.
1466 If NEGATE_P is true, we are negating all of IN, again except a literal
1467 for which we use *MINUS_LITP instead.
1469 If IN is itself a literal or constant, return it as appropriate.
1471 Note that we do not guarantee that any of the three values will be the
1472 same type as IN, but they will have the same signedness and mode. */
1475 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1476 tree *minus_litp, int negate_p)
1484 /* Strip any conversions that don't change the machine mode or signedness. */
1485 STRIP_SIGN_NOPS (in);
1487 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1488 || TREE_CODE (in) == FIXED_CST)
1490 else if (TREE_CODE (in) == code
1491 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1492 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1493 /* We can associate addition and subtraction together (even
1494 though the C standard doesn't say so) for integers because
1495 the value is not affected. For reals, the value might be
1496 affected, so we can't. */
1497 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1498 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1500 tree op0 = TREE_OPERAND (in, 0);
1501 tree op1 = TREE_OPERAND (in, 1);
1502 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1503 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1505 /* First see if either of the operands is a literal, then a constant. */
1506 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1507 || TREE_CODE (op0) == FIXED_CST)
1508 *litp = op0, op0 = 0;
1509 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1510 || TREE_CODE (op1) == FIXED_CST)
1511 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1513 if (op0 != 0 && TREE_CONSTANT (op0))
1514 *conp = op0, op0 = 0;
1515 else if (op1 != 0 && TREE_CONSTANT (op1))
1516 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1518 /* If we haven't dealt with either operand, this is not a case we can
1519 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1520 if (op0 != 0 && op1 != 0)
1525 var = op1, neg_var_p = neg1_p;
1527 /* Now do any needed negations. */
1529 *minus_litp = *litp, *litp = 0;
1531 *conp = negate_expr (*conp);
1533 var = negate_expr (var);
1535 else if (TREE_CONSTANT (in))
1543 *minus_litp = *litp, *litp = 0;
1544 else if (*minus_litp)
1545 *litp = *minus_litp, *minus_litp = 0;
1546 *conp = negate_expr (*conp);
1547 var = negate_expr (var);
1553 /* Re-associate trees split by the above function. T1 and T2 are either
1554 expressions to associate or null. Return the new expression, if any. If
1555 we build an operation, do it in TYPE and with CODE. */
1558 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1565 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1566 try to fold this since we will have infinite recursion. But do
1567 deal with any NEGATE_EXPRs. */
1568 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1569 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1571 if (code == PLUS_EXPR)
1573 if (TREE_CODE (t1) == NEGATE_EXPR)
1574 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1575 fold_convert (type, TREE_OPERAND (t1, 0)));
1576 else if (TREE_CODE (t2) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1578 fold_convert (type, TREE_OPERAND (t2, 0)));
1579 else if (integer_zerop (t2))
1580 return fold_convert (type, t1);
1582 else if (code == MINUS_EXPR)
1584 if (integer_zerop (t2))
1585 return fold_convert (type, t1);
1588 return build2 (code, type, fold_convert (type, t1),
1589 fold_convert (type, t2));
1592 return fold_build2 (code, type, fold_convert (type, t1),
1593 fold_convert (type, t2));
1596 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1597 for use in int_const_binop, size_binop and size_diffop. */
1600 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1602 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1604 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1619 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1620 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1621 && TYPE_MODE (type1) == TYPE_MODE (type2);
1625 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1626 to produce a new constant. Return NULL_TREE if we don't know how
1627 to evaluate CODE at compile-time.
1629 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1632 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1634 unsigned HOST_WIDE_INT int1l, int2l;
1635 HOST_WIDE_INT int1h, int2h;
1636 unsigned HOST_WIDE_INT low;
1638 unsigned HOST_WIDE_INT garbagel;
1639 HOST_WIDE_INT garbageh;
1641 tree type = TREE_TYPE (arg1);
1642 int uns = TYPE_UNSIGNED (type);
1644 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1647 int1l = TREE_INT_CST_LOW (arg1);
1648 int1h = TREE_INT_CST_HIGH (arg1);
1649 int2l = TREE_INT_CST_LOW (arg2);
1650 int2h = TREE_INT_CST_HIGH (arg2);
1655 low = int1l | int2l, hi = int1h | int2h;
1659 low = int1l ^ int2l, hi = int1h ^ int2h;
1663 low = int1l & int2l, hi = int1h & int2h;
1669 /* It's unclear from the C standard whether shifts can overflow.
1670 The following code ignores overflow; perhaps a C standard
1671 interpretation ruling is needed. */
1672 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1679 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1684 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1688 neg_double (int2l, int2h, &low, &hi);
1689 add_double (int1l, int1h, low, hi, &low, &hi);
1690 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1694 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1697 case TRUNC_DIV_EXPR:
1698 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1699 case EXACT_DIV_EXPR:
1700 /* This is a shortcut for a common special case. */
1701 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1702 && !TREE_OVERFLOW (arg1)
1703 && !TREE_OVERFLOW (arg2)
1704 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1706 if (code == CEIL_DIV_EXPR)
1709 low = int1l / int2l, hi = 0;
1713 /* ... fall through ... */
1715 case ROUND_DIV_EXPR:
1716 if (int2h == 0 && int2l == 0)
1718 if (int2h == 0 && int2l == 1)
1720 low = int1l, hi = int1h;
1723 if (int1l == int2l && int1h == int2h
1724 && ! (int1l == 0 && int1h == 0))
1729 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1730 &low, &hi, &garbagel, &garbageh);
1733 case TRUNC_MOD_EXPR:
1734 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1735 /* This is a shortcut for a common special case. */
1736 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1737 && !TREE_OVERFLOW (arg1)
1738 && !TREE_OVERFLOW (arg2)
1739 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1741 if (code == CEIL_MOD_EXPR)
1743 low = int1l % int2l, hi = 0;
1747 /* ... fall through ... */
1749 case ROUND_MOD_EXPR:
1750 if (int2h == 0 && int2l == 0)
1752 overflow = div_and_round_double (code, uns,
1753 int1l, int1h, int2l, int2h,
1754 &garbagel, &garbageh, &low, &hi);
1760 low = (((unsigned HOST_WIDE_INT) int1h
1761 < (unsigned HOST_WIDE_INT) int2h)
1762 || (((unsigned HOST_WIDE_INT) int1h
1763 == (unsigned HOST_WIDE_INT) int2h)
1766 low = (int1h < int2h
1767 || (int1h == int2h && int1l < int2l));
1769 if (low == (code == MIN_EXPR))
1770 low = int1l, hi = int1h;
1772 low = int2l, hi = int2h;
1781 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1783 /* Propagate overflow flags ourselves. */
1784 if (((!uns || is_sizetype) && overflow)
1785 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1788 TREE_OVERFLOW (t) = 1;
1792 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1793 ((!uns || is_sizetype) && overflow)
1794 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1799 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1800 constant. We assume ARG1 and ARG2 have the same data type, or at least
1801 are the same kind of constant and the same machine mode. Return zero if
1802 combining the constants is not allowed in the current operating mode.
1804 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 /* Sanity check for the recursive cases. */
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1819 if (TREE_CODE (arg1) == REAL_CST)
1821 enum machine_mode mode;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1829 /* The following codes are handled by real_arithmetic. */
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1850 /* Don't perform operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1856 /* Don't perform operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1867 else if (REAL_VALUE_ISNAN (d2))
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1882 /* Don't constant fold this floating point operation if the
1883 result may dependent upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1888 && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1907 /* The following codes are handled by fixed_arithmetic. */
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
2003 return build_complex (type, real, imag);
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with same signedness
2124 and the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2145 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2146 to an integer type. */
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2185 /* See if R is less than the lower bound or greater than the
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2202 tree ut = TYPE_MAX_VALUE (type);
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2230 double_int temp, temp_trunc;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0.
2257 We do this by adding 1 to temp when the fractional bits are not zero. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2264 temp = double_int_add (temp, one);
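/* Worked example (illustrative): with 8 fractional bits, -2.5 is
   represented as -640. The arithmetic shift right gives
   temp = -640 >> 8 = -3 (rounded toward -infinity); shifting back
   gives temp_trunc = -768, which differs from -640, so fractional
   bits were lost. The value is negative, so 1 is added, yielding -2,
   i.e. -2.5 correctly rounded toward zero.  */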
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2408 else if (TREE_CODE (type) == REAL_TYPE)
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2429 /* Construct a vector of zero elements of vector type TYPE. */
2432 build_zero_vector (tree type)
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2446 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2449 fold_convertible_p (const_tree type, const_tree arg)
2451 tree orig = TREE_TYPE (arg);
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2464 switch (TREE_CODE (type))
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2476 case FIXED_POINT_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2491 fold_convert (tree type, tree arg)
2493 tree orig = TREE_TYPE (arg);
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2507 switch (TREE_CODE (type))
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2531 if (TREE_CODE (arg) == INTEGER_CST)
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2537 else if (TREE_CODE (arg) == REAL_CST)
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2543 else if (TREE_CODE (arg) == FIXED_CST)
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2550 switch (TREE_CODE (orig))
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2558 return fold_build1 (NOP_EXPR, type, arg);
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2580 switch (TREE_CODE (orig))
2582 case FIXED_POINT_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2598 switch (TREE_CODE (orig))
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2643 return fold_build1 (NOP_EXPR, type, tem);
2650 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2654 maybe_lvalue_p (const_tree x)
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2671 case ARRAY_RANGE_REF:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2684 case GIMPLE_MODIFY_STMT:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2712 if (! maybe_lvalue_p (x))
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a
2737 HONOR_NANS flag: if reversing the comparison is unsafe, return ERROR_MARK. */
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2784 swap_tree_comparison (enum tree_code code)
2791 case UNORDERED_EXPR:
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2843 return COMPCODE_UNLT;
2845 return COMPCODE_UNEQ;
2847 return COMPCODE_UNLE;
2849 return COMPCODE_UNGT;
2851 return COMPCODE_LTGT;
2853 return COMPCODE_UNGE;
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
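/* Illustrative examples, assuming the bit-based encoding of enum
   comparison_code in which LT, EQ, GT and UNORD each occupy one bit:

     (x < y) || (x == y):  COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
                           which maps back to x <= y;
     (x < y) && (x > y):   COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE,
                           i.e. constant false.

   An AND of comparisons thus intersects the bit sets and an OR unions
   them; the NaN and trapping checks below then decide whether the
   simplified comparison may actually be used.  */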
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
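/* A concrete illustration of the IEEE subtlety above, in plain C:
   -0.0 == 0.0 evaluates to 1, yet 1.0 / -0.0 is -inf while 1.0 / 0.0
   is +inf, so the two constants are not interchangeable; conversely a
   NaN compares unequal even to a bitwise-identical copy of itself,
   although the two copies are indistinguishable as values.  */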
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3043 /* If both types don't have the same precision, then it is not safe
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3093 return tree_int_cst_equal (arg0, arg1);
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3151 if (flags & OEP_ONLY_CONST)
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, then both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3184 case tcc_comparison:
3186 if (OP_SAME (0) && OP_SAME (1))
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contain a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3203 switch (TREE_CODE (arg0))
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 Compare the array index by value first if it is constant, as the
3216 indexes may have different types but the same value here. */
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3229 && OP_SAME_WITH_NULL (2);
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3242 case TRUTH_NOT_EXPR:
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3249 case TRUTH_AND_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3255 /* Otherwise take into account this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3266 switch (TREE_CODE (arg0))
3269 /* If the CALL_EXPRs call different functions, then they
3270 clearly cannot be equal. */
3271 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3276 unsigned int cef = call_expr_flags (arg0);
3277 if (flags & OEP_PURE_SAME)
3278 cef &= ECF_CONST | ECF_PURE;
3285 /* Now see if all the arguments are the same. */
3287 const_call_expr_arg_iterator iter0, iter1;
3289 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3290 a1 = first_const_call_expr_arg (arg1, &iter1);
3292 a0 = next_const_call_expr_arg (&iter0),
3293 a1 = next_const_call_expr_arg (&iter1))
3294 if (! operand_equal_p (a0, a1, flags))
3297 /* If we get here and both argument lists are exhausted
3298 then the CALL_EXPRs are equal. */
3299 return ! (a0 || a1);
3305 case tcc_declaration:
3306 /* Consider __builtin_sqrt equal to sqrt. */
3307 return (TREE_CODE (arg0) == FUNCTION_DECL
3308 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3309 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3310 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3317 #undef OP_SAME_WITH_NULL
3320 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3321 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3323 When in doubt, return 0. */
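/* For instance (an illustrative, hypothetical case): if the front end
   shortened the comparison (int) c == 4, with c of type char, to the
   equivalent c == 4, then ARG0 may be c while ARG1 is still (int) c
   and OTHER is the constant 4; this predicate recognizes that ARG0
   and ARG1 denote the same value in that comparison.  */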
3326 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3328 int unsignedp1, unsignedpo;
3329 tree primarg0, primarg1, primother;
3330 unsigned int correct_width;
3332 if (operand_equal_p (arg0, arg1, 0))
3335 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3336 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3339 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3340 and see if the inner values are the same. This removes any
3341 signedness comparison, which doesn't matter here. */
3342 primarg0 = arg0, primarg1 = arg1;
3343 STRIP_NOPS (primarg0);
3344 STRIP_NOPS (primarg1);
3345 if (operand_equal_p (primarg0, primarg1, 0))
3348 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3349 actual comparison operand, ARG0.
3351 First throw away any conversions to wider types
3352 already present in the operands. */
3354 primarg1 = get_narrower (arg1, &unsignedp1);
3355 primother = get_narrower (other, &unsignedpo);
3357 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3358 if (unsignedp1 == unsignedpo
3359 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3360 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3362 tree type = TREE_TYPE (arg0);
3364 /* Make sure shorter operand is extended the right way
3365 to match the longer operand. */
3366 primarg1 = fold_convert (signed_or_unsigned_type_for
3367 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3369 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3376 /* See if ARG is an expression that is either a comparison or is performing
3377 arithmetic on comparisons. The comparisons must only be comparing
3378 two different values, which will be stored in *CVAL1 and *CVAL2; if
3379 they are nonzero it means that some operands have already been found.
3380 No variables may be used anywhere else in the expression except in the
3381 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3382 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3384 If this is true, return 1. Otherwise, return zero. */
3387 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3389 enum tree_code code = TREE_CODE (arg);
3390 enum tree_code_class class = TREE_CODE_CLASS (code);
3392 /* We can handle some of the tcc_expression cases here. */
3393 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3395 else if (class == tcc_expression
3396 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3397 || code == COMPOUND_EXPR))
3400 else if (class == tcc_expression && code == SAVE_EXPR
3401 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3403 /* If we've already found a CVAL1 or CVAL2, this expression is
3404 too complex to handle. */
3405 if (*cval1 || *cval2)
3415 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3418 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3419 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3420 cval1, cval2, save_p));
3425 case tcc_expression:
3426 if (code == COND_EXPR)
3427 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3428 cval1, cval2, save_p)
3429 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3430 cval1, cval2, save_p)
3431 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3432 cval1, cval2, save_p));
3435 case tcc_comparison:
3436 /* First see if we can handle the first operand, then the second. For
3437 the second operand, we know *CVAL1 can't be zero. It must be that
3438 one side of the comparison is each of the values; test for the
3439 case where this isn't true by failing if the two operands are the same. */
3442 if (operand_equal_p (TREE_OPERAND (arg, 0),
3443 TREE_OPERAND (arg, 1), 0))
3447 *cval1 = TREE_OPERAND (arg, 0);
3448 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3450 else if (*cval2 == 0)
3451 *cval2 = TREE_OPERAND (arg, 0);
3452 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3457 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3459 else if (*cval2 == 0)
3460 *cval2 = TREE_OPERAND (arg, 1);
3461 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3473 /* ARG is a tree that is known to contain just arithmetic operations and
3474 comparisons. Evaluate the operations in the tree substituting NEW0 for
3475 any occurrence of OLD0 as an operand of a comparison, and likewise NEW1 for OLD1. */
3479 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3481 tree type = TREE_TYPE (arg);
3482 enum tree_code code = TREE_CODE (arg);
3483 enum tree_code_class class = TREE_CODE_CLASS (code);
3485 /* We can handle some of the tcc_expression cases here. */
3486 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3488 else if (class == tcc_expression
3489 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3495 return fold_build1 (code, type,
3496 eval_subst (TREE_OPERAND (arg, 0),
3497 old0, new0, old1, new1));
3500 return fold_build2 (code, type,
3501 eval_subst (TREE_OPERAND (arg, 0),
3502 old0, new0, old1, new1),
3503 eval_subst (TREE_OPERAND (arg, 1),
3504 old0, new0, old1, new1));
3506 case tcc_expression:
3510 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3513 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3516 return fold_build3 (code, type,
3517 eval_subst (TREE_OPERAND (arg, 0),
3518 old0, new0, old1, new1),
3519 eval_subst (TREE_OPERAND (arg, 1),
3520 old0, new0, old1, new1),
3521 eval_subst (TREE_OPERAND (arg, 2),
3522 old0, new0, old1, new1));
3526 /* Fall through - ??? */
3528 case tcc_comparison:
3530 tree arg0 = TREE_OPERAND (arg, 0);
3531 tree arg1 = TREE_OPERAND (arg, 1);
3533 /* We need to check both for exact equality and tree equality. The
3534 former will be true if the operand has a side-effect. In that
3535 case, we know the operand occurred exactly once. */
3537 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3539 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3542 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3544 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3547 return fold_build2 (code, type, arg0, arg1);
3555 /* Return a tree for the case when the result of an expression is RESULT
3556 converted to TYPE and OMITTED was previously an operand of the expression
3557 but is now not needed (e.g., we folded OMITTED * 0).
3559 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3560 the conversion of RESULT to TYPE. */
3563 omit_one_operand (tree type, tree result, tree omitted)
3565 tree t = fold_convert (type, result);
3567 /* If the resulting operand is an empty statement, just return the omitted
3568 statement cast to void. */
3569 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3570 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3572 if (TREE_SIDE_EFFECTS (omitted))
3573 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3575 return non_lvalue (t);
3578 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3581 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3583 tree t = fold_convert (type, result);
3585 /* If the resulting operand is an empty statement, just return the omitted
3586 statement cast to void. */
3587 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3588 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3590 if (TREE_SIDE_EFFECTS (omitted))
3591 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3593 return pedantic_non_lvalue (t);
3596 /* Return a tree for the case when the result of an expression is RESULT
3597 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3598 of the expression but are now not needed.
3600 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3601 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3602 evaluated before OMITTED2. Otherwise, if neither has side effects,
3603 just do the conversion of RESULT to TYPE. */
3606 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3608 tree t = fold_convert (type, result);
3610 if (TREE_SIDE_EFFECTS (omitted2))
3611 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3612 if (TREE_SIDE_EFFECTS (omitted1))
3613 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3615 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3619 /* Return a simplified tree node for the truth-negation of ARG. This
3620 never alters ARG itself. We assume that ARG is an operation that
3621 returns a truth value (0 or 1).
3623 FIXME: one would think we would fold the result, but it causes
3624 problems with the dominator optimizer. */
3627 fold_truth_not_expr (tree arg)
3629 tree type = TREE_TYPE (arg);
3630 enum tree_code code = TREE_CODE (arg);
3632 /* If this is a comparison, we can simply invert it, except for
3633 floating-point non-equality comparisons, in which case we just
3634 enclose a TRUTH_NOT_EXPR around what we have. */
3636 if (TREE_CODE_CLASS (code) == tcc_comparison)
3638 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3639 if (FLOAT_TYPE_P (op_type)
3640 && flag_trapping_math
3641 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3642 && code != NE_EXPR && code != EQ_EXPR)
3646 code = invert_tree_comparison (code,
3647 HONOR_NANS (TYPE_MODE (op_type)));
3648 if (code == ERROR_MARK)
3651 return build2 (code, type,
3652 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3659 return constant_boolean_node (integer_zerop (arg), type);
3661 case TRUTH_AND_EXPR:
3662 return build2 (TRUTH_OR_EXPR, type,
3663 invert_truthvalue (TREE_OPERAND (arg, 0)),
3664 invert_truthvalue (TREE_OPERAND (arg, 1)));
3667 return build2 (TRUTH_AND_EXPR, type,
3668 invert_truthvalue (TREE_OPERAND (arg, 0)),
3669 invert_truthvalue (TREE_OPERAND (arg, 1)));
3671 case TRUTH_XOR_EXPR:
3672 /* Here we can invert either operand. We invert the first operand
3673 unless the second operand is a TRUTH_NOT_EXPR in which case our
3674 result is the XOR of the first operand with the inside of the
3675 negation of the second operand. */
3677 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3678 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3679 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3681 return build2 (TRUTH_XOR_EXPR, type,
3682 invert_truthvalue (TREE_OPERAND (arg, 0)),
3683 TREE_OPERAND (arg, 1));
3685 case TRUTH_ANDIF_EXPR:
3686 return build2 (TRUTH_ORIF_EXPR, type,
3687 invert_truthvalue (TREE_OPERAND (arg, 0)),
3688 invert_truthvalue (TREE_OPERAND (arg, 1)));
3690 case TRUTH_ORIF_EXPR:
3691 return build2 (TRUTH_ANDIF_EXPR, type,
3692 invert_truthvalue (TREE_OPERAND (arg, 0)),
3693 invert_truthvalue (TREE_OPERAND (arg, 1)));
3695 case TRUTH_NOT_EXPR:
3696 return TREE_OPERAND (arg, 0);
3700 tree arg1 = TREE_OPERAND (arg, 1);
3701 tree arg2 = TREE_OPERAND (arg, 2);
3702 /* A COND_EXPR may have a throw as one operand, which
3703 then has void type. Just leave void operands as they are. */
3705 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3706 VOID_TYPE_P (TREE_TYPE (arg1))
3707 ? arg1 : invert_truthvalue (arg1),
3708 VOID_TYPE_P (TREE_TYPE (arg2))
3709 ? arg2 : invert_truthvalue (arg2));
3713 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3714 invert_truthvalue (TREE_OPERAND (arg, 1)));
3716 case NON_LVALUE_EXPR:
3717 return invert_truthvalue (TREE_OPERAND (arg, 0));
3720 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3721 return build1 (TRUTH_NOT_EXPR, type, arg);
3725 return build1 (TREE_CODE (arg), type,
3726 invert_truthvalue (TREE_OPERAND (arg, 0)));
3729 if (!integer_onep (TREE_OPERAND (arg, 1)))
3731 return build2 (EQ_EXPR, type, arg,
3732 build_int_cst (type, 0));
3735 return build1 (TRUTH_NOT_EXPR, type, arg);
3737 case CLEANUP_POINT_EXPR:
3738 return build1 (CLEANUP_POINT_EXPR, type,
3739 invert_truthvalue (TREE_OPERAND (arg, 0)));
3748 /* Return a simplified tree node for the truth-negation of ARG. This
3749 never alters ARG itself. We assume that ARG is an operation that
3750 returns a truth value (0 or 1).
3752 FIXME: one would think we would fold the result, but it causes
3753 problems with the dominator optimizer. */
3756 invert_truthvalue (tree arg)
3760 if (TREE_CODE (arg) == ERROR_MARK)
3763 tem = fold_truth_not_expr (arg);
3765 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3770 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3771 operands are another bit-wise operation with a common input. If so,
3772 distribute the bit operations to save an operation and possibly two if
3773 constants are involved. For example, convert
3774 (A | B) & (A | C) into A | (B & C)
3775 Further simplification will occur if B and C are constants.
3777 If this optimization cannot be done, 0 will be returned. */
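/* For instance, (X | 3) & (X | 5) becomes X | (3 & 5), and the inner
   operation then folds to the constant 1, leaving the single
   operation X | 1 in place of the original three.  */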
3780 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3785 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3786 || TREE_CODE (arg0) == code
3787 || (TREE_CODE (arg0) != BIT_AND_EXPR
3788 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3791 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3793 common = TREE_OPERAND (arg0, 0);
3794 left = TREE_OPERAND (arg0, 1);
3795 right = TREE_OPERAND (arg1, 1);
3797 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3799 common = TREE_OPERAND (arg0, 0);
3800 left = TREE_OPERAND (arg0, 1);
3801 right = TREE_OPERAND (arg1, 0);
3803 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3805 common = TREE_OPERAND (arg0, 1);
3806 left = TREE_OPERAND (arg0, 0);
3807 right = TREE_OPERAND (arg1, 1);
3809 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3811 common = TREE_OPERAND (arg0, 1);
3812 left = TREE_OPERAND (arg0, 0);
3813 right = TREE_OPERAND (arg1, 0);
3818 return fold_build2 (TREE_CODE (arg0), type, common,
3819 fold_build2 (code, type, left, right));
3822 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3823 with code CODE. This optimization is unsafe. */
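/* The transformation can change results: with A = B = DBL_MAX and
   C = 2.0, (A / C) + (B / C) computes DBL_MAX exactly, whereas
   (A + B) / C overflows to +inf. More generally the two forms round
   differently, which is presumably why this helper is only used under
   unsafe math optimizations.  */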
3825 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3827 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3828 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3830 /* (A / C) +- (B / C) -> (A +- B) / C. */
3832 && operand_equal_p (TREE_OPERAND (arg0, 1),
3833 TREE_OPERAND (arg1, 1), 0))
3834 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3835 fold_build2 (code, type,
3836 TREE_OPERAND (arg0, 0),
3837 TREE_OPERAND (arg1, 0)),
3838 TREE_OPERAND (arg0, 1));
3840 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3841 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3842 TREE_OPERAND (arg1, 0), 0)
3843 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3844 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3846 REAL_VALUE_TYPE r0, r1;
3847 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3848 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3850 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3852 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3853 real_arithmetic (&r0, code, &r0, &r1);
3854 return fold_build2 (MULT_EXPR, type,
3855 TREE_OPERAND (arg0, 0),
3856 build_real (type, r0));
3862 /* Subroutine for fold_truthop: decode a field reference.
3864 If EXP is a comparison reference, we return the innermost reference.
3866 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3867 set to the starting bit number.
3869 If the innermost field can be completely contained in a mode-sized
3870 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3872 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3873 otherwise it is not changed.
3875 *PUNSIGNEDP is set to the signedness of the field.
3877 *PMASK is set to the mask used. This is either contained in a
3878 BIT_AND_EXPR or derived from the width of the field.
3880 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3882 Return 0 if this is not a component reference or is one that we can't
3883 do anything with. */
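/* For example (illustrative), given

     struct s { unsigned int f : 3; } x;

   and EXP of the form x.f & 5, this returns the inner reference to x,
   sets *PBITSIZE to 3 and *PAND_MASK to 5, and computes *PMASK by
   merging 5 with the three-bit field mask 7, giving 5.  */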
3886 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3887 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3888 int *punsignedp, int *pvolatilep,
3889 tree *pmask, tree *pand_mask)
3891 tree outer_type = 0;
3893 tree mask, inner, offset;
3895 unsigned int precision;
3897 /* All the optimizations using this function assume integer fields.
3898 There are problems with FP fields since the type_for_size call
3899 below can fail for, e.g., XFmode. */
3900 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3903 /* We are interested in the bare arrangement of bits, so strip everything
3904 that doesn't affect the machine mode. However, record the type of the
3905 outermost expression if it may matter below. */
3906 if (CONVERT_EXPR_P (exp)
3907 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3908 outer_type = TREE_TYPE (exp);
3911 if (TREE_CODE (exp) == BIT_AND_EXPR)
3913 and_mask = TREE_OPERAND (exp, 1);
3914 exp = TREE_OPERAND (exp, 0);
3915 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3916 if (TREE_CODE (and_mask) != INTEGER_CST)
3920 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3921 punsignedp, pvolatilep, false);
3922 if ((inner == exp && and_mask == 0)
3923 || *pbitsize < 0 || offset != 0
3924 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3927 /* If the number of bits in the reference is the same as the bitsize of
3928 the outer type, then the outer type gives the signedness. Otherwise
3929 (in case of a small bitfield) the signedness is unchanged. */
3930 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3931 *punsignedp = TYPE_UNSIGNED (outer_type);
3933 /* Compute the mask to access the bitfield. */
3934 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3935 precision = TYPE_PRECISION (unsigned_type);
3937 mask = build_int_cst_type (unsigned_type, -1);
3939 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3940 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3942 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3944 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3945 fold_convert (unsigned_type, and_mask), mask);
3948 *pand_mask = and_mask;
3952 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3953 represents the sign bit of EXP's type. If EXP represents a sign
3954 or zero extension, also test VAL against the unextended type.
3955 The return value is the (sub)expression whose sign bit is VAL,
3956 or NULL_TREE otherwise. */
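/* For example, for a 16-bit signed type the sign bit value is 0x8000:
   WIDTH is 16, LO becomes (unsigned HOST_WIDE_INT) 1 << 15 and MASK_LO
   becomes 0xffff, so an INTEGER_CST VAL whose low word, masked with
   0xffff, equals 0x8000 (however it was extended into the tree)
   matches, and EXP is returned.  */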
3959 sign_bit_p (tree exp, const_tree val)
3961 unsigned HOST_WIDE_INT mask_lo, lo;
3962 HOST_WIDE_INT mask_hi, hi;
3966 /* Tree EXP must have an integral type. */
3967 t = TREE_TYPE (exp);
3968 if (! INTEGRAL_TYPE_P (t))
3971 /* Tree VAL must be an integer constant. */
3972 if (TREE_CODE (val) != INTEGER_CST
3973 || TREE_OVERFLOW (val))
3976 width = TYPE_PRECISION (t);
3977 if (width > HOST_BITS_PER_WIDE_INT)
3979 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3982 mask_hi = ((unsigned HOST_WIDE_INT) -1
3983 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3989 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3992 mask_lo = ((unsigned HOST_WIDE_INT) -1
3993 >> (HOST_BITS_PER_WIDE_INT - width));
3996 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3997 treat VAL as if it were unsigned. */
3998 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3999 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4002 /* Handle extension from a narrower type. */
4003 if (TREE_CODE (exp) == NOP_EXPR
4004 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4005 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4010 /* Subroutine for fold_truthop: determine if an operand is simple enough
4011 to be evaluated unconditionally. */
4014 simple_operand_p (const_tree exp)
4016 /* Strip any conversions that don't change the machine mode. */
4019 return (CONSTANT_CLASS_P (exp)
4020 || TREE_CODE (exp) == SSA_NAME
4022 && ! TREE_ADDRESSABLE (exp)
4023 && ! TREE_THIS_VOLATILE (exp)
4024 && ! DECL_NONLOCAL (exp)
4025 /* Don't regard global variables as simple. They may be
4026 allocated in ways unknown to the compiler (shared memory,
4027 #pragma weak, etc). */
4028 && ! TREE_PUBLIC (exp)
4029 && ! DECL_EXTERNAL (exp)
4030 /* Loading a static variable is unduly expensive, but global
4031 registers aren't expensive. */
4032 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4035 /* The following functions are subroutines to fold_range_test and allow it to
4036 try to change a logical combination of comparisons into a range test.
4039 X == 2 || X == 3 || X == 4 || X == 5
4043 (unsigned) (X - 2) <= 3
4045 We describe each set of comparisons as being either inside or outside
4046 a range, using a variable named like IN_P, and then describe the
4047 range with a lower and upper bound. If one of the bounds is omitted,
4048 it represents either the highest or lowest value of the type.
4050 In the comments below, we represent a range by two numbers in brackets
4051 preceded by a "+" to designate being inside that range, or a "-" to
4052 designate being outside that range, so the condition can be inverted by
4053 flipping the prefix. An omitted bound is represented by a "-". For
4054 example, "- [-, 10]" means being outside the range starting at the lowest
4055 possible value and ending at 10, in other words, being greater than 10.
4056 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
4059 We set up things so that the missing bounds are handled in a consistent
4060 manner so neither a missing bound nor "true" and "false" need to be
4061 handled using a special case. */
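/* A standalone illustration in plain C of the equivalence these
   helpers exploit:

     x == 2 || x == 3 || x == 4 || x == 5

   holds exactly when

     (unsigned) (x - 2) <= 3u

   because subtracting the low bound shifts the range to start at zero,
   and the unsigned comparison then rejects values above the range as
   well as values below it, which wrap around to very large unsigned
   numbers.  */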
4063 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4064 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4065 and UPPER1_P are nonzero if the respective argument is an upper bound
4066 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4067 must be specified for a comparison. ARG1 will be converted to ARG0's
4068 type if both are specified. */
4071 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4072 tree arg1, int upper1_p)
4078 /* If neither arg represents infinity, do the normal operation.
4079 Else, if not a comparison, return infinity. Else handle the special
4080 comparison rules. Note that most of the cases below won't occur, but
4081 are handled for consistency. */
4083 if (arg0 != 0 && arg1 != 0)
4085 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4086 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4088 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4091 if (TREE_CODE_CLASS (code) != tcc_comparison)
4094 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4095 for neither. In real maths, we cannot assume open ended ranges are
4096 the same. But, this is computer arithmetic, where numbers are finite.
4097 We can therefore make the transformation of any unbounded range with
4098 the value Z, Z being greater than any representable number. This permits
4099 us to treat unbounded ranges as equal. */
4100 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4101 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4105 result = sgn0 == sgn1;
4108 result = sgn0 != sgn1;
4111 result = sgn0 < sgn1;
4114 result = sgn0 <= sgn1;
4117 result = sgn0 > sgn1;
4120 result = sgn0 >= sgn1;
4126 return constant_boolean_node (result, type);
4129 /* Given EXP, a logical expression, set the range it is testing into
4130 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4131 actually being tested. *PLOW and *PHIGH will be made of the same
4132 type as the returned expression. If EXP is not a comparison, we
4133 will most likely not be returning a useful value and range. Set
4134 *STRICT_OVERFLOW_P to true if the return value is only valid
4135 because signed overflow is undefined; otherwise, do not change
4136 *STRICT_OVERFLOW_P. */
4139 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4140 bool *strict_overflow_p)
4142 enum tree_code code;
4143 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4144 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4146 tree low, high, n_low, n_high;
4148 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4149 and see if we can refine the range. Some of the cases below may not
4150 happen, but it doesn't seem worth worrying about this. We "continue"
4151 the outer loop when we've changed something; otherwise we "break"
4152 the switch, which will "break" the while. */
4155 low = high = build_int_cst (TREE_TYPE (exp), 0);
4159 code = TREE_CODE (exp);
4160 exp_type = TREE_TYPE (exp);
4162 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4164 if (TREE_OPERAND_LENGTH (exp) > 0)
4165 arg0 = TREE_OPERAND (exp, 0);
4166 if (TREE_CODE_CLASS (code) == tcc_comparison
4167 || TREE_CODE_CLASS (code) == tcc_unary
4168 || TREE_CODE_CLASS (code) == tcc_binary)
4169 arg0_type = TREE_TYPE (arg0);
4170 if (TREE_CODE_CLASS (code) == tcc_binary
4171 || TREE_CODE_CLASS (code) == tcc_comparison
4172 || (TREE_CODE_CLASS (code) == tcc_expression
4173 && TREE_OPERAND_LENGTH (exp) > 1))
4174 arg1 = TREE_OPERAND (exp, 1);
4179 case TRUTH_NOT_EXPR:
4180 in_p = ! in_p, exp = arg0;
4183 case EQ_EXPR: case NE_EXPR:
4184 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4185 /* We can only do something if the range is testing for zero
4186 and if the second operand is an integer constant. Note that
4187 saying something is "in" the range we make is done by
4188 complementing IN_P, since IN_P is set in the initial case of
4189 being not equal to zero; "out" means leaving it alone. */
4190 if (low == 0 || high == 0
4191 || ! integer_zerop (low) || ! integer_zerop (high)
4192 || TREE_CODE (arg1) != INTEGER_CST)
4197 case NE_EXPR: /* - [c, c] */
4200 case EQ_EXPR: /* + [c, c] */
4201 in_p = ! in_p, low = high = arg1;
4203 case GT_EXPR: /* - [-, c] */
4204 low = 0, high = arg1;
4206 case GE_EXPR: /* + [c, -] */
4207 in_p = ! in_p, low = arg1, high = 0;
4209 case LT_EXPR: /* - [c, -] */
4210 low = arg1, high = 0;
4212 case LE_EXPR: /* + [-, c] */
4213 in_p = ! in_p, low = 0, high = arg1;
4219 /* If this is an unsigned comparison, we also know that EXP is
4220 greater than or equal to zero. We base the range tests we make
4221 on that fact, so we record it here so we can parse existing
4222 range tests. We test arg0_type since often the return type
4223 of, e.g. EQ_EXPR, is boolean. */
4224 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4226 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4228 build_int_cst (arg0_type, 0),
4232 in_p = n_in_p, low = n_low, high = n_high;
4234 /* If the high bound is missing, but we have a nonzero low
4235 bound, reverse the range so it goes from zero to the low bound minus 1. */
4237 if (high == 0 && low && ! integer_zerop (low))
4240 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4241 integer_one_node, 0);
4242 low = build_int_cst (arg0_type, 0);
4250 /* (-x) IN [a,b] -> x in [-b, -a] */
4251 n_low = range_binop (MINUS_EXPR, exp_type,
4252 build_int_cst (exp_type, 0),
4254 n_high = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4257 low = n_low, high = n_high;
4263 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4264 build_int_cst (exp_type, 1));
4267 case PLUS_EXPR: case MINUS_EXPR:
4268 if (TREE_CODE (arg1) != INTEGER_CST)
4271 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4272 move a constant to the other side. */
4273 if (!TYPE_UNSIGNED (arg0_type)
4274 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4277 /* If EXP is signed, any overflow in the computation is undefined,
4278 so we don't worry about it so long as our computations on
4279 the bounds don't overflow. For unsigned, overflow is defined
4280 and this is exactly the right thing. */
4281 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4282 arg0_type, low, 0, arg1, 0);
4283 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4284 arg0_type, high, 1, arg1, 0);
4285 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4286 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4289 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4290 *strict_overflow_p = true;
4292 /* Check for an unsigned range which has wrapped around the maximum
4293 value thus making n_high < n_low, and normalize it. */
4294 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4296 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4297 integer_one_node, 0);
4298 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4299 integer_one_node, 0);
4301 /* If the range is of the form +/- [ x+1, x ], we won't
4302 be able to normalize it. But then, it represents the
4303 whole range or the empty set, so make it +/- [ -, - ]. */
4305 if (tree_int_cst_equal (n_low, low)
4306 && tree_int_cst_equal (n_high, high))
4312 low = n_low, high = n_high;
4317 CASE_CONVERT: case NON_LVALUE_EXPR:
4318 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4321 if (! INTEGRAL_TYPE_P (arg0_type)
4322 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4323 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4326 n_low = low, n_high = high;
4329 n_low = fold_convert (arg0_type, n_low);
4332 n_high = fold_convert (arg0_type, n_high);
4335 /* If we're converting arg0 from an unsigned type, to exp,
4336 a signed type, we will be doing the comparison as unsigned.
4337 The tests above have already verified that LOW and HIGH are non-negative.
4340 So we have to ensure that we will handle large unsigned
4341 values the same way that the current signed bounds treat negative values. */
4344 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4348 /* For fixed-point modes, we need to pass the saturating flag
4349 as the 2nd parameter. */
4350 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4351 equiv_type = lang_hooks.types.type_for_mode
4352 (TYPE_MODE (arg0_type),
4353 TYPE_SATURATING (arg0_type));
4355 equiv_type = lang_hooks.types.type_for_mode
4356 (TYPE_MODE (arg0_type), 1);
4358 /* A range without an upper bound is, naturally, unbounded.
4359 Since convert would have cropped a very large value, use
4360 the max value for the destination type. */
4362 high_positive = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4363 : TYPE_MAX_VALUE (arg0_type);
4365 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4366 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4367 fold_convert (arg0_type,
4369 build_int_cst (arg0_type, 1));
4371 /* If the low bound is specified, "and" the range with the
4372 range for which the original unsigned value will be positive. */
4376 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4377 1, n_low, n_high, 1,
4378 fold_convert (arg0_type,
4383 in_p = (n_in_p == in_p);
4387 /* Otherwise, "or" the range with the range of the input
4388 that will be interpreted as negative. */
4389 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4390 0, n_low, n_high, 1,
4391 fold_convert (arg0_type,
4396 in_p = (in_p != n_in_p);
4401 low = n_low, high = n_high;
4411 /* If EXP is a constant, we can evaluate whether this is true or false. */
4412 if (TREE_CODE (exp) == INTEGER_CST)
4414 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4416 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4422 *pin_p = in_p, *plow = low, *phigh = high;
4426 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4427 type, TYPE, return an expression to test if EXP is in (or out of, depending
4428 on IN_P) the range. Return 0 if the test couldn't be created. */
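/* For instance, for an int EXP with IN_P nonzero, LOW == 2 and
   HIGH == 5, the general case below builds the equivalent of

     (unsigned int) (EXP - 2) <= 3

   while the degenerate cases are handled first: equal bounds become
   EXP == LOW, and a single missing bound becomes one <= or >= test.  */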
4431 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4433 tree etype = TREE_TYPE (exp);
4436 #ifdef HAVE_canonicalize_funcptr_for_compare
4437 /* Disable this optimization for function pointer expressions
4438 on targets that require function pointer canonicalization. */
4439 if (HAVE_canonicalize_funcptr_for_compare
4440 && TREE_CODE (etype) == POINTER_TYPE
4441 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4447 value = build_range_check (type, exp, 1, low, high);
4449 return invert_truthvalue (value);
4454 if (low == 0 && high == 0)
4455 return build_int_cst (type, 1);
4458 return fold_build2 (LE_EXPR, type, exp,
4459 fold_convert (etype, high));
4462 return fold_build2 (GE_EXPR, type, exp,
4463 fold_convert (etype, low));
4465 if (operand_equal_p (low, high, 0))
4466 return fold_build2 (EQ_EXPR, type, exp,
4467 fold_convert (etype, low));
4469 if (integer_zerop (low))
4471 if (! TYPE_UNSIGNED (etype))
4473 etype = unsigned_type_for (etype);
4474 high = fold_convert (etype, high);
4475 exp = fold_convert (etype, exp);
4477 return build_range_check (type, exp, 1, 0, high);
4480 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4481 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4483 unsigned HOST_WIDE_INT lo;
4487 prec = TYPE_PRECISION (etype);
4488 if (prec <= HOST_BITS_PER_WIDE_INT)
4491 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4495 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4496 lo = (unsigned HOST_WIDE_INT) -1;
4499 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4501 if (TYPE_UNSIGNED (etype))
4503 etype = signed_type_for (etype);
4504 exp = fold_convert (etype, exp);
4506 return fold_build2 (GT_EXPR, type, exp,
4507 build_int_cst (etype, 0));
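/* A minimal standalone sketch of the fold above, assuming 8-bit chars:
   the range test and the sign test agree for every value of c.

     int in_range (unsigned char c)  { return c >= 1 && c <= 127; }
     int sign_test (unsigned char c) { return (signed char) c > 0; }  */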
4511 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4512	     This requires wrap-around arithmetic for the type of the expression.  */
4513 switch (TREE_CODE (etype))
4516 /* There is no requirement that LOW be within the range of ETYPE
4517 if the latter is a subtype. It must, however, be within the base
4518 type of ETYPE. So be sure we do the subtraction in that type. */
4519 if (TREE_TYPE (etype))
4520 etype = TREE_TYPE (etype);
4525 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4526 TYPE_UNSIGNED (etype));
4533	  /* If we don't have wrap-around arithmetic upfront, try to force it.  */
4534 if (TREE_CODE (etype) == INTEGER_TYPE
4535 && !TYPE_OVERFLOW_WRAPS (etype))
4537 tree utype, minv, maxv;
4539 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4540 for the type in question, as we rely on this here. */
4541 utype = unsigned_type_for (etype);
4542 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4543 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4544 integer_one_node, 1);
4545 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4547 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4554 high = fold_convert (etype, high);
4555 low = fold_convert (etype, low);
4556 exp = fold_convert (etype, exp);
4558 value = const_binop (MINUS_EXPR, high, low, 0);
4561 if (POINTER_TYPE_P (etype))
4563 if (value != 0 && !TREE_OVERFLOW (value))
4565 low = fold_convert (sizetype, low);
4566 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4567 return build_range_check (type,
4568 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4569 1, build_int_cst (etype, 0), value);
4574 if (value != 0 && !TREE_OVERFLOW (value))
4575 return build_range_check (type,
4576 fold_build2 (MINUS_EXPR, etype, exp, low),
4577 1, build_int_cst (etype, 0), value);
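/* A sketch of the subtraction trick used above, assuming 32-bit unsigned
   wrap-around arithmetic and low <= high: the two-sided bounds check
   collapses into a single unsigned comparison.

     int in_range (unsigned int c, unsigned int low, unsigned int high)
     {
       return c - low <= high - low;  // same as low <= c && c <= high
     }  */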
4582 /* Return the predecessor of VAL in its type, handling the infinite case. */
4585 range_predecessor (tree val)
4587 tree type = TREE_TYPE (val);
4589 if (INTEGRAL_TYPE_P (type)
4590 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4593 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4596 /* Return the successor of VAL in its type, handling the infinite case. */
4599 range_successor (tree val)
4601 tree type = TREE_TYPE (val);
4603 if (INTEGRAL_TYPE_P (type)
4604 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4607 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4610 /* Given two ranges, see if we can merge them into one. Return 1 if we
4611 can, 0 if we can't. Set the output range into the specified parameters. */
4614 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4615 tree high0, int in1_p, tree low1, tree high1)
4623 int lowequal = ((low0 == 0 && low1 == 0)
4624 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4625 low0, 0, low1, 0)));
4626 int highequal = ((high0 == 0 && high1 == 0)
4627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4628 high0, 1, high1, 1)));
4630 /* Make range 0 be the range that starts first, or ends last if they
4631 start at the same value. Swap them if it isn't. */
4632 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4635 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4636 high1, 1, high0, 1))))
4638 temp = in0_p, in0_p = in1_p, in1_p = temp;
4639 tem = low0, low0 = low1, low1 = tem;
4640 tem = high0, high0 = high1, high1 = tem;
4643 /* Now flag two cases, whether the ranges are disjoint or whether the
4644 second range is totally subsumed in the first. Note that the tests
4645 below are simplified by the ones above. */
4646 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4647 high0, 1, low1, 0));
4648 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4649 high1, 1, high0, 1));
4651 /* We now have four cases, depending on whether we are including or
4652 excluding the two ranges. */
4655 /* If they don't overlap, the result is false. If the second range
4656	 is a subset, it is the result.  Otherwise, the range is from the start
4657 of the second to the end of the first. */
4659 in_p = 0, low = high = 0;
4661 in_p = 1, low = low1, high = high1;
4663 in_p = 1, low = low1, high = high0;
4666 else if (in0_p && ! in1_p)
4668 /* If they don't overlap, the result is the first range. If they are
4669 equal, the result is false. If the second range is a subset of the
4670 first, and the ranges begin at the same place, we go from just after
4671 the end of the second range to the end of the first. If the second
4672 range is not a subset of the first, or if it is a subset and both
4673 ranges end at the same place, the range starts at the start of the
4674 first range and ends just before the second range.
4675 Otherwise, we can't describe this as a single range. */
4677 in_p = 1, low = low0, high = high0;
4678 else if (lowequal && highequal)
4679 in_p = 0, low = high = 0;
4680 else if (subset && lowequal)
4682 low = range_successor (high1);
4687 /* We are in the weird situation where high0 > high1 but
4688 high1 has no successor. Punt. */
4692 else if (! subset || highequal)
4695 high = range_predecessor (low1);
4699 /* low0 < low1 but low1 has no predecessor. Punt. */
4707 else if (! in0_p && in1_p)
4709 /* If they don't overlap, the result is the second range. If the second
4710 is a subset of the first, the result is false. Otherwise,
4711 the range starts just after the first range and ends at the
4712 end of the second. */
4714 in_p = 1, low = low1, high = high1;
4715 else if (subset || highequal)
4716 in_p = 0, low = high = 0;
4719 low = range_successor (high0);
4724 /* high1 > high0 but high0 has no successor. Punt. */
4732 /* The case where we are excluding both ranges. Here the complex case
4733 is if they don't overlap. In that case, the only time we have a
4734 range is if they are adjacent. If the second is a subset of the
4735 first, the result is the first. Otherwise, the range to exclude
4736 starts at the beginning of the first range and ends at the end of the
4740 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4741 range_successor (high0),
4743 in_p = 0, low = low0, high = high1;
4746 /* Canonicalize - [min, x] into - [-, x]. */
4747 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4748 switch (TREE_CODE (TREE_TYPE (low0)))
4751 if (TYPE_PRECISION (TREE_TYPE (low0))
4752 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4756 if (tree_int_cst_equal (low0,
4757 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4761 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4762 && integer_zerop (low0))
4769 /* Canonicalize - [x, max] into - [x, -]. */
4770 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4771 switch (TREE_CODE (TREE_TYPE (high1)))
4774 if (TYPE_PRECISION (TREE_TYPE (high1))
4775 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4779 if (tree_int_cst_equal (high1,
4780 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4784 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4785 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4787 integer_one_node, 1)))
4794	      /* The ranges might also be adjacent between the maximum and
4795 minimum values of the given type. For
4796 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4797 return + [x + 1, y - 1]. */
4798 if (low0 == 0 && high1 == 0)
4800 low = range_successor (high0);
4801 high = range_predecessor (low1);
4802 if (low == 0 || high == 0)
4812 in_p = 0, low = low0, high = high0;
4814 in_p = 0, low = low0, high = high1;
4817 *pin_p = in_p, *plow = low, *phigh = high;
4822 /* Subroutine of fold, looking inside expressions of the form
4823 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4824	   of the COND_EXPR.  This function is also used to optimize
4825	   A op B ? C : A, by reversing the comparison first.
4827 Return a folded expression whose code is not a COND_EXPR
4828 anymore, or NULL_TREE if no folding opportunity is found. */
4831 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4833 enum tree_code comp_code = TREE_CODE (arg0);
4834 tree arg00 = TREE_OPERAND (arg0, 0);
4835 tree arg01 = TREE_OPERAND (arg0, 1);
4836 tree arg1_type = TREE_TYPE (arg1);
4842 /* If we have A op 0 ? A : -A, consider applying the following
4845 A == 0? A : -A same as -A
4846 A != 0? A : -A same as A
4847 A >= 0? A : -A same as abs (A)
4848 A > 0? A : -A same as abs (A)
4849 A <= 0? A : -A same as -abs (A)
4850 A < 0? A : -A same as -abs (A)
4852 None of these transformations work for modes with signed
4853 zeros. If A is +/-0, the first two transformations will
4854 change the sign of the result (from +0 to -0, or vice
4855 versa). The last four will fix the sign of the result,
4856 even though the original expressions could be positive or
4857 negative, depending on the sign of A.
4859 Note that all these transformations are correct if A is
4860 NaN, since the two alternatives (A and -A) are also NaNs. */
4861 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4862 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4863 ? real_zerop (arg01)
4864 : integer_zerop (arg01))
4865 && ((TREE_CODE (arg2) == NEGATE_EXPR
4866 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4867 /* In the case that A is of the form X-Y, '-A' (arg2) may
4868	             have already been folded to Y-X; check for that.  */
4869 || (TREE_CODE (arg1) == MINUS_EXPR
4870 && TREE_CODE (arg2) == MINUS_EXPR
4871 && operand_equal_p (TREE_OPERAND (arg1, 0),
4872 TREE_OPERAND (arg2, 1), 0)
4873 && operand_equal_p (TREE_OPERAND (arg1, 1),
4874 TREE_OPERAND (arg2, 0), 0))))
4879 tem = fold_convert (arg1_type, arg1);
4880 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4883 return pedantic_non_lvalue (fold_convert (type, arg1));
4886 if (flag_trapping_math)
4891 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4892 arg1 = fold_convert (signed_type_for
4893 (TREE_TYPE (arg1)), arg1);
4894 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4895 return pedantic_non_lvalue (fold_convert (type, tem));
4898 if (flag_trapping_math)
4902 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4903 arg1 = fold_convert (signed_type_for
4904 (TREE_TYPE (arg1)), arg1);
4905 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4906 return negate_expr (fold_convert (type, tem));
4908 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
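/* Illustrative instances of the table above, assuming signed zeros are
   not honored and math is non-trapping (e.g. -fno-signed-zeros
   -fno-trapping-math):

     double f1 (double x) { return x >= 0 ? x : -x; }   // abs (x)
     double f2 (double x) { return x <= 0 ? x : -x; }   // -abs (x)
     double f3 (double x) { return x == 0 ? x : -x; }   // -x
*/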
4912 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4913 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4914 both transformations are correct when A is NaN: A != 0
4915 is then true, and A == 0 is false. */
4917 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4918 && integer_zerop (arg01) && integer_zerop (arg2))
4920 if (comp_code == NE_EXPR)
4921 return pedantic_non_lvalue (fold_convert (type, arg1));
4922 else if (comp_code == EQ_EXPR)
4923 return build_int_cst (type, 0);
4926 /* Try some transformations of A op B ? A : B.
4928 A == B? A : B same as B
4929 A != B? A : B same as A
4930 A >= B? A : B same as max (A, B)
4931 A > B? A : B same as max (B, A)
4932 A <= B? A : B same as min (A, B)
4933 A < B? A : B same as min (B, A)
4935 As above, these transformations don't work in the presence
4936 of signed zeros. For example, if A and B are zeros of
4937 opposite sign, the first two transformations will change
4938 the sign of the result. In the last four, the original
4939 expressions give different results for (A=+0, B=-0) and
4940 (A=-0, B=+0), but the transformed expressions do not.
4942 The first two transformations are correct if either A or B
4943 is a NaN. In the first transformation, the condition will
4944 be false, and B will indeed be chosen. In the case of the
4945 second transformation, the condition A != B will be true,
4946 and A will be chosen.
4948 The conversions to max() and min() are not correct if B is
4949 a number and A is not. The conditions in the original
4950 expressions will be false, so all four give B. The min()
4951 and max() versions would give a NaN instead. */
4952 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4953 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4954 /* Avoid these transformations if the COND_EXPR may be used
4955 as an lvalue in the C++ front-end. PR c++/19199. */
4957 || (strcmp (lang_hooks.name, "GNU C++") != 0
4958 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4959 || ! maybe_lvalue_p (arg1)
4960 || ! maybe_lvalue_p (arg2)))
4962 tree comp_op0 = arg00;
4963 tree comp_op1 = arg01;
4964 tree comp_type = TREE_TYPE (comp_op0);
4966 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4967 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4977 return pedantic_non_lvalue (fold_convert (type, arg2));
4979 return pedantic_non_lvalue (fold_convert (type, arg1));
4984 /* In C++ a ?: expression can be an lvalue, so put the
4985 operand which will be used if they are equal first
4986 so that we can convert this back to the
4987 corresponding COND_EXPR. */
4988 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4990 comp_op0 = fold_convert (comp_type, comp_op0);
4991 comp_op1 = fold_convert (comp_type, comp_op1);
4992 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4993 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4994 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4995 return pedantic_non_lvalue (fold_convert (type, tem));
5002 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5004 comp_op0 = fold_convert (comp_type, comp_op0);
5005 comp_op1 = fold_convert (comp_type, comp_op1);
5006 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5007 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5008 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5009 return pedantic_non_lvalue (fold_convert (type, tem));
5013 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5014 return pedantic_non_lvalue (fold_convert (type, arg2));
5017 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5018 return pedantic_non_lvalue (fold_convert (type, arg1));
5021 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
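/* Illustrative instances of the table above, assuming NaNs and signed
   zeros need not be honored (e.g. -ffinite-math-only -fno-signed-zeros):

     double f1 (double a, double b) { return a < b ? a : b; }   // MIN_EXPR <b, a>
     double f2 (double a, double b) { return a >= b ? a : b; }  // MAX_EXPR <a, b>
*/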
5026 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5027 we might still be able to simplify this. For example,
5028 if C1 is one less or one more than C2, this might have started
5029 out as a MIN or MAX and been transformed by this function.
5030 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5032 if (INTEGRAL_TYPE_P (type)
5033 && TREE_CODE (arg01) == INTEGER_CST
5034 && TREE_CODE (arg2) == INTEGER_CST)
5038 /* We can replace A with C1 in this case. */
5039 arg1 = fold_convert (type, arg01);
5040 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5043 /* If C1 is C2 + 1, this is min(A, C2). */
5044 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5046 && operand_equal_p (arg01,
5047 const_binop (PLUS_EXPR, arg2,
5048 build_int_cst (type, 1), 0),
5050 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5052 fold_convert (type, arg1),
5057 /* If C1 is C2 - 1, this is min(A, C2). */
5058 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5060 && operand_equal_p (arg01,
5061 const_binop (MINUS_EXPR, arg2,
5062 build_int_cst (type, 1), 0),
5064 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5066 fold_convert (type, arg1),
5071 /* If C1 is C2 - 1, this is max(A, C2). */
5072 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5074 && operand_equal_p (arg01,
5075 const_binop (MINUS_EXPR, arg2,
5076 build_int_cst (type, 1), 0),
5078 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5080 fold_convert (type, arg1),
5085 /* If C1 is C2 + 1, this is max(A, C2). */
5086 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5088 && operand_equal_p (arg01,
5089 const_binop (PLUS_EXPR, arg2,
5090 build_int_cst (type, 1), 0),
5092 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5094 fold_convert (type, arg1),
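/* A worked instance of the constant cases above, for signed int x:
   "x < 5 ? x : 4" has C1 == 5 and C2 == 4, i.e. C1 == C2 + 1, so it
   folds to MIN_EXPR <x, 4>; likewise "x > 4 ? x : 5" has C1 == C2 - 1
   and folds to MAX_EXPR <x, 5>.  */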
5108 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5109 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5112 /* EXP is some logical combination of boolean tests. See if we can
5113 merge it into some range test. Return the new tree if so. */
5116 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5118 int or_op = (code == TRUTH_ORIF_EXPR
5119 || code == TRUTH_OR_EXPR);
5120 int in0_p, in1_p, in_p;
5121 tree low0, low1, low, high0, high1, high;
5122 bool strict_overflow_p = false;
5123 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5124 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5126 const char * const warnmsg = G_("assuming signed overflow does not occur "
5127 "when simplifying range test");
5129 /* If this is an OR operation, invert both sides; we will invert
5130 again at the end. */
5132 in0_p = ! in0_p, in1_p = ! in1_p;
5134 /* If both expressions are the same, if we can merge the ranges, and we
5135 can build the range test, return it or it inverted. If one of the
5136 ranges is always true or always false, consider it to be the same
5137 expression as the other. */
5138 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5139 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5141 && 0 != (tem = (build_range_check (type,
5143 : rhs != 0 ? rhs : integer_zero_node,
5146 if (strict_overflow_p)
5147 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5148 return or_op ? invert_truthvalue (tem) : tem;
5151	  /* On machines where branches are expensive, if this is a
5152 short-circuited branch and the underlying object on both sides
5153 is the same, make a non-short-circuit operation. */
5154 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5155 && lhs != 0 && rhs != 0
5156 && (code == TRUTH_ANDIF_EXPR
5157 || code == TRUTH_ORIF_EXPR)
5158 && operand_equal_p (lhs, rhs, 0))
5160 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5161 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5162 which cases we can't do this. */
5163 if (simple_operand_p (lhs))
5164 return build2 (code == TRUTH_ANDIF_EXPR
5165 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5168 else if (lang_hooks.decls.global_bindings_p () == 0
5169 && ! CONTAINS_PLACEHOLDER_P (lhs))
5171 tree common = save_expr (lhs);
5173 if (0 != (lhs = build_range_check (type, common,
5174 or_op ? ! in0_p : in0_p,
5176 && (0 != (rhs = build_range_check (type, common,
5177 or_op ? ! in1_p : in1_p,
5180 if (strict_overflow_p)
5181 fold_overflow_warning (warnmsg,
5182 WARN_STRICT_OVERFLOW_COMPARISON);
5183 return build2 (code == TRUTH_ANDIF_EXPR
5184 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5193 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5194 bit value. Arrange things so the extra bits will be set to zero if and
5195	   only if C is sign-extended to its full width.  If MASK is nonzero,
5196 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5199 unextend (tree c, int p, int unsignedp, tree mask)
5201 tree type = TREE_TYPE (c);
5202 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5205 if (p == modesize || unsignedp)
5208 /* We work by getting just the sign bit into the low-order bit, then
5209 into the high-order bit, then sign-extend. We then XOR that value
5211 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5212 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5214 /* We must use a signed type in order to get an arithmetic right shift.
5215 However, we must also avoid introducing accidental overflows, so that
5216 a subsequent call to integer_zerop will work. Hence we must
5217 do the type conversion here. At this point, the constant is either
5218 zero or one, and the conversion to a signed type can never overflow.
5219 We could get an overflow if this conversion is done anywhere else. */
5220 if (TYPE_UNSIGNED (type))
5221 temp = fold_convert (signed_type_for (type), temp);
5223 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5224 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5226 temp = const_binop (BIT_AND_EXPR, temp,
5227 fold_convert (TREE_TYPE (c), mask), 0);
5228 /* If necessary, convert the type back to match the type of C. */
5229 if (TYPE_UNSIGNED (type))
5230 temp = fold_convert (type, temp);
5232 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
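/* A worked example for unextend, assuming a 32-bit mode, P == 8,
   C == 0x80 and no MASK: TEMP becomes (0x80 >> 7) & 1 == 1; the two
   shifts give 1 << 31 >> 23, which arithmetically sign-extends to
   0xffffff00; and C ^ TEMP == 0xffffff80, i.e. C sign-extended from
   its low 8 bits.  */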
5235 /* Find ways of folding logical expressions of LHS and RHS:
5236 Try to merge two comparisons to the same innermost item.
5237 Look for range tests like "ch >= '0' && ch <= '9'".
5238 Look for combinations of simple terms on machines with expensive branches
5239 and evaluate the RHS unconditionally.
5241 For example, if we have p->a == 2 && p->b == 4 and we can make an
5242 object large enough to span both A and B, we can do this with a comparison
5243	   against the object ANDed with a mask.
5245 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5246 operations to do this with one comparison.
5248	   We check for both normal comparisons and the BIT_AND_EXPRs made by this
5249	   function and the one above.
5251 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5252 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5254 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5257 We return the simplified tree or 0 if no optimization is possible. */
5260 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5262 /* If this is the "or" of two comparisons, we can do something if
5263 the comparisons are NE_EXPR. If this is the "and", we can do something
5264 if the comparisons are EQ_EXPR. I.e.,
5265 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5267 WANTED_CODE is this operation code. For single bit fields, we can
5268 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5269 comparison for one-bit fields. */
5271 enum tree_code wanted_code;
5272 enum tree_code lcode, rcode;
5273 tree ll_arg, lr_arg, rl_arg, rr_arg;
5274 tree ll_inner, lr_inner, rl_inner, rr_inner;
5275 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5276 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5277 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5278 HOST_WIDE_INT lnbitsize, lnbitpos;
5279 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5280 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5281 enum machine_mode lnmode;
5282 tree ll_mask, lr_mask, rl_mask, rr_mask;
5283 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5284 tree l_const, r_const;
5285 tree lntype, result;
5286 int first_bit, end_bit;
5288 tree orig_lhs = lhs, orig_rhs = rhs;
5289 enum tree_code orig_code = code;
5291 /* Start by getting the comparison codes. Fail if anything is volatile.
5292 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5293 it were surrounded with a NE_EXPR. */
5295 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5298 lcode = TREE_CODE (lhs);
5299 rcode = TREE_CODE (rhs);
5301 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5303 lhs = build2 (NE_EXPR, truth_type, lhs,
5304 build_int_cst (TREE_TYPE (lhs), 0));
5308 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5310 rhs = build2 (NE_EXPR, truth_type, rhs,
5311 build_int_cst (TREE_TYPE (rhs), 0));
5315 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5316 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5319 ll_arg = TREE_OPERAND (lhs, 0);
5320 lr_arg = TREE_OPERAND (lhs, 1);
5321 rl_arg = TREE_OPERAND (rhs, 0);
5322 rr_arg = TREE_OPERAND (rhs, 1);
5324 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5325 if (simple_operand_p (ll_arg)
5326 && simple_operand_p (lr_arg))
5329 if (operand_equal_p (ll_arg, rl_arg, 0)
5330 && operand_equal_p (lr_arg, rr_arg, 0))
5332 result = combine_comparisons (code, lcode, rcode,
5333 truth_type, ll_arg, lr_arg);
5337 else if (operand_equal_p (ll_arg, rr_arg, 0)
5338 && operand_equal_p (lr_arg, rl_arg, 0))
5340 result = combine_comparisons (code, lcode,
5341 swap_tree_comparison (rcode),
5342 truth_type, ll_arg, lr_arg);
5348 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5349 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5351 /* If the RHS can be evaluated unconditionally and its operands are
5352 simple, it wins to evaluate the RHS unconditionally on machines
5353 with expensive branches. In this case, this isn't a comparison
5354 that can be merged. Avoid doing this if the RHS is a floating-point
5355 comparison since those can trap. */
5357 if (BRANCH_COST >= 2
5358 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5359 && simple_operand_p (rl_arg)
5360 && simple_operand_p (rr_arg))
5362 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5363 if (code == TRUTH_OR_EXPR
5364 && lcode == NE_EXPR && integer_zerop (lr_arg)
5365 && rcode == NE_EXPR && integer_zerop (rr_arg)
5366 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5367 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5368 return build2 (NE_EXPR, truth_type,
5369 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5371 build_int_cst (TREE_TYPE (ll_arg), 0));
5373 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5374 if (code == TRUTH_AND_EXPR
5375 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5376 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5377 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5378 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5379 return build2 (EQ_EXPR, truth_type,
5380 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5382 build_int_cst (TREE_TYPE (ll_arg), 0));
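/* Illustrative instances of the two conversions above, for int a and b:
   "a != 0 || b != 0" becomes "(a | b) != 0" and "a == 0 && b == 0"
   becomes "(a | b) == 0", trading a conditional branch for a cheap
   bitwise OR.  */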
5384 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5386 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5387 return build2 (code, truth_type, lhs, rhs);
5392 /* See if the comparisons can be merged. Then get all the parameters for
5395 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5396 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5400 ll_inner = decode_field_reference (ll_arg,
5401 &ll_bitsize, &ll_bitpos, &ll_mode,
5402 &ll_unsignedp, &volatilep, &ll_mask,
5404 lr_inner = decode_field_reference (lr_arg,
5405 &lr_bitsize, &lr_bitpos, &lr_mode,
5406 &lr_unsignedp, &volatilep, &lr_mask,
5408 rl_inner = decode_field_reference (rl_arg,
5409 &rl_bitsize, &rl_bitpos, &rl_mode,
5410 &rl_unsignedp, &volatilep, &rl_mask,
5412 rr_inner = decode_field_reference (rr_arg,
5413 &rr_bitsize, &rr_bitpos, &rr_mode,
5414 &rr_unsignedp, &volatilep, &rr_mask,
5417	  /* The inner operation on the lhs of each comparison must be the
5418	     same if we are to be able to do anything.
5419 Then see if we have constants. If not, the same must be true for
5421 if (volatilep || ll_inner == 0 || rl_inner == 0
5422 || ! operand_equal_p (ll_inner, rl_inner, 0))
5425 if (TREE_CODE (lr_arg) == INTEGER_CST
5426 && TREE_CODE (rr_arg) == INTEGER_CST)
5427 l_const = lr_arg, r_const = rr_arg;
5428 else if (lr_inner == 0 || rr_inner == 0
5429 || ! operand_equal_p (lr_inner, rr_inner, 0))
5432 l_const = r_const = 0;
5434 /* If either comparison code is not correct for our logical operation,
5435 fail. However, we can convert a one-bit comparison against zero into
5436 the opposite comparison against that bit being set in the field. */
5438 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5439 if (lcode != wanted_code)
5441 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5443 /* Make the left operand unsigned, since we are only interested
5444 in the value of one bit. Otherwise we are doing the wrong
5453 /* This is analogous to the code for l_const above. */
5454 if (rcode != wanted_code)
5456 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5465 /* See if we can find a mode that contains both fields being compared on
5466 the left. If we can't, fail. Otherwise, update all constants and masks
5467 to be relative to a field of that size. */
5468 first_bit = MIN (ll_bitpos, rl_bitpos);
5469 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5470 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5471 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5473 if (lnmode == VOIDmode)
5476 lnbitsize = GET_MODE_BITSIZE (lnmode);
5477 lnbitpos = first_bit & ~ (lnbitsize - 1);
5478 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5479 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5481 if (BYTES_BIG_ENDIAN)
5483 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5484 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5487 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5488 size_int (xll_bitpos), 0);
5489 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5490 size_int (xrl_bitpos), 0);
5494 l_const = fold_convert (lntype, l_const);
5495 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5496 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5497 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5498 fold_build1 (BIT_NOT_EXPR,
5502 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5504 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5509 r_const = fold_convert (lntype, r_const);
5510 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5511 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5512 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5513 fold_build1 (BIT_NOT_EXPR,
5517 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5519 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5523 /* Handle the case of comparisons with constants. If there is something in
5524 common between the masks, those bits of the constants must be the same.
5525 If not, the condition is always false. Test for this to avoid generating
5526 incorrect code below. */
5527 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5528 if (! integer_zerop (result)
5529 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5530 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5532 if (wanted_code == NE_EXPR)
5534 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5535 return constant_boolean_node (true, truth_type);
5539 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5540 return constant_boolean_node (false, truth_type);
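/* A sketch of the merging performed here, using a hypothetical layout:
   given

     struct s { unsigned int a : 4; unsigned int b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can load the byte holding both
   fields once and compare it, under the combined mask, against the
   combined constant (0x42 or 0x24, depending on bit ordering).  */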
5547 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5551 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5554 enum tree_code op_code;
5557 int consts_equal, consts_lt;
5560 STRIP_SIGN_NOPS (arg0);
5562 op_code = TREE_CODE (arg0);
5563 minmax_const = TREE_OPERAND (arg0, 1);
5564 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5565 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5566 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5567 inner = TREE_OPERAND (arg0, 0);
5569 /* If something does not permit us to optimize, return the original tree. */
5570 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5571 || TREE_CODE (comp_const) != INTEGER_CST
5572 || TREE_OVERFLOW (comp_const)
5573 || TREE_CODE (minmax_const) != INTEGER_CST
5574 || TREE_OVERFLOW (minmax_const))
5577 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5578 and GT_EXPR, doing the rest with recursive calls using logical
5582 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5584 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5587 return invert_truthvalue (tem);
5593 fold_build2 (TRUTH_ORIF_EXPR, type,
5594 optimize_minmax_comparison
5595 (EQ_EXPR, type, arg0, comp_const),
5596 optimize_minmax_comparison
5597 (GT_EXPR, type, arg0, comp_const));
5600 if (op_code == MAX_EXPR && consts_equal)
5601 /* MAX (X, 0) == 0 -> X <= 0 */
5602 return fold_build2 (LE_EXPR, type, inner, comp_const);
5604 else if (op_code == MAX_EXPR && consts_lt)
5605 /* MAX (X, 0) == 5 -> X == 5 */
5606 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5608 else if (op_code == MAX_EXPR)
5609 /* MAX (X, 0) == -1 -> false */
5610 return omit_one_operand (type, integer_zero_node, inner);
5612 else if (consts_equal)
5613 /* MIN (X, 0) == 0 -> X >= 0 */
5614 return fold_build2 (GE_EXPR, type, inner, comp_const);
5617 /* MIN (X, 0) == 5 -> false */
5618 return omit_one_operand (type, integer_zero_node, inner);
5621 /* MIN (X, 0) == -1 -> X == -1 */
5622 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5625 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5626 /* MAX (X, 0) > 0 -> X > 0
5627 MAX (X, 0) > 5 -> X > 5 */
5628 return fold_build2 (GT_EXPR, type, inner, comp_const);
5630 else if (op_code == MAX_EXPR)
5631 /* MAX (X, 0) > -1 -> true */
5632 return omit_one_operand (type, integer_one_node, inner);
5634 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5635 /* MIN (X, 0) > 0 -> false
5636 MIN (X, 0) > 5 -> false */
5637 return omit_one_operand (type, integer_zero_node, inner);
5640 /* MIN (X, 0) > -1 -> X > -1 */
5641 return fold_build2 (GT_EXPR, type, inner, comp_const);
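/* Illustrative instances of the rules above, for signed int x:
   "MAX (x, 0) == 0" folds to "x <= 0", "MAX (x, 0) == -1" folds to
   constant false, and "MIN (x, 0) > -1" folds to "x > -1".  */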
5648	/* T is an integer expression that is being multiplied or divided by, or
5649	   reduced modulo, a constant C (CODE says which operation and what kind of
5650	   divide or modulus).  See if we can eliminate that operation by folding it with
5651 other operations already in T. WIDE_TYPE, if non-null, is a type that
5652 should be used for the computation if wider than our type.
5654 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5655 (X * 2) + (Y * 4). We must, however, be assured that either the original
5656 expression would not overflow or that overflow is undefined for the type
5657 in the language in question.
5659 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5660 the machine has a multiply-accumulate insn or that this is part of an
5661 addressing calculation.
5663 If we return a non-null expression, it is an equivalent form of the
5664 original computation, but need not be in the original type.
5666	   We set *STRICT_OVERFLOW_P to true if the return value depends on
5667 signed overflow being undefined. Otherwise we do not change
5668 *STRICT_OVERFLOW_P. */
5671 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5672 bool *strict_overflow_p)
5674 /* To avoid exponential search depth, refuse to allow recursion past
5675 three levels. Beyond that (1) it's highly unlikely that we'll find
5676 something interesting and (2) we've probably processed it before
5677 when we built the inner expression. */
5686 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5693 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5694 bool *strict_overflow_p)
5696 tree type = TREE_TYPE (t);
5697 enum tree_code tcode = TREE_CODE (t);
5698 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5699 > GET_MODE_SIZE (TYPE_MODE (type)))
5700 ? wide_type : type);
5702 int same_p = tcode == code;
5703 tree op0 = NULL_TREE, op1 = NULL_TREE;
5704 bool sub_strict_overflow_p;
5706 /* Don't deal with constants of zero here; they confuse the code below. */
5707 if (integer_zerop (c))
5710 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5711 op0 = TREE_OPERAND (t, 0);
5713 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5714 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5716 /* Note that we need not handle conditional operations here since fold
5717 already handles those cases. So just do arithmetic here. */
5721 /* For a constant, we can always simplify if we are a multiply
5722 or (for divide and modulus) if it is a multiple of our constant. */
5723 if (code == MULT_EXPR
5724 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5725 return const_binop (code, fold_convert (ctype, t),
5726 fold_convert (ctype, c), 0);
5729 CASE_CONVERT: case NON_LVALUE_EXPR:
5730 /* If op0 is an expression ... */
5731 if ((COMPARISON_CLASS_P (op0)
5732 || UNARY_CLASS_P (op0)
5733 || BINARY_CLASS_P (op0)
5734 || VL_EXP_CLASS_P (op0)
5735 || EXPRESSION_CLASS_P (op0))
5736 /* ... and is unsigned, and its type is smaller than ctype,
5737 then we cannot pass through as widening. */
5738 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5739 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5740 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5741 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5742 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5743 /* ... or this is a truncation (t is narrower than op0),
5744 then we cannot pass through this narrowing. */
5745 || (GET_MODE_SIZE (TYPE_MODE (type))
5746 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5747 /* ... or signedness changes for division or modulus,
5748 then we cannot pass through this conversion. */
5749 || (code != MULT_EXPR
5750 && (TYPE_UNSIGNED (ctype)
5751 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5752 /* ... or has undefined overflow while the converted to
5753 type has not, we cannot do the operation in the inner type
5754 as that would introduce undefined overflow. */
5755 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5756 && !TYPE_OVERFLOW_UNDEFINED (type))))
5759 /* Pass the constant down and see if we can make a simplification. If
5760 we can, replace this expression with the inner simplification for
5761 possible later conversion to our or some other type. */
5762 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5763 && TREE_CODE (t2) == INTEGER_CST
5764 && !TREE_OVERFLOW (t2)
5765 && (0 != (t1 = extract_muldiv (op0, t2, code,
5767 ? ctype : NULL_TREE,
5768 strict_overflow_p))))
5773 /* If widening the type changes it from signed to unsigned, then we
5774 must avoid building ABS_EXPR itself as unsigned. */
5775 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5777 tree cstype = (*signed_type_for) (ctype);
5778 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5781 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5782 return fold_convert (ctype, t1);
5786 /* If the constant is negative, we cannot simplify this. */
5787 if (tree_int_cst_sgn (c) == -1)
5791 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5793 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5796 case MIN_EXPR: case MAX_EXPR:
5797 /* If widening the type changes the signedness, then we can't perform
5798 this optimization as that changes the result. */
5799 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5802 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5803 sub_strict_overflow_p = false;
5804 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5805 &sub_strict_overflow_p)) != 0
5806 && (t2 = extract_muldiv (op1, c, code, wide_type,
5807 &sub_strict_overflow_p)) != 0)
5809 if (tree_int_cst_sgn (c) < 0)
5810 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5811 if (sub_strict_overflow_p)
5812 *strict_overflow_p = true;
5813 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5814 fold_convert (ctype, t2));
5818 case LSHIFT_EXPR: case RSHIFT_EXPR:
5819 /* If the second operand is constant, this is a multiplication
5820	         or floor division by a power of two, so we can treat it that
5821 way unless the multiplier or divisor overflows. Signed
5822 left-shift overflow is implementation-defined rather than
5823 undefined in C90, so do not convert signed left shift into
5825 if (TREE_CODE (op1) == INTEGER_CST
5826 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5827 /* const_binop may not detect overflow correctly,
5828 so check for it explicitly here. */
5829 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5830 && TREE_INT_CST_HIGH (op1) == 0
5831 && 0 != (t1 = fold_convert (ctype,
5832 const_binop (LSHIFT_EXPR,
5835 && !TREE_OVERFLOW (t1))
5836 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5837 ? MULT_EXPR : FLOOR_DIV_EXPR,
5838 ctype, fold_convert (ctype, op0), t1),
5839 c, code, wide_type, strict_overflow_p);
5842 case PLUS_EXPR: case MINUS_EXPR:
5843 /* See if we can eliminate the operation on both sides. If we can, we
5844 can return a new PLUS or MINUS. If we can't, the only remaining
5845 cases where we can do anything are if the second operand is a
5847 sub_strict_overflow_p = false;
5848 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5849 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5850 if (t1 != 0 && t2 != 0
5851 && (code == MULT_EXPR
5852 /* If not multiplication, we can only do this if both operands
5853 are divisible by c. */
5854 || (multiple_of_p (ctype, op0, c)
5855 && multiple_of_p (ctype, op1, c))))
5857 if (sub_strict_overflow_p)
5858 *strict_overflow_p = true;
5859 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5860 fold_convert (ctype, t2));
5863 /* If this was a subtraction, negate OP1 and set it to be an addition.
5864 This simplifies the logic below. */
5865 if (tcode == MINUS_EXPR)
5866 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5868 if (TREE_CODE (op1) != INTEGER_CST)
5871	      /* If either OP1 or C is negative, this optimization is not safe for
5872 some of the division and remainder types while for others we need
5873 to change the code. */
5874 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5876 if (code == CEIL_DIV_EXPR)
5877 code = FLOOR_DIV_EXPR;
5878 else if (code == FLOOR_DIV_EXPR)
5879 code = CEIL_DIV_EXPR;
5880 else if (code != MULT_EXPR
5881 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5885 /* If it's a multiply or a division/modulus operation of a multiple
5886 of our constant, do the operation and verify it doesn't overflow. */
5887 if (code == MULT_EXPR
5888 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5890 op1 = const_binop (code, fold_convert (ctype, op1),
5891 fold_convert (ctype, c), 0);
5892 /* We allow the constant to overflow with wrapping semantics. */
5894 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5900	      /* If we have an unsigned type that is not a sizetype, we cannot widen
5901 the operation since it will change the result if the original
5902 computation overflowed. */
5903 if (TYPE_UNSIGNED (ctype)
5904 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5908 /* If we were able to eliminate our operation from the first side,
5909 apply our operation to the second side and reform the PLUS. */
5910 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5911 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5913 /* The last case is if we are a multiply. In that case, we can
5914 apply the distributive law to commute the multiply and addition
5915 if the multiplication of the constants doesn't overflow. */
5916 if (code == MULT_EXPR)
5917 return fold_build2 (tcode, ctype,
5918 fold_build2 (code, ctype,
5919 fold_convert (ctype, op0),
5920 fold_convert (ctype, c)),
5926 /* We have a special case here if we are doing something like
5927 (C * 8) % 4 since we know that's zero. */
5928 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5929 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5930 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5931 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5932 return omit_one_operand (type, integer_zero_node, op0);
5934 /* ... fall through ... */
5936 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5937 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5938 /* If we can extract our operation from the LHS, do so and return a
5939 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5940 do something only if the second operand is a constant. */
5942 && (t1 = extract_muldiv (op0, c, code, wide_type,
5943 strict_overflow_p)) != 0)
5944 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5945 fold_convert (ctype, op1));
5946 else if (tcode == MULT_EXPR && code == MULT_EXPR
5947 && (t1 = extract_muldiv (op1, c, code, wide_type,
5948 strict_overflow_p)) != 0)
5949 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5950 fold_convert (ctype, t1));
5951 else if (TREE_CODE (op1) != INTEGER_CST)
5954 /* If these are the same operation types, we can associate them
5955 assuming no overflow. */
5957 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5958 fold_convert (ctype, c), 1))
5959 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5960 TREE_INT_CST_HIGH (t1),
5961 (TYPE_UNSIGNED (ctype)
5962 && tcode != MULT_EXPR) ? -1 : 1,
5963 TREE_OVERFLOW (t1)))
5964 && !TREE_OVERFLOW (t1))
5965 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5967 /* If these operations "cancel" each other, we have the main
5968 optimizations of this pass, which occur when either constant is a
5969 multiple of the other, in which case we replace this with either an
5970	         operation of CODE or TCODE.
5972 If we have an unsigned type that is not a sizetype, we cannot do
5973 this since it will change the result if the original computation
5975 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5976 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5977 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5978 || (tcode == MULT_EXPR
5979 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5980 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5981 && code != MULT_EXPR)))
5983 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5985 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5986 *strict_overflow_p = true;
5987 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5988 fold_convert (ctype,
5989 const_binop (TRUNC_DIV_EXPR,
5992 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5994 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5995 *strict_overflow_p = true;
5996 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5997 fold_convert (ctype,
5998 const_binop (TRUNC_DIV_EXPR,
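/* A worked example for extract_muldiv: dividing "(x * 8) + (y * 16)"
   by 4, both multiplications are multiples of 4, so the PLUS_EXPR case
   above rewrites the expression to "(x * 2) + (y * 4)".  As the
   introductory comment requires, this assumes the original expression
   cannot overflow or that overflow is undefined for its type.  */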
6011 /* Return a node which has the indicated constant VALUE (either 0 or
6012 1), and is of the indicated TYPE. */
6015 constant_boolean_node (int value, tree type)
6017 if (type == integer_type_node)
6018 return value ? integer_one_node : integer_zero_node;
6019 else if (type == boolean_type_node)
6020 return value ? boolean_true_node : boolean_false_node;
6022 return build_int_cst (type, value);
6026 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6027	   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6028 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6029 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6030 COND is the first argument to CODE; otherwise (as in the example
6031 given here), it is the second argument. TYPE is the type of the
6032 original expression. Return NULL_TREE if no simplification is
6036 fold_binary_op_with_conditional_arg (enum tree_code code,
6037 tree type, tree op0, tree op1,
6038 tree cond, tree arg, int cond_first_p)
6040 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6041 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6042 tree test, true_value, false_value;
6043 tree lhs = NULL_TREE;
6044 tree rhs = NULL_TREE;
6046 /* This transformation is only worthwhile if we don't have to wrap
6047 arg in a SAVE_EXPR, and the operation can be simplified on at least
6048	     one of the branches once it's pushed inside the COND_EXPR.  */
6049 if (!TREE_CONSTANT (arg))
6052 if (TREE_CODE (cond) == COND_EXPR)
6054 test = TREE_OPERAND (cond, 0);
6055 true_value = TREE_OPERAND (cond, 1);
6056 false_value = TREE_OPERAND (cond, 2);
6057	      /* If this operand throws an exception, then it does not make
6058 sense to try to perform a logical or arithmetic operation
6060 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6062 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6067 tree testtype = TREE_TYPE (cond);
6069 true_value = constant_boolean_node (true, testtype);
6070 false_value = constant_boolean_node (false, testtype);
6073 arg = fold_convert (arg_type, arg);
6076 true_value = fold_convert (cond_type, true_value);
6078 lhs = fold_build2 (code, type, true_value, arg);
6080 lhs = fold_build2 (code, type, arg, true_value);
6084 false_value = fold_convert (cond_type, false_value);
6086 rhs = fold_build2 (code, type, false_value, arg);
6088 rhs = fold_build2 (code, type, arg, false_value);
6091 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6092 return fold_convert (type, test);
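/* An illustrative instance of the transformation above: with a constant
   operand, "1 + (flag ? 2 : 3)" is rewritten as
   "flag ? (1 + 2) : (1 + 3)", which then folds to "flag ? 3 : 4".  */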
6096 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6098 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6099 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6100 ADDEND is the same as X.
6102 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6103 and finite. The problematic cases are when X is zero, and its mode
6104 has signed zeros. In the case of rounding towards -infinity,
6105 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6106 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6109 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6111 if (!real_zerop (addend))
6114 /* Don't allow the fold with -fsignaling-nans. */
6115 if (HONOR_SNANS (TYPE_MODE (type)))
6118 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6119 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6122 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6123 if (TREE_CODE (addend) == REAL_CST
6124 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6127 /* The mode has signed zeros, and we have to honor their sign.
6128 In this situation, there is only one case we can return true for.
6129 X - 0 is the same as X unless rounding towards -infinity is
6131 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
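/* Examples of the cases above, for double x: "x + 0.0" cannot be folded
   to "x" when signed zeros are honored, since x == -0.0 would yield
   +0.0; "x - 0.0" can be folded unless rounding towards -infinity is in
   effect, where 0.0 - 0.0 yields -0.0.  */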
6134 /* Subroutine of fold() that checks comparisons of built-in math
6135 functions against real constants.
6137 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6138 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6139 is the type of the result and ARG0 and ARG1 are the operands of the
6140 comparison. ARG1 must be a TREE_REAL_CST.
6142 The function returns the constant folded tree if a simplification
6143 can be made, and NULL_TREE otherwise. */
6146 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6147 tree type, tree arg0, tree arg1)
6151 if (BUILTIN_SQRT_P (fcode))
6153 tree arg = CALL_EXPR_ARG (arg0, 0);
6154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6156 c = TREE_REAL_CST (arg1);
6157 if (REAL_VALUE_NEGATIVE (c))
6159 /* sqrt(x) < y is always false, if y is negative. */
6160 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6161 return omit_one_operand (type, integer_zero_node, arg);
6163 /* sqrt(x) > y is always true, if y is negative and we
6164 don't care about NaNs, i.e. negative values of x. */
6165 if (code == NE_EXPR || !HONOR_NANS (mode))
6166 return omit_one_operand (type, integer_one_node, arg);
6168 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6169 return fold_build2 (GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg), dconst0));
6172 else if (code == GT_EXPR || code == GE_EXPR)
6176 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6177 real_convert (&c2, mode, &c2);
6179 if (REAL_VALUE_ISINF (c2))
6181 /* sqrt(x) > y is x == +Inf, when y is very large. */
6182 if (HONOR_INFINITIES (mode))
6183 return fold_build2 (EQ_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg), c2));
6186 /* sqrt(x) > y is always false, when y is very large
6187 and we don't care about infinities. */
6188 return omit_one_operand (type, integer_zero_node, arg);
6191 /* sqrt(x) > c is the same as x > c*c. */
6192 return fold_build2 (code, type, arg,
6193 build_real (TREE_TYPE (arg), c2));
6195 else if (code == LT_EXPR || code == LE_EXPR)
6199 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6200 real_convert (&c2, mode, &c2);
6202 if (REAL_VALUE_ISINF (c2))
6204 /* sqrt(x) < y is always true, when y is a very large
6205 value and we don't care about NaNs or Infinities. */
6206 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6207 return omit_one_operand (type, integer_one_node, arg);
6209 /* sqrt(x) < y is x != +Inf when y is very large and we
6210 don't care about NaNs. */
6211 if (! HONOR_NANS (mode))
6212 return fold_build2 (NE_EXPR, type, arg,
6213 build_real (TREE_TYPE (arg), c2));
6215 /* sqrt(x) < y is x >= 0 when y is very large and we
6216 don't care about Infinities. */
6217 if (! HONOR_INFINITIES (mode))
6218 return fold_build2 (GE_EXPR, type, arg,
6219 build_real (TREE_TYPE (arg), dconst0));
6221 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6222 if (lang_hooks.decls.global_bindings_p () != 0
6223 || CONTAINS_PLACEHOLDER_P (arg))
6226 arg = save_expr (arg);
6227 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6228 fold_build2 (GE_EXPR, type, arg,
6229 build_real (TREE_TYPE (arg),
6231 fold_build2 (NE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg),
6236 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6237 if (! HONOR_NANS (mode))
6238 return fold_build2 (code, type, arg,
6239 build_real (TREE_TYPE (arg), c2));
6241 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6242 if (lang_hooks.decls.global_bindings_p () == 0
6243 && ! CONTAINS_PLACEHOLDER_P (arg))
6245 arg = save_expr (arg);
6246 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6247 fold_build2 (GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg),
6250 fold_build2 (code, type, arg,
6251 build_real (TREE_TYPE (arg),
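/* Illustrative instances of the sqrt folds above: "sqrt (x) < -1.0"
   becomes constant false, since sqrt returns either a NaN or a
   nonnegative value; and "sqrt (x) > 3.0" becomes "x > 9.0", which
   agrees even when x is negative or NaN, as both forms are then
   false.  */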
6260 /* Subroutine of fold() that optimizes comparisons against Infinities,
6261 either +Inf or -Inf.
6263 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6264 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6265 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6267 The function returns the constant folded tree if a simplification
6268 can be made, and NULL_TREE otherwise. */
6271 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6273 enum machine_mode mode;
6274 REAL_VALUE_TYPE max;
6278 mode = TYPE_MODE (TREE_TYPE (arg0));
6280 /* For negative infinity swap the sense of the comparison. */
6281 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6283 code = swap_tree_comparison (code);
6288	      /* x > +Inf is always false, if we ignore sNaNs.  */
6289 if (HONOR_SNANS (mode))
6291 return omit_one_operand (type, integer_zero_node, arg0);
6294	      /* x <= +Inf is always true, if we don't care about NaNs.  */
6295 if (! HONOR_NANS (mode))
6296 return omit_one_operand (type, integer_one_node, arg0);
6298 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6299 if (lang_hooks.decls.global_bindings_p () == 0
6300 && ! CONTAINS_PLACEHOLDER_P (arg0))
6302 arg0 = save_expr (arg0);
6303 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6309 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6310 real_maxval (&max, neg, mode);
6311 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6312 arg0, build_real (TREE_TYPE (arg0), max));
6315 /* x < +Inf is always equal to x <= DBL_MAX. */
6316 real_maxval (&max, neg, mode);
6317 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6318 arg0, build_real (TREE_TYPE (arg0), max));
6321 /* x != +Inf is always equal to !(x > DBL_MAX). */
6322 real_maxval (&max, neg, mode);
6323 if (! HONOR_NANS (mode))
6324 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6325 arg0, build_real (TREE_TYPE (arg0), max));
6327 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6329 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
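/* Illustrative instances of the folds above, for double x:
   "x >= __builtin_inf ()" becomes "x > DBL_MAX",
   "x < __builtin_inf ()" becomes "x <= DBL_MAX", and when NaNs need
   not be honored "x <= __builtin_inf ()" becomes constant true.  */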
6338 /* Subroutine of fold() that optimizes comparisons of a division by
6339 a nonzero integer constant against an integer constant, i.e.
6342 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6343 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6344	   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6346 The function returns the constant folded tree if a simplification
6347 can be made, and NULL_TREE otherwise. */
6350 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6352 tree prod, tmp, hi, lo;
6353 tree arg00 = TREE_OPERAND (arg0, 0);
6354 tree arg01 = TREE_OPERAND (arg0, 1);
6355 unsigned HOST_WIDE_INT lpart;
6356 HOST_WIDE_INT hpart;
6357 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6361 /* We have to do this the hard way to detect unsigned overflow.
6362 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6363 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6364 TREE_INT_CST_HIGH (arg01),
6365 TREE_INT_CST_LOW (arg1),
6366 TREE_INT_CST_HIGH (arg1),
6367 &lpart, &hpart, unsigned_p);
6368 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6370 neg_overflow = false;
6374 tmp = int_const_binop (MINUS_EXPR, arg01,
6375 build_int_cst (TREE_TYPE (arg01), 1), 0);
6378 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6379 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6380 TREE_INT_CST_HIGH (prod),
6381 TREE_INT_CST_LOW (tmp),
6382 TREE_INT_CST_HIGH (tmp),
6383 &lpart, &hpart, unsigned_p);
6384 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6385 -1, overflow | TREE_OVERFLOW (prod));
6387 else if (tree_int_cst_sgn (arg01) >= 0)
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1), 0);
6391 switch (tree_int_cst_sgn (arg1))
6394 neg_overflow = true;
6395 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6400 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6405 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6415 /* A negative divisor reverses the relational operators. */
6416 code = swap_tree_comparison (code);
6418 tmp = int_const_binop (PLUS_EXPR, arg01,
6419 build_int_cst (TREE_TYPE (arg01), 1), 0);
6420 switch (tree_int_cst_sgn (arg1))
6423 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6428 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6433 neg_overflow = true;
6434 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6446 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6447 return omit_one_operand (type, integer_zero_node, arg00);
6448 if (TREE_OVERFLOW (hi))
6449 return fold_build2 (GE_EXPR, type, arg00, lo);
6450 if (TREE_OVERFLOW (lo))
6451 return fold_build2 (LE_EXPR, type, arg00, hi);
6452 return build_range_check (type, arg00, 1, lo, hi);
6455 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6456 return omit_one_operand (type, integer_one_node, arg00);
6457 if (TREE_OVERFLOW (hi))
6458 return fold_build2 (LT_EXPR, type, arg00, lo);
6459 if (TREE_OVERFLOW (lo))
6460 return fold_build2 (GT_EXPR, type, arg00, hi);
6461 return build_range_check (type, arg00, 0, lo, hi);
6464 if (TREE_OVERFLOW (lo))
6466 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6467 return omit_one_operand (type, tmp, arg00);
6469 return fold_build2 (LT_EXPR, type, arg00, lo);
6472 if (TREE_OVERFLOW (hi))
6474 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6475 return omit_one_operand (type, tmp, arg00);
6477 return fold_build2 (LE_EXPR, type, arg00, hi);
6480 if (TREE_OVERFLOW (hi))
6482 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6483 return omit_one_operand (type, tmp, arg00);
6485 return fold_build2 (GT_EXPR, type, arg00, hi);
6488 if (TREE_OVERFLOW (lo))
6490 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6491 return omit_one_operand (type, tmp, arg00);
6493 return fold_build2 (GE_EXPR, type, arg00, lo);
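/* Editorial worked example (added): for unsigned X, `X / 4 == 3'
   gives prod = 12, tmp = 3 and the range [12, 15], so the EQ case
   emits the range check 12 <= X <= 15; `X / 4 > 3' folds to
   `X > 15' and `X / 4 < 3' to `X < 12'. The TREE_OVERFLOW tests
   above handle ranges that run off the end of the type. */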
6503 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6504 equality/inequality test, then return a simplified form of the test
6505 using a sign test. Otherwise return NULL. TYPE is the desired
6506 result type. */
6509 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6512 /* If this is testing a single bit, we can optimize the test. */
6513 if ((code == NE_EXPR || code == EQ_EXPR)
6514 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6515 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6517 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6518 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6519 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6521 if (arg00 != NULL_TREE
6522 /* This is only a win if casting to a signed type is cheap,
6523 i.e. when arg00's type is not a partial mode. */
6524 && TYPE_PRECISION (TREE_TYPE (arg00))
6525 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6527 tree stype = signed_type_for (TREE_TYPE (arg00));
6528 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6529 result_type, fold_convert (stype, arg00),
6530 build_int_cst (stype, 0));
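/* Editorial example (added): with 32-bit int, `(x & 0x80000000) != 0'
   masks exactly the sign bit of x's type, so it is rewritten to
   `(int) x < 0', and the `== 0' form to `(int) x >= 0'. */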
6537 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6538 equality/inequality test, then return a simplified form of
6539 the test using shifts and logical operations. Otherwise return
6540 NULL. TYPE is the desired result type. */
6543 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6546 /* If this is testing a single bit, we can optimize the test. */
6547 if ((code == NE_EXPR || code == EQ_EXPR)
6548 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6549 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6551 tree inner = TREE_OPERAND (arg0, 0);
6552 tree type = TREE_TYPE (arg0);
6553 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6554 enum machine_mode operand_mode = TYPE_MODE (type);
6556 tree signed_type, unsigned_type, intermediate_type;
6559 /* First, see if we can fold the single bit test into a sign-bit
6560 test. */
6561 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6566 /* Otherwise we have (A & C) != 0 where C is a single bit,
6567 convert that into ((A >> C2) & 1), where C2 = log2(C).
6568 Similarly for (A & C) == 0. */
6570 /* If INNER is a right shift of a constant and it plus BITNUM does
6571 not overflow, adjust BITNUM and INNER. */
6572 if (TREE_CODE (inner) == RSHIFT_EXPR
6573 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6574 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6575 && bitnum < TYPE_PRECISION (type)
6576 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6577 TYPE_PRECISION (type) - bitnum))
6579 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6580 inner = TREE_OPERAND (inner, 0);
6583 /* If we are going to be able to omit the AND below, we must do our
6584 operations as unsigned. If we must use the AND, we have a choice.
6585 Normally unsigned is faster, but for some machines signed is. */
6586 #ifdef LOAD_EXTEND_OP
6587 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6588 && !flag_syntax_only) ? 0 : 1;
6593 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6594 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6595 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6596 inner = fold_convert (intermediate_type, inner);
6599 inner = build2 (RSHIFT_EXPR, intermediate_type,
6600 inner, size_int (bitnum));
6602 one = build_int_cst (intermediate_type, 1);
6604 if (code == EQ_EXPR)
6605 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6607 /* Put the AND last so it can combine with more things. */
6608 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6610 /* Make sure to return the proper type. */
6611 inner = fold_convert (result_type, inner);
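/* Editorial example (added): `(x & 8) != 0' has C2 = log2 (8) = 3
   and becomes `(x >> 3) & 1'; the `== 0' form XORs in the constant
   one first, yielding `((x >> 3) ^ 1) & 1'. */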
6618 /* Check whether we are allowed to reorder operands arg0 and arg1,
6619 such that the evaluation of arg1 occurs before arg0. */
6622 reorder_operands_p (const_tree arg0, const_tree arg1)
6624 if (! flag_evaluation_order)
6626 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6628 return ! TREE_SIDE_EFFECTS (arg0)
6629 && ! TREE_SIDE_EFFECTS (arg1);
6632 /* Test whether it is preferable to swap two operands, ARG0 and
6633 ARG1, for example because ARG0 is an integer constant and ARG1
6634 isn't. If REORDER is true, only recommend swapping if we can
6635 evaluate the operands in reverse order. */
6638 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6640 STRIP_SIGN_NOPS (arg0);
6641 STRIP_SIGN_NOPS (arg1);
6643 if (TREE_CODE (arg1) == INTEGER_CST)
6645 if (TREE_CODE (arg0) == INTEGER_CST)
6648 if (TREE_CODE (arg1) == REAL_CST)
6650 if (TREE_CODE (arg0) == REAL_CST)
6653 if (TREE_CODE (arg1) == FIXED_CST)
6655 if (TREE_CODE (arg0) == FIXED_CST)
6658 if (TREE_CODE (arg1) == COMPLEX_CST)
6660 if (TREE_CODE (arg0) == COMPLEX_CST)
6663 if (TREE_CONSTANT (arg1))
6665 if (TREE_CONSTANT (arg0))
6671 if (reorder && flag_evaluation_order
6672 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6675 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6676 for commutative and comparison operators. Ensuring a canonical
6677 form allows the optimizers to find additional redundancies without
6678 having to explicitly check for both orderings. */
6679 if (TREE_CODE (arg0) == SSA_NAME
6680 && TREE_CODE (arg1) == SSA_NAME
6681 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6684 /* Put SSA_NAMEs last. */
6685 if (TREE_CODE (arg1) == SSA_NAME)
6687 if (TREE_CODE (arg0) == SSA_NAME)
6690 /* Put variables last. */
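/* Editorial note (added): the net effect of the ordering above is
   that constants sort after variables and SSA names sort by
   version, so a caller such as fold_comparison canonicalizes
   `5 < x' into `x > 5' via swap_tree_comparison. */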
6699 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6700 ARG0 is extended to a wider type. */
6703 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6705 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6707 tree shorter_type, outer_type;
6711 if (arg0_unw == arg0)
6713 shorter_type = TREE_TYPE (arg0_unw);
6715 #ifdef HAVE_canonicalize_funcptr_for_compare
6716 /* Disable this optimization if we're casting a function pointer
6717 type on targets that require function pointer canonicalization. */
6718 if (HAVE_canonicalize_funcptr_for_compare
6719 && TREE_CODE (shorter_type) == POINTER_TYPE
6720 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6724 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6727 arg1_unw = get_unwidened (arg1, NULL_TREE);
6729 /* If possible, express the comparison in the shorter mode. */
6730 if ((code == EQ_EXPR || code == NE_EXPR
6731 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6732 && (TREE_TYPE (arg1_unw) == shorter_type
6733 || (TYPE_PRECISION (shorter_type)
6734 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6735 || ((TYPE_PRECISION (shorter_type)
6736 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6737 && (TYPE_UNSIGNED (shorter_type)
6738 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6739 || (TREE_CODE (arg1_unw) == INTEGER_CST
6740 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6741 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6742 && int_fits_type_p (arg1_unw, shorter_type))))
6743 return fold_build2 (code, type, arg0_unw,
6744 fold_convert (shorter_type, arg1_unw));
6746 if (TREE_CODE (arg1_unw) != INTEGER_CST
6747 || TREE_CODE (shorter_type) != INTEGER_TYPE
6748 || !int_fits_type_p (arg1_unw, shorter_type))
6751 /* If we are comparing with an integer that does not fit into the range
6752 of the shorter type, the result is known. */
6753 outer_type = TREE_TYPE (arg1_unw);
6754 min = lower_bound_in_type (outer_type, shorter_type);
6755 max = upper_bound_in_type (outer_type, shorter_type);
6757 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6759 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6766 return omit_one_operand (type, integer_zero_node, arg0);
6771 return omit_one_operand (type, integer_one_node, arg0);
6777 return omit_one_operand (type, integer_one_node, arg0);
6779 return omit_one_operand (type, integer_zero_node, arg0);
6784 return omit_one_operand (type, integer_zero_node, arg0);
6786 return omit_one_operand (type, integer_one_node, arg0);
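/* Editorial example (added): if `unsigned char c' is compared as
   `(int) c > 300', the constant 300 does not fit in the shorter
   type, so the bounds decide the result: c <= 255 < 300 and the
   comparison folds to constant false. */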
6795 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6796 ARG0 just the signedness is changed. */
6799 fold_sign_changed_comparison (enum tree_code code, tree type,
6800 tree arg0, tree arg1)
6803 tree inner_type, outer_type;
6805 if (!CONVERT_EXPR_P (arg0))
6808 outer_type = TREE_TYPE (arg0);
6809 arg0_inner = TREE_OPERAND (arg0, 0);
6810 inner_type = TREE_TYPE (arg0_inner);
6812 #ifdef HAVE_canonicalize_funcptr_for_compare
6813 /* Disable this optimization if we're casting a function pointer
6814 type on targets that require function pointer canonicalization. */
6815 if (HAVE_canonicalize_funcptr_for_compare
6816 && TREE_CODE (inner_type) == POINTER_TYPE
6817 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6824 /* If the conversion is from an integral subtype to its basetype,
6825 leave it alone. */
6826 if (TREE_TYPE (inner_type) == outer_type)
6829 if (TREE_CODE (arg1) != INTEGER_CST
6830 && !(CONVERT_EXPR_P (arg1)
6831 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6834 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6839 if (TREE_CODE (arg1) == INTEGER_CST)
6840 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6841 TREE_INT_CST_HIGH (arg1), 0,
6842 TREE_OVERFLOW (arg1));
6844 arg1 = fold_convert (inner_type, arg1);
6846 return fold_build2 (code, type, arg0_inner, arg1);
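/* Editorial example (added): for `unsigned int u', the test
   `(int) u == 5' only changes the signedness of the operand, so it
   folds to `u == 5U'; the constant is refitted into the inner type
   by force_fit_type_double above. */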
6849 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6850 the step of the array. Reconstructs s and delta in the case of s * delta
6851 being an integer constant (and thus already folded).
6852 ADDR is the address. MULT is the multiplicative expression.
6853 If the function succeeds, the new address expression is returned. Otherwise
6854 NULL_TREE is returned. */
6857 try_move_mult_to_index (tree addr, tree op1)
6859 tree s, delta, step;
6860 tree ref = TREE_OPERAND (addr, 0), pref;
6865 /* Strip the nops that might be added when converting op1 to sizetype. */
6868 /* Canonicalize op1 into a possibly non-constant delta
6869 and an INTEGER_CST s. */
6870 if (TREE_CODE (op1) == MULT_EXPR)
6872 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6877 if (TREE_CODE (arg0) == INTEGER_CST)
6882 else if (TREE_CODE (arg1) == INTEGER_CST)
6890 else if (TREE_CODE (op1) == INTEGER_CST)
6897 /* Treat op1 as delta * 1. */
6899 s = integer_one_node;
6902 for (;; ref = TREE_OPERAND (ref, 0))
6904 if (TREE_CODE (ref) == ARRAY_REF)
6906 /* Remember if this was a multi-dimensional array. */
6907 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6910 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6914 step = array_ref_element_size (ref);
6915 if (TREE_CODE (step) != INTEGER_CST)
6920 if (! tree_int_cst_equal (step, s))
6925 /* Check whether delta is a multiple of step. */
6926 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6932 /* Only fold here if we can verify we do not overflow one
6933 dimension of a multi-dimensional array. */
6938 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6939 || !INTEGRAL_TYPE_P (itype)
6940 || !TYPE_MAX_VALUE (itype)
6941 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6944 tmp = fold_binary (PLUS_EXPR, itype,
6945 fold_convert (itype,
6946 TREE_OPERAND (ref, 1)),
6947 fold_convert (itype, delta));
6949 || TREE_CODE (tmp) != INTEGER_CST
6950 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6959 if (!handled_component_p (ref))
6963 /* We found a suitable array reference. So copy everything up to it,
6964 and replace the index. */
6966 pref = TREE_OPERAND (addr, 0);
6967 ret = copy_node (pref);
6972 pref = TREE_OPERAND (pref, 0);
6973 TREE_OPERAND (pos, 0) = copy_node (pref);
6974 pos = TREE_OPERAND (pos, 0);
6977 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6978 fold_convert (itype,
6979 TREE_OPERAND (pos, 1)),
6980 fold_convert (itype, delta));
6982 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
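/* Editorial example (added): for `int a[N]' with element size 4,
   `&a[i] p+ 4 * j' matches the array step and is rewritten to
   `&a[i + j]'; a constant offset such as `p+ 8' is instead divided
   by the step, giving `&a[i + 2]'. */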
6986 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6987 means A >= Y && A != MAX, but in this case we know that
6988 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6991 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6993 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6995 if (TREE_CODE (bound) == LT_EXPR)
6996 a = TREE_OPERAND (bound, 0);
6997 else if (TREE_CODE (bound) == GT_EXPR)
6998 a = TREE_OPERAND (bound, 1);
7002 typea = TREE_TYPE (a);
7003 if (!INTEGRAL_TYPE_P (typea)
7004 && !POINTER_TYPE_P (typea))
7007 if (TREE_CODE (ineq) == LT_EXPR)
7009 a1 = TREE_OPERAND (ineq, 1);
7010 y = TREE_OPERAND (ineq, 0);
7012 else if (TREE_CODE (ineq) == GT_EXPR)
7014 a1 = TREE_OPERAND (ineq, 0);
7015 y = TREE_OPERAND (ineq, 1);
7020 if (TREE_TYPE (a1) != typea)
7023 if (POINTER_TYPE_P (typea))
7025 /* Convert the pointer types into integers before taking the difference. */
7026 tree ta = fold_convert (ssizetype, a);
7027 tree ta1 = fold_convert (ssizetype, a1);
7028 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7031 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7033 if (!diff || !integer_onep (diff))
7036 return fold_build2 (GE_EXPR, type, a, y);
7039 /* Fold a sum or difference of at least one multiplication.
7040 Returns the folded tree or NULL if no simplification could be made. */
7043 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7045 tree arg00, arg01, arg10, arg11;
7046 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7048 /* (A * C) +- (B * C) -> (A+-B) * C.
7049 (A * C) +- A -> A * (C+-1).
7050 We are most concerned about the case where C is a constant,
7051 but other combinations show up during loop reduction. Since
7052 it is not difficult, try all four possibilities. */
7054 if (TREE_CODE (arg0) == MULT_EXPR)
7056 arg00 = TREE_OPERAND (arg0, 0);
7057 arg01 = TREE_OPERAND (arg0, 1);
7059 else if (TREE_CODE (arg0) == INTEGER_CST)
7061 arg00 = build_one_cst (type);
7066 /* We cannot generate constant 1 for fract. */
7067 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7070 arg01 = build_one_cst (type);
7072 if (TREE_CODE (arg1) == MULT_EXPR)
7074 arg10 = TREE_OPERAND (arg1, 0);
7075 arg11 = TREE_OPERAND (arg1, 1);
7077 else if (TREE_CODE (arg1) == INTEGER_CST)
7079 arg10 = build_one_cst (type);
7084 /* We cannot generate constant 1 for fract. */
7085 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7088 arg11 = build_one_cst (type);
7092 if (operand_equal_p (arg01, arg11, 0))
7093 same = arg01, alt0 = arg00, alt1 = arg10;
7094 else if (operand_equal_p (arg00, arg10, 0))
7095 same = arg00, alt0 = arg01, alt1 = arg11;
7096 else if (operand_equal_p (arg00, arg11, 0))
7097 same = arg00, alt0 = arg01, alt1 = arg10;
7098 else if (operand_equal_p (arg01, arg10, 0))
7099 same = arg01, alt0 = arg00, alt1 = arg11;
7101 /* No identical multiplicands; see if we can find a common
7102 power-of-two factor in non-power-of-two multiplies. This
7103 can help in multi-dimensional array access. */
7104 else if (host_integerp (arg01, 0)
7105 && host_integerp (arg11, 0))
7107 HOST_WIDE_INT int01, int11, tmp;
7110 int01 = TREE_INT_CST_LOW (arg01);
7111 int11 = TREE_INT_CST_LOW (arg11);
7113 /* Move min of absolute values to int11. */
7114 if ((int01 >= 0 ? int01 : -int01)
7115 < (int11 >= 0 ? int11 : -int11))
7117 tmp = int01, int01 = int11, int11 = tmp;
7118 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7125 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7127 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7128 build_int_cst (TREE_TYPE (arg00),
7133 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7138 return fold_build2 (MULT_EXPR, type,
7139 fold_build2 (code, type,
7140 fold_convert (type, alt0),
7141 fold_convert (type, alt1)),
7142 fold_convert (type, same));
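/* Editorial examples (added): `a * 4 + b * 4' folds to
   `(a + b) * 4' through the identical-multiplicand cases;
   `x * 3 + x' uses the synthesized constant one to become `x * 4';
   and the power-of-two path turns `a * 12 + b * 4' into
   `(a * 3 + b) * 4'. */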
7147 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7153 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7155 tree type = TREE_TYPE (expr);
7156 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7157 int byte, offset, word, words;
7158 unsigned char value;
7160 if (total_bytes > len)
7162 words = total_bytes / UNITS_PER_WORD;
7164 for (byte = 0; byte < total_bytes; byte++)
7166 int bitpos = byte * BITS_PER_UNIT;
7167 if (bitpos < HOST_BITS_PER_WIDE_INT)
7168 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7170 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7171 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7173 if (total_bytes > UNITS_PER_WORD)
7175 word = byte / UNITS_PER_WORD;
7176 if (WORDS_BIG_ENDIAN)
7177 word = (words - 1) - word;
7178 offset = word * UNITS_PER_WORD;
7179 if (BYTES_BIG_ENDIAN)
7180 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7182 offset += byte % UNITS_PER_WORD;
7185 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7186 ptr[offset] = value;
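/* Editorial example (added): encoding the 32-bit constant
   0x01020304 stores the bytes {04, 03, 02, 01} on a little-endian
   target and {01, 02, 03, 04} on a big-endian one; the word
   shuffling above only matters when the value is wider than
   UNITS_PER_WORD. */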
7192 /* Subroutine of native_encode_expr. Encode the REAL_CST
7193 specified by EXPR into the buffer PTR of length LEN bytes.
7194 Return the number of bytes placed in the buffer, or zero
7195 upon failure. */
7198 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7200 tree type = TREE_TYPE (expr);
7201 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7202 int byte, offset, word, words, bitpos;
7203 unsigned char value;
7205 /* There are always 32 bits in each long, no matter the size of
7206 the host's long. We handle floating point representations with
7207 up to 192 bits. */
7210 if (total_bytes > len)
7212 words = 32 / UNITS_PER_WORD;
7214 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7216 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7217 bitpos += BITS_PER_UNIT)
7219 byte = (bitpos / BITS_PER_UNIT) & 3;
7220 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7222 if (UNITS_PER_WORD < 4)
7224 word = byte / UNITS_PER_WORD;
7225 if (WORDS_BIG_ENDIAN)
7226 word = (words - 1) - word;
7227 offset = word * UNITS_PER_WORD;
7228 if (BYTES_BIG_ENDIAN)
7229 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7231 offset += byte % UNITS_PER_WORD;
7234 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7235 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7240 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7241 specified by EXPR into the buffer PTR of length LEN bytes.
7242 Return the number of bytes placed in the buffer, or zero
7243 upon failure. */
7246 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7251 part = TREE_REALPART (expr);
7252 rsize = native_encode_expr (part, ptr, len);
7255 part = TREE_IMAGPART (expr);
7256 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7259 return rsize + isize;
7263 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7264 specified by EXPR into the buffer PTR of length LEN bytes.
7265 Return the number of bytes placed in the buffer, or zero
7266 upon failure. */
7269 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7271 int i, size, offset, count;
7272 tree itype, elem, elements;
7275 elements = TREE_VECTOR_CST_ELTS (expr);
7276 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7277 itype = TREE_TYPE (TREE_TYPE (expr));
7278 size = GET_MODE_SIZE (TYPE_MODE (itype));
7279 for (i = 0; i < count; i++)
7283 elem = TREE_VALUE (elements);
7284 elements = TREE_CHAIN (elements);
7291 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7296 if (offset + size > len)
7298 memset (ptr+offset, 0, size);
7306 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7307 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7308 buffer PTR of length LEN bytes. Return the number of bytes
7309 placed in the buffer, or zero upon failure. */
7312 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7314 switch (TREE_CODE (expr))
7317 return native_encode_int (expr, ptr, len);
7320 return native_encode_real (expr, ptr, len);
7323 return native_encode_complex (expr, ptr, len);
7326 return native_encode_vector (expr, ptr, len);
7334 /* Subroutine of native_interpret_expr. Interpret the contents of
7335 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7336 If the buffer cannot be interpreted, return NULL_TREE. */
7339 native_interpret_int (tree type, const unsigned char *ptr, int len)
7341 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7342 int byte, offset, word, words;
7343 unsigned char value;
7344 unsigned HOST_WIDE_INT lo = 0;
7345 HOST_WIDE_INT hi = 0;
7347 if (total_bytes > len)
7349 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7351 words = total_bytes / UNITS_PER_WORD;
7353 for (byte = 0; byte < total_bytes; byte++)
7355 int bitpos = byte * BITS_PER_UNIT;
7356 if (total_bytes > UNITS_PER_WORD)
7358 word = byte / UNITS_PER_WORD;
7359 if (WORDS_BIG_ENDIAN)
7360 word = (words - 1) - word;
7361 offset = word * UNITS_PER_WORD;
7362 if (BYTES_BIG_ENDIAN)
7363 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7365 offset += byte % UNITS_PER_WORD;
7368 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7369 value = ptr[offset];
7371 if (bitpos < HOST_BITS_PER_WIDE_INT)
7372 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7374 hi |= (unsigned HOST_WIDE_INT) value
7375 << (bitpos - HOST_BITS_PER_WIDE_INT);
7378 return build_int_cst_wide_type (type, lo, hi);
7382 /* Subroutine of native_interpret_expr. Interpret the contents of
7383 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7384 If the buffer cannot be interpreted, return NULL_TREE. */
7387 native_interpret_real (tree type, const unsigned char *ptr, int len)
7389 enum machine_mode mode = TYPE_MODE (type);
7390 int total_bytes = GET_MODE_SIZE (mode);
7391 int byte, offset, word, words, bitpos;
7392 unsigned char value;
7393 /* There are always 32 bits in each long, no matter the size of
7394 the host's long. We handle floating point representations with
7395 up to 192 bits. */
7399 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7400 if (total_bytes > len || total_bytes > 24)
7402 words = 32 / UNITS_PER_WORD;
7404 memset (tmp, 0, sizeof (tmp));
7405 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7406 bitpos += BITS_PER_UNIT)
7408 byte = (bitpos / BITS_PER_UNIT) & 3;
7409 if (UNITS_PER_WORD < 4)
7411 word = byte / UNITS_PER_WORD;
7412 if (WORDS_BIG_ENDIAN)
7413 word = (words - 1) - word;
7414 offset = word * UNITS_PER_WORD;
7415 if (BYTES_BIG_ENDIAN)
7416 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7418 offset += byte % UNITS_PER_WORD;
7421 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7422 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7424 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7427 real_from_target (&r, tmp, mode);
7428 return build_real (type, r);
7432 /* Subroutine of native_interpret_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7434 If the buffer cannot be interpreted, return NULL_TREE. */
7437 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7439 tree etype, rpart, ipart;
7442 etype = TREE_TYPE (type);
7443 size = GET_MODE_SIZE (TYPE_MODE (etype));
7446 rpart = native_interpret_expr (etype, ptr, size);
7449 ipart = native_interpret_expr (etype, ptr+size, size);
7452 return build_complex (type, rpart, ipart);
7456 /* Subroutine of native_interpret_expr. Interpret the contents of
7457 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7458 If the buffer cannot be interpreted, return NULL_TREE. */
7461 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7463 tree etype, elem, elements;
7466 etype = TREE_TYPE (type);
7467 size = GET_MODE_SIZE (TYPE_MODE (etype));
7468 count = TYPE_VECTOR_SUBPARTS (type);
7469 if (size * count > len)
7472 elements = NULL_TREE;
7473 for (i = count - 1; i >= 0; i--)
7475 elem = native_interpret_expr (etype, ptr+(i*size), size);
7478 elements = tree_cons (NULL_TREE, elem, elements);
7480 return build_vector (type, elements);
7484 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a constant of type TYPE. For
7486 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7487 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7488 return NULL_TREE. */
7491 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7493 switch (TREE_CODE (type))
7498 return native_interpret_int (type, ptr, len);
7501 return native_interpret_real (type, ptr, len);
7504 return native_interpret_complex (type, ptr, len);
7507 return native_interpret_vector (type, ptr, len);
7515 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7516 TYPE at compile-time. If we're unable to perform the conversion
7517 return NULL_TREE. */
7520 fold_view_convert_expr (tree type, tree expr)
7522 /* We support up to 512-bit values (for V8DFmode). */
7523 unsigned char buffer[64];
7526 /* Check that the host and target are sane. */
7527 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7530 len = native_encode_expr (expr, buffer, sizeof (buffer));
7534 return native_interpret_expr (type, buffer, len);
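/* Editorial sketch (added; illustrative usage, not part of GCC):
   the encode/interpret pair composes exactly as above, e.g. for a
   constant bit cast such as VIEW_CONVERT_EXPR<int>(1.0f), which on
   an IEEE single-precision target reads back as 0x3f800000:

     unsigned char buf[64];
     int n = native_encode_expr (expr, buf, sizeof buf);
     tree folded = n ? native_interpret_expr (type, buf, n)
                     : NULL_TREE;  */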
7537 /* Build an expression for the address of T. Folds away INDIRECT_REF
7538 to avoid confusing the gimplify process. When IN_FOLD is true
7539 avoid modifications of T. */
7542 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7544 /* The size of the object is not relevant when talking about its address. */
7545 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7546 t = TREE_OPERAND (t, 0);
7548 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7549 if (TREE_CODE (t) == INDIRECT_REF
7550 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7552 t = TREE_OPERAND (t, 0);
7554 if (TREE_TYPE (t) != ptrtype)
7555 t = build1 (NOP_EXPR, ptrtype, t);
7561 while (handled_component_p (base))
7562 base = TREE_OPERAND (base, 0);
7565 TREE_ADDRESSABLE (base) = 1;
7567 t = build1 (ADDR_EXPR, ptrtype, t);
7570 t = build1 (ADDR_EXPR, ptrtype, t);
7575 /* Build an expression for the address of T with type PTRTYPE. This
7576 function modifies the input parameter 'T' by sometimes setting the
7577 TREE_ADDRESSABLE flag. */
7580 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7582 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7585 /* Build an expression for the address of T. This function modifies
7586 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7587 flag. When called from fold functions, use fold_addr_expr instead. */
7590 build_fold_addr_expr (tree t)
7592 return build_fold_addr_expr_with_type_1 (t,
7593 build_pointer_type (TREE_TYPE (t)),
7597 /* Same as build_fold_addr_expr, builds an expression for the address
7598 of T, but avoids touching the input node 't'. Fold functions
7599 should use this version. */
7602 fold_addr_expr (tree t)
7604 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7606 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7609 /* Fold a unary expression of code CODE and type TYPE with operand
7610 OP0. Return the folded expression if folding is successful.
7611 Otherwise, return NULL_TREE. */
7614 fold_unary (enum tree_code code, tree type, tree op0)
7618 enum tree_code_class kind = TREE_CODE_CLASS (code);
7620 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7621 && TREE_CODE_LENGTH (code) == 1);
7626 if (code == NOP_EXPR || code == CONVERT_EXPR
7627 || code == FLOAT_EXPR || code == ABS_EXPR)
7629 /* Don't use STRIP_NOPS, because signedness of argument type
7630 matters. */
7631 STRIP_SIGN_NOPS (arg0);
7635 /* Strip any conversions that don't change the mode. This
7636 is safe for every expression, except for a comparison
7637 expression because its signedness is derived from its
7638 operands.
7640 Note that this is done as an internal manipulation within
7641 the constant folder, in order to find the simplest
7642 representation of the arguments so that their form can be
7643 studied. In any case, the appropriate type conversions
7644 should be put back in the tree that will get out of the
7645 constant folder. */
7650 if (TREE_CODE_CLASS (code) == tcc_unary)
7652 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7653 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7654 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7655 else if (TREE_CODE (arg0) == COND_EXPR)
7657 tree arg01 = TREE_OPERAND (arg0, 1);
7658 tree arg02 = TREE_OPERAND (arg0, 2);
7659 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7660 arg01 = fold_build1 (code, type, arg01);
7661 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7662 arg02 = fold_build1 (code, type, arg02);
7663 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7666 /* If this was a conversion, and all we did was to move it
7667 inside the COND_EXPR, bring it back out. But leave it if
7668 it is a conversion from integer to integer and the
7669 result precision is no wider than a word since such a
7670 conversion is cheap and may be optimized away by combine,
7671 while it couldn't if it were outside the COND_EXPR. Then return
7672 so we don't get into an infinite recursion loop taking the
7673 conversion out and then back in. */
7675 if ((code == NOP_EXPR || code == CONVERT_EXPR
7676 || code == NON_LVALUE_EXPR)
7677 && TREE_CODE (tem) == COND_EXPR
7678 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7679 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7680 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7681 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7682 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7683 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7684 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7686 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7687 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7688 || flag_syntax_only))
7689 tem = build1 (code, type,
7691 TREE_TYPE (TREE_OPERAND
7692 (TREE_OPERAND (tem, 1), 0)),
7693 TREE_OPERAND (tem, 0),
7694 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7695 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7698 else if (COMPARISON_CLASS_P (arg0))
7700 if (TREE_CODE (type) == BOOLEAN_TYPE)
7702 arg0 = copy_node (arg0);
7703 TREE_TYPE (arg0) = type;
7706 else if (TREE_CODE (type) != INTEGER_TYPE)
7707 return fold_build3 (COND_EXPR, type, arg0,
7708 fold_build1 (code, type,
7710 fold_build1 (code, type,
7711 integer_zero_node));
7718 /* Re-association barriers around constants and other re-association
7719 barriers can be removed. */
7720 if (CONSTANT_CLASS_P (op0)
7721 || TREE_CODE (op0) == PAREN_EXPR)
7722 return fold_convert (type, op0);
7727 case FIX_TRUNC_EXPR:
7728 if (TREE_TYPE (op0) == type)
7731 /* If we have (type) (a CMP b) and type is an integral type, return
7732 new expression involving the new type. */
7733 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7734 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7735 TREE_OPERAND (op0, 1));
7737 /* Handle cases of two conversions in a row. */
7738 if (CONVERT_EXPR_P (op0))
7740 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7741 tree inter_type = TREE_TYPE (op0);
7742 int inside_int = INTEGRAL_TYPE_P (inside_type);
7743 int inside_ptr = POINTER_TYPE_P (inside_type);
7744 int inside_float = FLOAT_TYPE_P (inside_type);
7745 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7746 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7747 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7748 int inter_int = INTEGRAL_TYPE_P (inter_type);
7749 int inter_ptr = POINTER_TYPE_P (inter_type);
7750 int inter_float = FLOAT_TYPE_P (inter_type);
7751 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7752 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7753 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7754 int final_int = INTEGRAL_TYPE_P (type);
7755 int final_ptr = POINTER_TYPE_P (type);
7756 int final_float = FLOAT_TYPE_P (type);
7757 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7758 unsigned int final_prec = TYPE_PRECISION (type);
7759 int final_unsignedp = TYPE_UNSIGNED (type);
7761 /* In addition to the cases of two conversions in a row
7762 handled below, if we are converting something to its own
7763 type via an object of identical or wider precision, neither
7764 conversion is needed. */
7765 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7766 && (((inter_int || inter_ptr) && final_int)
7767 || (inter_float && final_float))
7768 && inter_prec >= final_prec)
7769 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7771 /* Likewise, if the intermediate and final types are either both
7772 float or both integer, we don't need the middle conversion if
7773 it is wider than the final type and doesn't change the signedness
7774 (for integers). Avoid this if the final type is a pointer
7775 since then we sometimes need the inner conversion. Likewise if
7776 the outer has a precision not equal to the size of its mode. */
7777 if (((inter_int && inside_int)
7778 || (inter_float && inside_float)
7779 || (inter_vec && inside_vec))
7780 && inter_prec >= inside_prec
7781 && (inter_float || inter_vec
7782 || inter_unsignedp == inside_unsignedp)
7783 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7784 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7786 && (! final_vec || inter_prec == inside_prec))
7787 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7789 /* If we have a sign-extension of a zero-extended value, we can
7790 replace that by a single zero-extension. */
7791 if (inside_int && inter_int && final_int
7792 && inside_prec < inter_prec && inter_prec < final_prec
7793 && inside_unsignedp && !inter_unsignedp)
7794 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7796 /* Two conversions in a row are not needed unless:
7797 - some conversion is floating-point (overstrict for now), or
7798 - some conversion is a vector (overstrict for now), or
7799 - the intermediate type is narrower than both initial and
7800 final types, or
7801 - the intermediate type and innermost type differ in signedness,
7802 and the outermost type is wider than the intermediate, or
7803 - the initial type is a pointer type and the precisions of the
7804 intermediate and final types differ, or
7805 - the final type is a pointer type and the precisions of the
7806 initial and intermediate types differ. */
7807 if (! inside_float && ! inter_float && ! final_float
7808 && ! inside_vec && ! inter_vec && ! final_vec
7809 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7810 && ! (inside_int && inter_int
7811 && inter_unsignedp != inside_unsignedp
7812 && inter_prec < final_prec)
7813 && ((inter_unsignedp && inter_prec > inside_prec)
7814 == (final_unsignedp && final_prec > inter_prec))
7815 && ! (inside_ptr && inter_prec != final_prec)
7816 && ! (final_ptr && inside_prec != inter_prec)
7817 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7818 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7819 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
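/* Editorial examples for the conversion rules above (added):
   `(int) (long) i' with int i drops the widening entirely;
   `(unsigned int) (short) (unsigned char) c' collapses to a single
   zero-extension; `(int) (float) d' with double d must keep both
   steps because the intermediate rounding is observable. */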
7822 /* Handle (T *)&A.B.C for A being of type T and B and C
7823 living at offset zero. This occurs frequently in
7824 C++ upcasting and then accessing the base. */
7825 if (TREE_CODE (op0) == ADDR_EXPR
7826 && POINTER_TYPE_P (type)
7827 && handled_component_p (TREE_OPERAND (op0, 0)))
7829 HOST_WIDE_INT bitsize, bitpos;
7831 enum machine_mode mode;
7832 int unsignedp, volatilep;
7833 tree base = TREE_OPERAND (op0, 0);
7834 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7835 &mode, &unsignedp, &volatilep, false);
7836 /* If the reference was to a (constant) zero offset, we can use
7837 the address of the base if it has the same base type
7838 as the result type. */
7839 if (! offset && bitpos == 0
7840 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7841 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7842 return fold_convert (type, fold_addr_expr (base));
7845 if ((TREE_CODE (op0) == MODIFY_EXPR
7846 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7847 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7848 /* Detect assigning a bitfield. */
7849 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7851 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7853 /* Don't leave an assignment inside a conversion
7854 unless assigning a bitfield. */
7855 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7856 /* First do the assignment, then return converted constant. */
7857 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7858 TREE_NO_WARNING (tem) = 1;
7859 TREE_USED (tem) = 1;
7863 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7864 constant (if x has signed type, the sign bit cannot be set
7865 in c). This folds extension into the BIT_AND_EXPR. */
7866 if (INTEGRAL_TYPE_P (type)
7867 && TREE_CODE (type) != BOOLEAN_TYPE
7868 && TREE_CODE (op0) == BIT_AND_EXPR
7869 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7872 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7875 if (TYPE_UNSIGNED (TREE_TYPE (and))
7876 || (TYPE_PRECISION (type)
7877 <= TYPE_PRECISION (TREE_TYPE (and))))
7879 else if (TYPE_PRECISION (TREE_TYPE (and1))
7880 <= HOST_BITS_PER_WIDE_INT
7881 && host_integerp (and1, 1))
7883 unsigned HOST_WIDE_INT cst;
7885 cst = tree_low_cst (and1, 1);
7886 cst &= (HOST_WIDE_INT) -1
7887 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7888 change = (cst == 0);
7889 #ifdef LOAD_EXTEND_OP
7891 && !flag_syntax_only
7892 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7895 tree uns = unsigned_type_for (TREE_TYPE (and0));
7896 and0 = fold_convert (uns, and0);
7897 and1 = fold_convert (uns, and1);
7903 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7904 TREE_INT_CST_HIGH (and1), 0,
7905 TREE_OVERFLOW (and1));
7906 return fold_build2 (BIT_AND_EXPR, type,
7907 fold_convert (type, and0), tem);
7911 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7912 when one of the new casts will fold away. Conservatively we assume
7913 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7914 if (POINTER_TYPE_P (type)
7915 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7916 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7917 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7918 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7920 tree arg00 = TREE_OPERAND (arg0, 0);
7921 tree arg01 = TREE_OPERAND (arg0, 1);
7923 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7924 fold_convert (sizetype, arg01));
7927 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7928 of the same precision, and X is an integer type not narrower than
7929 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7930 if (INTEGRAL_TYPE_P (type)
7931 && TREE_CODE (op0) == BIT_NOT_EXPR
7932 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7933 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7934 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7936 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7937 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7938 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7939 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7942 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7943 type of X and Y (integer types only). */
7944 if (INTEGRAL_TYPE_P (type)
7945 && TREE_CODE (op0) == MULT_EXPR
7946 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7947 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7949 /* Be careful not to introduce new overflows. */
7951 if (TYPE_OVERFLOW_WRAPS (type))
7954 mult_type = unsigned_type_for (type);
7956 tem = fold_build2 (MULT_EXPR, mult_type,
7957 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7958 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7959 return fold_convert (type, tem);
7962 tem = fold_convert_const (code, type, op0);
7963 return tem ? tem : NULL_TREE;
7965 case FIXED_CONVERT_EXPR:
7966 tem = fold_convert_const (code, type, arg0);
7967 return tem ? tem : NULL_TREE;
7969 case VIEW_CONVERT_EXPR:
7970 if (TREE_TYPE (op0) == type)
7972 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7973 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7975 /* For integral conversions with the same precision or pointer
7976 conversions use a NOP_EXPR instead. */
7977 if ((INTEGRAL_TYPE_P (type)
7978 || POINTER_TYPE_P (type))
7979 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7980 || POINTER_TYPE_P (TREE_TYPE (op0)))
7981 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7982 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7983 a sub-type to its base type as generated by the Ada FE. */
7984 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7985 && TREE_TYPE (TREE_TYPE (op0))))
7986 return fold_convert (type, op0);
7988 /* Strip inner integral conversions that do not change the precision. */
7989 if (CONVERT_EXPR_P (op0)
7990 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 || POINTER_TYPE_P (TREE_TYPE (op0)))
7992 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7993 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7994 && (TYPE_PRECISION (TREE_TYPE (op0))
7995 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7996 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7998 return fold_view_convert_expr (type, op0);
8001 tem = fold_negate_expr (arg0);
8003 return fold_convert (type, tem);
8007 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8008 return fold_abs_const (arg0, type);
8009 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8010 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8011 /* Convert fabs((double)float) into (double)fabsf(float). */
8012 else if (TREE_CODE (arg0) == NOP_EXPR
8013 && TREE_CODE (type) == REAL_TYPE)
8015 tree targ0 = strip_float_extensions (arg0);
8017 return fold_convert (type, fold_build1 (ABS_EXPR,
8021 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8022 else if (TREE_CODE (arg0) == ABS_EXPR)
8024 else if (tree_expr_nonnegative_p (arg0))
8027 /* Strip sign ops from argument. */
8028 if (TREE_CODE (type) == REAL_TYPE)
8030 tem = fold_strip_sign_ops (arg0);
8032 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8037 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8038 return fold_convert (type, arg0);
8039 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8041 tree itype = TREE_TYPE (type);
8042 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8043 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8044 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8046 if (TREE_CODE (arg0) == COMPLEX_CST)
8048 tree itype = TREE_TYPE (type);
8049 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8050 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8051 return build_complex (type, rpart, negate_expr (ipart));
8053 if (TREE_CODE (arg0) == CONJ_EXPR)
8054 return fold_convert (type, TREE_OPERAND (arg0, 0));
8058 if (TREE_CODE (arg0) == INTEGER_CST)
8059 return fold_not_const (arg0, type);
8060 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8061 return fold_convert (type, TREE_OPERAND (arg0, 0));
8062 /* Convert ~ (-A) to A - 1. */
8063 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8064 return fold_build2 (MINUS_EXPR, type,
8065 fold_convert (type, TREE_OPERAND (arg0, 0)),
8066 build_int_cst (type, 1));
8067 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8068 else if (INTEGRAL_TYPE_P (type)
8069 && ((TREE_CODE (arg0) == MINUS_EXPR
8070 && integer_onep (TREE_OPERAND (arg0, 1)))
8071 || (TREE_CODE (arg0) == PLUS_EXPR
8072 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8073 return fold_build1 (NEGATE_EXPR, type,
8074 fold_convert (type, TREE_OPERAND (arg0, 0)));
8075 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8076 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8077 && (tem = fold_unary (BIT_NOT_EXPR, type,
8079 TREE_OPERAND (arg0, 0)))))
8080 return fold_build2 (BIT_XOR_EXPR, type, tem,
8081 fold_convert (type, TREE_OPERAND (arg0, 1)));
8082 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8083 && (tem = fold_unary (BIT_NOT_EXPR, type,
8085 TREE_OPERAND (arg0, 1)))))
8086 return fold_build2 (BIT_XOR_EXPR, type,
8087 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8088 /* Perform BIT_NOT_EXPR on each element individually. */
8089 else if (TREE_CODE (arg0) == VECTOR_CST)
8091 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8092 int count = TYPE_VECTOR_SUBPARTS (type), i;
8094 for (i = 0; i < count; i++)
8098 elem = TREE_VALUE (elements);
8099 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8100 if (elem == NULL_TREE)
8102 elements = TREE_CHAIN (elements);
8105 elem = build_int_cst (TREE_TYPE (type), -1);
8106 list = tree_cons (NULL_TREE, elem, list);
8109 return build_vector (type, nreverse (list));
8114 case TRUTH_NOT_EXPR:
8115 /* The argument to invert_truthvalue must have Boolean type. */
8116 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8117 arg0 = fold_convert (boolean_type_node, arg0);
8119 /* Note that the operand of this must be an int
8120 and its values must be 0 or 1.
8121 ("true" is a fixed value perhaps depending on the language,
8122 but we don't handle values other than 1 correctly yet.) */
8123 tem = fold_truth_not_expr (arg0);
8126 return fold_convert (type, tem);
8129 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8130 return fold_convert (type, arg0);
8131 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8132 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8133 TREE_OPERAND (arg0, 1));
8134 if (TREE_CODE (arg0) == COMPLEX_CST)
8135 return fold_convert (type, TREE_REALPART (arg0));
8136 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8138 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8139 tem = fold_build2 (TREE_CODE (arg0), itype,
8140 fold_build1 (REALPART_EXPR, itype,
8141 TREE_OPERAND (arg0, 0)),
8142 fold_build1 (REALPART_EXPR, itype,
8143 TREE_OPERAND (arg0, 1)));
8144 return fold_convert (type, tem);
8146 if (TREE_CODE (arg0) == CONJ_EXPR)
8148 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8149 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8150 return fold_convert (type, tem);
8152 if (TREE_CODE (arg0) == CALL_EXPR)
8154 tree fn = get_callee_fndecl (arg0);
8155 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8156 switch (DECL_FUNCTION_CODE (fn))
8158 CASE_FLT_FN (BUILT_IN_CEXPI):
8159 fn = mathfn_built_in (type, BUILT_IN_COS);
8161 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8171 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8172 return fold_convert (type, integer_zero_node);
8173 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8174 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8175 TREE_OPERAND (arg0, 0));
8176 if (TREE_CODE (arg0) == COMPLEX_CST)
8177 return fold_convert (type, TREE_IMAGPART (arg0));
8178 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8180 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8181 tem = fold_build2 (TREE_CODE (arg0), itype,
8182 fold_build1 (IMAGPART_EXPR, itype,
8183 TREE_OPERAND (arg0, 0)),
8184 fold_build1 (IMAGPART_EXPR, itype,
8185 TREE_OPERAND (arg0, 1)));
8186 return fold_convert (type, tem);
8188 if (TREE_CODE (arg0) == CONJ_EXPR)
8190 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8191 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8192 return fold_convert (type, negate_expr (tem));
8194 if (TREE_CODE (arg0) == CALL_EXPR)
8196 tree fn = get_callee_fndecl (arg0);
8197 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8198 switch (DECL_FUNCTION_CODE (fn))
8200 CASE_FLT_FN (BUILT_IN_CEXPI):
8201 fn = mathfn_built_in (type, BUILT_IN_SIN);
8203 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8214 } /* switch (code) */
8217 /* Fold a binary expression of code CODE and type TYPE with operands
8218 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8219 Return the folded expression if folding is successful. Otherwise,
8220 return NULL_TREE. */
8223 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8225 enum tree_code compl_code;
8227 if (code == MIN_EXPR)
8228 compl_code = MAX_EXPR;
8229 else if (code == MAX_EXPR)
8230 compl_code = MIN_EXPR;
8234 /* MIN (MAX (a, b), b) == b. */
8235 if (TREE_CODE (op0) == compl_code
8236 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8237 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8239 /* MIN (MAX (b, a), b) == b. */
8240 if (TREE_CODE (op0) == compl_code
8241 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8242 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8243 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8245 /* MIN (a, MAX (a, b)) == a. */
8246 if (TREE_CODE (op1) == compl_code
8247 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8248 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8249 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8251 /* MIN (a, MAX (b, a)) == a. */
8252 if (TREE_CODE (op1) == compl_code
8253 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8254 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8255 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8260 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8261 by changing CODE to reduce the magnitude of constants involved in
8262 ARG0 of the comparison.
8263 Returns a canonicalized comparison tree if a simplification was
8264 possible, otherwise returns NULL_TREE.
8265 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8266 valid if signed overflow is undefined. */
8269 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8270 tree arg0, tree arg1,
8271 bool *strict_overflow_p)
8273 enum tree_code code0 = TREE_CODE (arg0);
8274 tree t, cst0 = NULL_TREE;
8278 /* Match A +- CST code arg1 and CST code arg1. */
8279 if (!(((code0 == MINUS_EXPR
8280 || code0 == PLUS_EXPR)
8281 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8282 || code0 == INTEGER_CST))
8285 /* Identify the constant in arg0 and its sign. */
8286 if (code0 == INTEGER_CST)
8289 cst0 = TREE_OPERAND (arg0, 1);
8290 sgn0 = tree_int_cst_sgn (cst0);
8292 /* Overflowed constants and zero will cause problems. */
8293 if (integer_zerop (cst0)
8294 || TREE_OVERFLOW (cst0))
8297 /* See if we can reduce the magnitude of the constant in
8298 arg0 by changing the comparison code. */
8299 if (code0 == INTEGER_CST)
8301 /* CST <= arg1 -> CST-1 < arg1. */
8302 if (code == LE_EXPR && sgn0 == 1)
8304 /* -CST < arg1 -> -CST-1 <= arg1. */
8305 else if (code == LT_EXPR && sgn0 == -1)
8307 /* CST > arg1 -> CST-1 >= arg1. */
8308 else if (code == GT_EXPR && sgn0 == 1)
8310 /* -CST >= arg1 -> -CST-1 > arg1. */
8311 else if (code == GE_EXPR && sgn0 == -1)
8315 /* arg1 code' CST' might be more canonical. */
8320 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8322 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8324 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8325 else if (code == GT_EXPR
8326 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8328 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8329 else if (code == LE_EXPR
8330 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8332 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8333 else if (code == GE_EXPR
8334 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8338 *strict_overflow_p = true;
8341 /* Now build the constant reduced in magnitude. */
8342 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8343 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8344 if (code0 != INTEGER_CST)
8345 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8347 /* If swapping might yield a more canonical form, do so. */
8349 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8351 return fold_build2 (code, type, t, arg1);
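/* Editorial example (added): with signed overflow undefined,
   `x + 2 > y' is rewritten to `x + 1 >= y', shrinking the constant;
   a sole constant such as `3 <= y' becomes `2 < y' and may then be
   swapped into `y > 2' for canonical operand order. */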
8354 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8355 overflow further. Try to decrease the magnitude of constants involved
8356 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8357 and put sole constants at the second argument position.
8358 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8361 maybe_canonicalize_comparison (enum tree_code code, tree type,
8362 tree arg0, tree arg1)
8365 bool strict_overflow_p;
8366 const char * const warnmsg = G_("assuming signed overflow does not occur "
8367 "when reducing constant in comparison");
8369 /* In principle pointers also have undefined overflow behavior,
8370 but that causes problems elsewhere. */
8371 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8372 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8375 /* Try canonicalization by simplifying arg0. */
8376 strict_overflow_p = false;
8377 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8378 &strict_overflow_p);
8381 if (strict_overflow_p)
8382 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8386 /* Try canonicalization by simplifying arg1 using the swapped
8387 comparison. */
8388 code = swap_tree_comparison (code);
8389 strict_overflow_p = false;
8390 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8391 &strict_overflow_p);
8392 if (t && strict_overflow_p)
8393 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8397 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8398 space. This is used to avoid issuing overflow warnings for
8399 expressions like &p->x which cannot wrap. */
8402 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8404 unsigned HOST_WIDE_INT offset_low, total_low;
8405 HOST_WIDE_INT size, offset_high, total_high;
8407 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8413 if (offset == NULL_TREE)
8418 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8422 offset_low = TREE_INT_CST_LOW (offset);
8423 offset_high = TREE_INT_CST_HIGH (offset);
8426 if (add_double_with_sign (offset_low, offset_high,
8427 bitpos / BITS_PER_UNIT, 0,
8428 &total_low, &total_high,
8429 true))
8430 return true;
8432 if (total_high != 0)
8433 return true;
8435 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8436 if (size <= 0)
8437 return true;
8439 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8440 array. */
8441 if (TREE_CODE (base) == ADDR_EXPR)
8443 HOST_WIDE_INT base_size;
8445 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8446 if (base_size > 0 && size < base_size)
8447 size = base_size;
8448 }
8450 return total_low > (unsigned HOST_WIDE_INT) size;
8451 }
8453 /* Subroutine of fold_binary. This routine performs all of the
8454 transformations that are common to the equality/inequality
8455 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8456 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8457 fold_binary should use fold_binary rather than calling this
8458 function directly. Fold a comparison with
8458 tree code CODE and type TYPE with operands OP0 and OP1. Return
8459 the folded comparison or NULL_TREE. */
8462 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8464 tree arg0, arg1, tem;
8466 arg0 = op0;
8467 arg1 = op1;
8469 STRIP_SIGN_NOPS (arg0);
8470 STRIP_SIGN_NOPS (arg1);
8472 tem = fold_relational_const (code, type, arg0, arg1);
8473 if (tem != NULL_TREE)
8474 return tem;
8476 /* If one arg is a real or integer constant, put it last. */
8477 if (tree_swap_operands_p (arg0, arg1, true))
8478 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8480 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8481 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8482 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8483 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8484 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8485 && (TREE_CODE (arg1) == INTEGER_CST
8486 && !TREE_OVERFLOW (arg1)))
8488 tree const1 = TREE_OPERAND (arg0, 1);
8489 tree const2 = arg1;
8490 tree variable = TREE_OPERAND (arg0, 0);
8491 tree lhs;
8492 int lhs_add;
8493 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8495 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8496 TREE_TYPE (arg1), const2, const1);
8498 /* If the constant operation overflowed this can be
8499 simplified as a comparison against INT_MAX/INT_MIN. */
8500 if (TREE_CODE (lhs) == INTEGER_CST
8501 && TREE_OVERFLOW (lhs))
8503 int const1_sgn = tree_int_cst_sgn (const1);
8504 enum tree_code code2 = code;
8506 /* Get the sign of the constant on the lhs if the
8507 operation were VARIABLE + CONST1. */
8508 if (TREE_CODE (arg0) == MINUS_EXPR)
8509 const1_sgn = -const1_sgn;
8511 /* The sign of the constant determines if we overflowed
8512 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8513 Canonicalize to the INT_MIN overflow by swapping the comparison
8514 code. */
8515 if (const1_sgn == -1)
8516 code2 = swap_tree_comparison (code);
8518 /* We can now look at the canonicalized case
8519 VARIABLE + 1 CODE2 INT_MIN
8520 and decide on the result. */
8521 if (code2 == LT_EXPR
8522 || code2 == LE_EXPR
8523 || code2 == EQ_EXPR)
8524 return omit_one_operand (type, boolean_false_node, variable);
8525 else if (code2 == NE_EXPR
8526 || code2 == GE_EXPR
8527 || code2 == GT_EXPR)
8528 return omit_one_operand (type, boolean_true_node, variable);
8531 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8532 && (TREE_CODE (lhs) != INTEGER_CST
8533 || !TREE_OVERFLOW (lhs)))
8535 fold_overflow_warning (("assuming signed overflow does not occur "
8536 "when changing X +- C1 cmp C2 to "
8537 "X cmp C1 +- C2"),
8538 WARN_STRICT_OVERFLOW_COMPARISON);
8539 return fold_build2 (code, type, variable, lhs);
8543 /* For comparisons of pointers we can decompose them into a compile-
8544 time comparison of the base objects and the offsets into the object.
8545 This requires at least one operand being an ADDR_EXPR or a
8546 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8547 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8548 && (TREE_CODE (arg0) == ADDR_EXPR
8549 || TREE_CODE (arg1) == ADDR_EXPR
8550 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8551 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8553 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8554 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8555 enum machine_mode mode;
8556 int volatilep, unsignedp;
8557 bool indirect_base0 = false, indirect_base1 = false;
8559 /* Get base and offset for the access. Strip ADDR_EXPR for
8560 get_inner_reference, but put it back by stripping INDIRECT_REF
8561 off the base object if possible. indirect_baseN will be true
8562 if baseN is not an address but refers to the object itself. */
8564 if (TREE_CODE (arg0) == ADDR_EXPR)
8566 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8567 &bitsize, &bitpos0, &offset0, &mode,
8568 &unsignedp, &volatilep, false);
8569 if (TREE_CODE (base0) == INDIRECT_REF)
8570 base0 = TREE_OPERAND (base0, 0);
8571 else
8572 indirect_base0 = true;
8574 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8576 base0 = TREE_OPERAND (arg0, 0);
8577 offset0 = TREE_OPERAND (arg0, 1);
8581 if (TREE_CODE (arg1) == ADDR_EXPR)
8583 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8584 &bitsize, &bitpos1, &offset1, &mode,
8585 &unsignedp, &volatilep, false);
8586 if (TREE_CODE (base1) == INDIRECT_REF)
8587 base1 = TREE_OPERAND (base1, 0);
8588 else
8589 indirect_base1 = true;
8591 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8593 base1 = TREE_OPERAND (arg1, 0);
8594 offset1 = TREE_OPERAND (arg1, 1);
8597 /* If we have equivalent bases we might be able to simplify. */
8598 if (indirect_base0 == indirect_base1
8599 && operand_equal_p (base0, base1, 0))
8601 /* We can fold this expression to a constant if the non-constant
8602 offset parts are equal. */
8603 if ((offset0 == offset1
8604 || (offset0 && offset1
8605 && operand_equal_p (offset0, offset1, 0)))
8606 && (code == EQ_EXPR
8607 || code == NE_EXPR
8608 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8609 {
8610 if (code != EQ_EXPR
8611 && code != NE_EXPR
8613 && bitpos0 != bitpos1
8614 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8615 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8616 fold_overflow_warning (("assuming pointer wraparound does not "
8617 "occur when comparing P +- C1 with "
8618 "P +- C2"),
8619 WARN_STRICT_OVERFLOW_CONDITIONAL);
8621 switch (code)
8622 {
8623 case EQ_EXPR:
8624 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8625 case NE_EXPR:
8626 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8627 case LT_EXPR:
8628 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8629 case LE_EXPR:
8630 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8631 case GE_EXPR:
8632 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8633 case GT_EXPR:
8634 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8635 default:;
8636 }
8638 /* We can simplify the comparison to a comparison of the variable
8639 offset parts if the constant offset parts are equal.
8640 Be careful to use signed size type here because otherwise we
8641 mess with array offsets in the wrong way. This is possible
8642 because pointer arithmetic is restricted to remain within an
8643 object and overflow on pointer differences is undefined as of
8644 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8645 else if (bitpos0 == bitpos1
8646 && ((code == EQ_EXPR || code == NE_EXPR)
8647 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8649 tree signed_size_type_node;
8650 signed_size_type_node = signed_type_for (size_type_node);
8652 /* By converting to signed size type we cover middle-end pointer
8653 arithmetic which operates on unsigned pointer types of size
8654 type size and ARRAY_REF offsets which are properly sign or
8655 zero extended from their type in case it is narrower than
8656 the size type. */
8657 if (offset0 == NULL_TREE)
8658 offset0 = build_int_cst (signed_size_type_node, 0);
8659 else
8660 offset0 = fold_convert (signed_size_type_node, offset0);
8661 if (offset1 == NULL_TREE)
8662 offset1 = build_int_cst (signed_size_type_node, 0);
8663 else
8664 offset1 = fold_convert (signed_size_type_node, offset1);
8666 if (code != EQ_EXPR
8667 && code != NE_EXPR
8668 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8669 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8670 fold_overflow_warning (("assuming pointer wraparound does not "
8671 "occur when comparing P +- C1 with "
8672 "P +- C2"),
8673 WARN_STRICT_OVERFLOW_COMPARISON);
8675 return fold_build2 (code, type, offset0, offset1);
8678 /* For non-equal bases we can simplify if they are addresses
8679 of local binding decls or constants. */
8680 else if (indirect_base0 && indirect_base1
8681 /* We know that !operand_equal_p (base0, base1, 0)
8682 because the if condition was false. But make
8683 sure two decls are not the same. */
8685 && TREE_CODE (arg0) == ADDR_EXPR
8686 && TREE_CODE (arg1) == ADDR_EXPR
8687 && (((TREE_CODE (base0) == VAR_DECL
8688 || TREE_CODE (base0) == PARM_DECL)
8689 && (targetm.binds_local_p (base0)
8690 || CONSTANT_CLASS_P (base1)))
8691 || CONSTANT_CLASS_P (base0))
8692 && (((TREE_CODE (base1) == VAR_DECL
8693 || TREE_CODE (base1) == PARM_DECL)
8694 && (targetm.binds_local_p (base1)
8695 || CONSTANT_CLASS_P (base0)))
8696 || CONSTANT_CLASS_P (base1)))
8698 if (code == EQ_EXPR)
8699 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8700 else if (code == NE_EXPR)
8701 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8703 /* For equal offsets we can simplify to a comparison of the
8704 base addresses. */
8705 else if (bitpos0 == bitpos1
8706 && (indirect_base0
8707 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8708 && (indirect_base1
8709 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8710 && ((offset0 == offset1)
8711 || (offset0 && offset1
8712 && operand_equal_p (offset0, offset1, 0))))
8713 {
8714 if (indirect_base0)
8715 base0 = fold_addr_expr (base0);
8716 if (indirect_base1)
8717 base1 = fold_addr_expr (base1);
8718 return fold_build2 (code, type, base0, base1);
8719 }
8720 }
8722 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8723 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8724 the resulting offset is smaller in absolute value than the
8725 original one. */
8726 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8727 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8728 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8729 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8730 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8731 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8732 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8734 tree const1 = TREE_OPERAND (arg0, 1);
8735 tree const2 = TREE_OPERAND (arg1, 1);
8736 tree variable1 = TREE_OPERAND (arg0, 0);
8737 tree variable2 = TREE_OPERAND (arg1, 0);
8738 tree cst;
8739 const char * const warnmsg = G_("assuming signed overflow does not "
8740 "occur when combining constants around "
8743 /* Put the constant on the side where it doesn't overflow and is
8744 of lower absolute value than before. */
8745 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8746 ? MINUS_EXPR : PLUS_EXPR,
8747 const2, const1, 0);
8748 if (!TREE_OVERFLOW (cst)
8749 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8750 {
8751 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8752 return fold_build2 (code, type,
8753 variable1,
8754 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8755 variable2, cst));
8756 }
8758 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8759 ? MINUS_EXPR : PLUS_EXPR,
8760 const1, const2, 0);
8761 if (!TREE_OVERFLOW (cst)
8762 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8763 {
8764 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8765 return fold_build2 (code, type,
8766 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8767 variable1, cst),
8768 variable2);
8769 }
8770 }
8772 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8773 signed arithmetic case. That form is created by the compiler
8774 often enough for folding it to be of value. One example is in
8775 computing loop trip counts after Operator Strength Reduction. */
8776 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8777 && TREE_CODE (arg0) == MULT_EXPR
8778 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8779 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8780 && integer_zerop (arg1))
8782 tree const1 = TREE_OPERAND (arg0, 1);
8783 tree const2 = arg1; /* zero */
8784 tree variable1 = TREE_OPERAND (arg0, 0);
8785 enum tree_code cmp_code = code;
8787 gcc_assert (!integer_zerop (const1));
8789 fold_overflow_warning (("assuming signed overflow does not occur when "
8790 "eliminating multiplication in comparison "
8792 WARN_STRICT_OVERFLOW_COMPARISON);
8794 /* If const1 is negative we swap the sense of the comparison. */
8795 if (tree_int_cst_sgn (const1) < 0)
8796 cmp_code = swap_tree_comparison (cmp_code);
8798 return fold_build2 (cmp_code, type, variable1, const2);
8801 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8802 if (tem)
8803 return tem;
8805 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8807 tree targ0 = strip_float_extensions (arg0);
8808 tree targ1 = strip_float_extensions (arg1);
8809 tree newtype = TREE_TYPE (targ0);
8811 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8812 newtype = TREE_TYPE (targ1);
8814 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8815 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8816 return fold_build2 (code, type, fold_convert (newtype, targ0),
8817 fold_convert (newtype, targ1));
8819 /* (-a) CMP (-b) -> b CMP a */
8820 if (TREE_CODE (arg0) == NEGATE_EXPR
8821 && TREE_CODE (arg1) == NEGATE_EXPR)
8822 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8823 TREE_OPERAND (arg0, 0));
8825 if (TREE_CODE (arg1) == REAL_CST)
8827 REAL_VALUE_TYPE cst;
8828 cst = TREE_REAL_CST (arg1);
8830 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8831 if (TREE_CODE (arg0) == NEGATE_EXPR)
8832 return fold_build2 (swap_tree_comparison (code), type,
8833 TREE_OPERAND (arg0, 0),
8834 build_real (TREE_TYPE (arg1),
8835 REAL_VALUE_NEGATE (cst)));
8837 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8838 /* a CMP (-0) -> a CMP 0 */
8839 if (REAL_VALUE_MINUS_ZERO (cst))
8840 return fold_build2 (code, type, arg0,
8841 build_real (TREE_TYPE (arg1), dconst0));
8843 /* x != NaN is always true, other ops are always false. */
8844 if (REAL_VALUE_ISNAN (cst)
8845 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8847 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8848 return omit_one_operand (type, tem, arg0);
8851 /* Fold comparisons against infinity. */
8852 if (REAL_VALUE_ISINF (cst))
8854 tem = fold_inf_compare (code, type, arg0, arg1);
8855 if (tem != NULL_TREE)
8856 return tem;
8860 /* If this is a comparison of a real constant with a PLUS_EXPR
8861 or a MINUS_EXPR of a real constant, we can convert it into a
8862 comparison with a revised real constant as long as no overflow
8863 occurs when unsafe_math_optimizations are enabled. */
8864 if (flag_unsafe_math_optimizations
8865 && TREE_CODE (arg1) == REAL_CST
8866 && (TREE_CODE (arg0) == PLUS_EXPR
8867 || TREE_CODE (arg0) == MINUS_EXPR)
8868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8869 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8870 ? MINUS_EXPR : PLUS_EXPR,
8871 arg1, TREE_OPERAND (arg0, 1), 0))
8872 && !TREE_OVERFLOW (tem))
8873 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8875 /* Likewise, we can simplify a comparison of a real constant with
8876 a MINUS_EXPR whose first operand is also a real constant, i.e.
8877 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8878 floating-point types only if -fassociative-math is set. */
8879 if (flag_associative_math
8880 && TREE_CODE (arg1) == REAL_CST
8881 && TREE_CODE (arg0) == MINUS_EXPR
8882 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8883 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8884 arg1, 0))
8885 && !TREE_OVERFLOW (tem))
8886 return fold_build2 (swap_tree_comparison (code), type,
8887 TREE_OPERAND (arg0, 1), tem);
8889 /* Fold comparisons against built-in math functions. */
8890 if (TREE_CODE (arg1) == REAL_CST
8891 && flag_unsafe_math_optimizations
8892 && ! flag_errno_math)
8894 enum built_in_function fcode = builtin_mathfn_code (arg0);
8896 if (fcode != END_BUILTINS)
8898 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8899 if (tem != NULL_TREE)
8900 return tem;
8905 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8906 && CONVERT_EXPR_P (arg0))
8908 /* If we are widening one operand of an integer comparison,
8909 see if the other operand is similarly being widened. Perhaps we
8910 can do the comparison in the narrower type. */
8911 tem = fold_widened_comparison (code, type, arg0, arg1);
8912 if (tem)
8913 return tem;
8915 /* Or if we are changing signedness. */
8916 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8917 if (tem)
8918 return tem;
8921 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8922 constant, we can simplify it. */
8923 if (TREE_CODE (arg1) == INTEGER_CST
8924 && (TREE_CODE (arg0) == MIN_EXPR
8925 || TREE_CODE (arg0) == MAX_EXPR)
8926 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8928 tem = optimize_minmax_comparison (code, type, op0, op1);
8929 if (tem)
8930 return tem;
8933 /* Simplify comparison of something with itself. (For IEEE
8934 floating-point, we can only do some of these simplifications.) */
8935 if (operand_equal_p (arg0, arg1, 0))
8936 {
8937 switch (code)
8938 {
8939 case EQ_EXPR:
8940 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8941 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8942 return constant_boolean_node (1, type);
8943 break;
8945 case GE_EXPR:
8946 case LE_EXPR:
8947 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8948 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8949 return constant_boolean_node (1, type);
8950 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8952 case NE_EXPR:
8953 /* For NE, we can only do this simplification if integer types
8954 are involved or we don't honor IEEE floating point NaNs. */
8955 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8956 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8957 break;
8958 /* ... fall through ... */
8959 case GT_EXPR:
8960 case LT_EXPR:
8961 return constant_boolean_node (0, type);
8962 default:
8963 gcc_unreachable ();
8964 }
8965 }
8967 /* If we are comparing an expression that just has comparisons
8968 of two integer values, arithmetic expressions of those comparisons,
8969 and constants, we can simplify it. There are only three cases
8970 to check: the two values can either be equal, the first can be
8971 greater, or the second can be greater. Fold the expression for
8972 those three values. Since each value must be 0 or 1, we have
8973 eight possibilities, each of which corresponds to the constant 0
8974 or 1 or one of the six possible comparisons.
8976 This handles common cases like (a > b) == 0 but also handles
8977 expressions like ((x > y) - (y > x)) > 0, which supposedly
8978 occur in macroized code. */
8980 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8982 tree cval1 = 0, cval2 = 0;
8983 int save_p = 0;
8985 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8986 /* Don't handle degenerate cases here; they should already
8987 have been handled anyway. */
8988 && cval1 != 0 && cval2 != 0
8989 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8990 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8991 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8992 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8993 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8994 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8995 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8997 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8998 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9000 /* We can't just pass T to eval_subst in case cval1 or cval2
9001 was the same as ARG1. */
9003 tree high_result
9004 = fold_build2 (code, type,
9005 eval_subst (arg0, cval1, maxval,
9006 cval2, minval),
9007 arg1);
9008 tree equal_result
9009 = fold_build2 (code, type,
9010 eval_subst (arg0, cval1, maxval,
9011 cval2, maxval),
9012 arg1);
9013 tree low_result
9014 = fold_build2 (code, type,
9015 eval_subst (arg0, cval1, minval,
9016 cval2, maxval),
9017 arg1);
9019 /* All three of these results should be 0 or 1. Confirm they are.
9020 Then use those values to select the proper code to use. */
9022 if (TREE_CODE (high_result) == INTEGER_CST
9023 && TREE_CODE (equal_result) == INTEGER_CST
9024 && TREE_CODE (low_result) == INTEGER_CST)
9026 /* Make a 3-bit mask with the high-order bit being the
9027 value for `>', the next for '=', and the low for '<'. */
9028 switch ((integer_onep (high_result) * 4)
9029 + (integer_onep (equal_result) * 2)
9030 + integer_onep (low_result))
9034 return omit_one_operand (type, integer_zero_node, arg0);
9055 return omit_one_operand (type, integer_one_node, arg0);
9056 }
9058 if (save_p)
9059 return save_expr (build2 (code, type, cval1, cval2));
9060 return fold_build2 (code, type, cval1, cval2);
9065 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9066 into a single range test. */
9067 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9068 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9069 && TREE_CODE (arg1) == INTEGER_CST
9070 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9071 && !integer_zerop (TREE_OPERAND (arg0, 1))
9072 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9073 && !TREE_OVERFLOW (arg1))
9075 tem = fold_div_compare (code, type, arg0, arg1);
9076 if (tem != NULL_TREE)
9080 /* Fold ~X op ~Y as Y op X. */
9081 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9082 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9084 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9085 return fold_build2 (code, type,
9086 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9087 TREE_OPERAND (arg0, 0));
9090 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9091 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9092 && TREE_CODE (arg1) == INTEGER_CST)
9094 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9095 return fold_build2 (swap_tree_comparison (code), type,
9096 TREE_OPERAND (arg0, 0),
9097 fold_build1 (BIT_NOT_EXPR, cmp_type,
9098 fold_convert (cmp_type, arg1)));
9105 /* Subroutine of fold_binary. Optimize complex multiplications of the
9106 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9107 argument EXPR represents the expression "z" of type TYPE. */
9110 fold_mult_zconjz (tree type, tree expr)
9112 tree itype = TREE_TYPE (type);
9113 tree rpart, ipart, tem;
9115 if (TREE_CODE (expr) == COMPLEX_EXPR)
9116 {
9117 rpart = TREE_OPERAND (expr, 0);
9118 ipart = TREE_OPERAND (expr, 1);
9119 }
9120 else if (TREE_CODE (expr) == COMPLEX_CST)
9121 {
9122 rpart = TREE_REALPART (expr);
9123 ipart = TREE_IMAGPART (expr);
9124 }
9125 else
9126 {
9127 expr = save_expr (expr);
9128 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9129 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9130 }
9132 rpart = save_expr (rpart);
9133 ipart = save_expr (ipart);
9134 tem = fold_build2 (PLUS_EXPR, itype,
9135 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9136 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9137 return fold_build2 (COMPLEX_EXPR, type, tem,
9138 fold_convert (itype, integer_zero_node));
9139 }
9142 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9143 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9144 guarantees that P and N have the same least significant log2(M) bits.
9145 N is not otherwise constrained. In particular, N is not normalized to
9146 0 <= N < M as is common. In general, the precise value of P is unknown.
9147 M is chosen as large as possible such that constant N can be determined.
9149 Returns M and sets *RESIDUE to N. */
9151 static unsigned HOST_WIDE_INT
9152 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9153 {
9154 enum tree_code code;
9156 *residue = 0;
9158 code = TREE_CODE (expr);
9159 if (code == ADDR_EXPR)
9161 expr = TREE_OPERAND (expr, 0);
9162 if (handled_component_p (expr))
9164 HOST_WIDE_INT bitsize, bitpos;
9165 tree offset;
9166 enum machine_mode mode;
9167 int unsignedp, volatilep;
9169 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9170 &mode, &unsignedp, &volatilep, false);
9171 *residue = bitpos / BITS_PER_UNIT;
9172 if (offset)
9173 {
9174 if (TREE_CODE (offset) == INTEGER_CST)
9175 *residue += TREE_INT_CST_LOW (offset);
9176 else
9177 /* We don't handle more complicated offset expressions. */
9178 return 1;
9179 }
9180 }
9182 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9183 return DECL_ALIGN_UNIT (expr);
9185 else if (code == POINTER_PLUS_EXPR)
9188 unsigned HOST_WIDE_INT modulus;
9189 enum tree_code inner_code;
9191 op0 = TREE_OPERAND (expr, 0);
9192 STRIP_NOPS (op0);
9193 modulus = get_pointer_modulus_and_residue (op0, residue);
9195 op1 = TREE_OPERAND (expr, 1);
9196 STRIP_NOPS (op1);
9197 inner_code = TREE_CODE (op1);
9198 if (inner_code == INTEGER_CST)
9199 {
9200 *residue += TREE_INT_CST_LOW (op1);
9201 return modulus;
9202 }
9203 else if (inner_code == MULT_EXPR)
9205 op1 = TREE_OPERAND (op1, 1);
9206 if (TREE_CODE (op1) == INTEGER_CST)
9208 unsigned HOST_WIDE_INT align;
9210 /* Compute the greatest power-of-2 divisor of op1. */
9211 align = TREE_INT_CST_LOW (op1);
9212 align &= -align;
9214 /* If align is nonzero and less than *modulus, replace
9215 *modulus with align. If align is 0, then either op1 is 0
9216 or the greatest power-of-2 divisor of op1 doesn't fit in an
9217 unsigned HOST_WIDE_INT. In either case, no additional
9218 constraint is imposed. */
9219 if (align)
9220 modulus = MIN (modulus, align);
9222 return modulus;
9227 /* If we get here, we were unable to determine anything useful about the
9228 expression. */
9229 return 1;
9230 }
9233 /* Fold a binary expression of code CODE and type TYPE with operands
9234 OP0 and OP1. Return the folded expression if folding is
9235 successful. Otherwise, return NULL_TREE. */
9238 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9240 enum tree_code_class kind = TREE_CODE_CLASS (code);
9241 tree arg0, arg1, tem;
9242 tree t1 = NULL_TREE;
9243 bool strict_overflow_p;
9245 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9246 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9247 && TREE_CODE_LENGTH (code) == 2
9248 && op0 != NULL_TREE
9249 && op1 != NULL_TREE);
9254 /* Strip any conversions that don't change the mode. This is
9255 safe for every expression, except for a comparison expression
9256 because its signedness is derived from its operands. So, in
9257 the latter case, only strip conversions that don't change the
9258 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9259 preserved.
9261 Note that this is done as an internal manipulation within the
9262 constant folder, in order to find the simplest representation
9263 of the arguments so that their form can be studied. In any
9264 case, the appropriate type conversions should be put back in
9265 the tree that will get out of the constant folder. */
9267 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9268 {
9269 STRIP_SIGN_NOPS (arg0);
9270 STRIP_SIGN_NOPS (arg1);
9271 }
9272 else
9273 {
9274 STRIP_NOPS (arg0);
9275 STRIP_NOPS (arg1);
9276 }
9278 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9279 constant but we can't do arithmetic on them. */
9280 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9281 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9282 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9283 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9284 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9285 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9287 if (kind == tcc_binary)
9288 {
9289 /* Make sure type and arg0 have the same saturating flag. */
9290 gcc_assert (TYPE_SATURATING (type)
9291 == TYPE_SATURATING (TREE_TYPE (arg0)));
9292 tem = const_binop (code, arg0, arg1, 0);
9293 }
9294 else if (kind == tcc_comparison)
9295 tem = fold_relational_const (code, type, arg0, arg1);
9296 else
9297 tem = NULL_TREE;
9299 if (tem != NULL_TREE)
9300 {
9301 if (TREE_TYPE (tem) != type)
9302 tem = fold_convert (type, tem);
9303 return tem;
9304 }
9305 }
9307 /* If this is a commutative operation, and ARG0 is a constant, move it
9308 to ARG1 to reduce the number of tests below. */
9309 if (commutative_tree_code (code)
9310 && tree_swap_operands_p (arg0, arg1, true))
9311 return fold_build2 (code, type, op1, op0);
9313 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9315 First check for cases where an arithmetic operation is applied to a
9316 compound, conditional, or comparison operation. Push the arithmetic
9317 operation inside the compound or conditional to see if any folding
9318 can then be done. Convert comparison to conditional for this purpose.
9319 This also optimizes non-constant cases that used to be done in
9320 expand_expr.
9322 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9323 one of the operands is a comparison and the other is a comparison, a
9324 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9325 code below would make the expression more complex. Change it to a
9326 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9327 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9329 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9330 || code == EQ_EXPR || code == NE_EXPR)
9331 && ((truth_value_p (TREE_CODE (arg0))
9332 && (truth_value_p (TREE_CODE (arg1))
9333 || (TREE_CODE (arg1) == BIT_AND_EXPR
9334 && integer_onep (TREE_OPERAND (arg1, 1)))))
9335 || (truth_value_p (TREE_CODE (arg1))
9336 && (truth_value_p (TREE_CODE (arg0))
9337 || (TREE_CODE (arg0) == BIT_AND_EXPR
9338 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9340 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9341 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9344 fold_convert (boolean_type_node, arg0),
9345 fold_convert (boolean_type_node, arg1));
9347 if (code == EQ_EXPR)
9348 tem = invert_truthvalue (tem);
9350 return fold_convert (type, tem);
9353 if (TREE_CODE_CLASS (code) == tcc_binary
9354 || TREE_CODE_CLASS (code) == tcc_comparison)
9356 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9357 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9358 fold_build2 (code, type,
9359 fold_convert (TREE_TYPE (op0),
9360 TREE_OPERAND (arg0, 1)),
9362 if (TREE_CODE (arg1) == COMPOUND_EXPR
9363 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9364 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9365 fold_build2 (code, type, op0,
9366 fold_convert (TREE_TYPE (op1),
9367 TREE_OPERAND (arg1, 1))));
9369 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9371 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9373 /*cond_first_p=*/1);
9374 if (tem != NULL_TREE)
9375 return tem;
9376 }
9378 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9380 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9382 /*cond_first_p=*/0);
9383 if (tem != NULL_TREE)
9384 return tem;
9385 }
9386 }
9388 switch (code)
9389 {
9390 case POINTER_PLUS_EXPR:
9391 /* 0 +p index -> (type)index */
9392 if (integer_zerop (arg0))
9393 return non_lvalue (fold_convert (type, arg1));
9395 /* PTR +p 0 -> PTR */
9396 if (integer_zerop (arg1))
9397 return non_lvalue (fold_convert (type, arg0));
9399 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9400 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9401 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9402 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9403 fold_convert (sizetype, arg1),
9404 fold_convert (sizetype, arg0)));
9406 /* index +p PTR -> PTR +p index */
9407 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9408 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9409 return fold_build2 (POINTER_PLUS_EXPR, type,
9410 fold_convert (type, arg1),
9411 fold_convert (sizetype, arg0));
9413 /* (PTR +p B) +p A -> PTR +p (B + A) */
9414 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9415 {
9416 tree inner;
9417 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9418 tree arg00 = TREE_OPERAND (arg0, 0);
9419 inner = fold_build2 (PLUS_EXPR, sizetype,
9420 arg01, fold_convert (sizetype, arg1));
9421 return fold_convert (type,
9422 fold_build2 (POINTER_PLUS_EXPR,
9423 TREE_TYPE (arg00), arg00, inner));
9426 /* PTR_CST +p CST -> CST1 */
9427 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9428 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9430 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9431 of the array. The loop optimizer sometimes produces this kind of
9432 expression. */
9433 if (TREE_CODE (arg0) == ADDR_EXPR)
9434 {
9435 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9436 if (tem)
9437 return fold_convert (type, tem);
9438 }
9440 return NULL_TREE;
9442 case PLUS_EXPR:
9443 /* PTR + INT -> (INT)(PTR p+ INT) */
9444 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9445 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9446 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9447 TREE_TYPE (arg0),
9448 arg0,
9449 fold_convert (sizetype, arg1)));
9450 /* INT + PTR -> (INT)(PTR p+ INT) */
9451 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9452 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9453 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9454 TREE_TYPE (arg1),
9455 arg1,
9456 fold_convert (sizetype, arg0)));
9457 /* A + (-B) -> A - B */
9458 if (TREE_CODE (arg1) == NEGATE_EXPR)
9459 return fold_build2 (MINUS_EXPR, type,
9460 fold_convert (type, arg0),
9461 fold_convert (type, TREE_OPERAND (arg1, 0)));
9462 /* (-A) + B -> B - A */
9463 if (TREE_CODE (arg0) == NEGATE_EXPR
9464 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9465 return fold_build2 (MINUS_EXPR, type,
9466 fold_convert (type, arg1),
9467 fold_convert (type, TREE_OPERAND (arg0, 0)));
9469 if (INTEGRAL_TYPE_P (type))
9471 /* Convert ~A + 1 to -A. */
9472 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9473 && integer_onep (arg1))
9474 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9476 /* ~X + X is -1. */
9477 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9478 && !TYPE_OVERFLOW_TRAPS (type))
9480 tree tem = TREE_OPERAND (arg0, 0);
9482 STRIP_NOPS (tem);
9483 if (operand_equal_p (tem, arg1, 0))
9485 t1 = build_int_cst_type (type, -1);
9486 return omit_one_operand (type, t1, arg1);
9490 /* X + ~X is -1. */
9491 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9492 && !TYPE_OVERFLOW_TRAPS (type))
9494 tree tem = TREE_OPERAND (arg1, 0);
9496 STRIP_NOPS (tem);
9497 if (operand_equal_p (arg0, tem, 0))
9499 t1 = build_int_cst_type (type, -1);
9500 return omit_one_operand (type, t1, arg0);
9504 /* X + (X / CST) * -CST is X % CST. */
9505 if (TREE_CODE (arg1) == MULT_EXPR
9506 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9507 && operand_equal_p (arg0,
9508 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9510 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9511 tree cst1 = TREE_OPERAND (arg1, 1);
9512 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9513 if (sum && integer_zerop (sum))
9514 return fold_convert (type,
9515 fold_build2 (TRUNC_MOD_EXPR,
9516 TREE_TYPE (arg0), arg0, cst0));
9520 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9521 same or one. Make sure type is not saturating.
9522 fold_plusminus_mult_expr will re-associate. */
9523 if ((TREE_CODE (arg0) == MULT_EXPR
9524 || TREE_CODE (arg1) == MULT_EXPR)
9525 && !TYPE_SATURATING (type)
9526 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9528 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9529 if (tem)
9530 return tem;
9531 }
9533 if (! FLOAT_TYPE_P (type))
9535 if (integer_zerop (arg1))
9536 return non_lvalue (fold_convert (type, arg0));
9538 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9539 with a constant, and the two constants have no bits in common,
9540 we should treat this as a BIT_IOR_EXPR since this may produce more
9541 simplifications. */
9542 if (TREE_CODE (arg0) == BIT_AND_EXPR
9543 && TREE_CODE (arg1) == BIT_AND_EXPR
9544 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9545 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9546 && integer_zerop (const_binop (BIT_AND_EXPR,
9547 TREE_OPERAND (arg0, 1),
9548 TREE_OPERAND (arg1, 1), 0)))
9550 code = BIT_IOR_EXPR;
9551 goto bit_ior;
9552 }
9554 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9555 (plus (plus (mult) (mult)) (foo)) so that we can
9556 take advantage of the factoring cases below. */
9557 if (((TREE_CODE (arg0) == PLUS_EXPR
9558 || TREE_CODE (arg0) == MINUS_EXPR)
9559 && TREE_CODE (arg1) == MULT_EXPR)
9560 || ((TREE_CODE (arg1) == PLUS_EXPR
9561 || TREE_CODE (arg1) == MINUS_EXPR)
9562 && TREE_CODE (arg0) == MULT_EXPR))
9564 tree parg0, parg1, parg, marg;
9565 enum tree_code pcode;
9567 if (TREE_CODE (arg1) == MULT_EXPR)
9568 parg = arg0, marg = arg1;
9569 else
9570 parg = arg1, marg = arg0;
9571 pcode = TREE_CODE (parg);
9572 parg0 = TREE_OPERAND (parg, 0);
9573 parg1 = TREE_OPERAND (parg, 1);
9574 STRIP_NOPS (parg0);
9575 STRIP_NOPS (parg1);
9577 if (TREE_CODE (parg0) == MULT_EXPR
9578 && TREE_CODE (parg1) != MULT_EXPR)
9579 return fold_build2 (pcode, type,
9580 fold_build2 (PLUS_EXPR, type,
9581 fold_convert (type, parg0),
9582 fold_convert (type, marg)),
9583 fold_convert (type, parg1));
9584 if (TREE_CODE (parg0) != MULT_EXPR
9585 && TREE_CODE (parg1) == MULT_EXPR)
9586 return fold_build2 (PLUS_EXPR, type,
9587 fold_convert (type, parg0),
9588 fold_build2 (pcode, type,
9589 fold_convert (type, marg),
9590 fold_convert (type, parg1)));
9591 }
9596 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9597 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9598 return non_lvalue (fold_convert (type, arg0));
9600 /* Likewise if the operands are reversed. */
9601 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9602 return non_lvalue (fold_convert (type, arg1));
9604 /* Convert X + -C into X - C. */
9605 if (TREE_CODE (arg1) == REAL_CST
9606 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9608 tem = fold_negate_const (arg1, type);
9609 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9610 return fold_build2 (MINUS_EXPR, type,
9611 fold_convert (type, arg0),
9612 fold_convert (type, tem));
9615 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9616 to __complex__ ( x, y ). This is not the same for SNaNs or
9617 if signed zeros are involved. */
9618 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9619 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9620 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9622 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9623 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9624 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9625 bool arg0rz = false, arg0iz = false;
9626 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9627 || (arg0i && (arg0iz = real_zerop (arg0i))))
9629 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9630 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9631 if (arg0rz && arg1i && real_zerop (arg1i))
9633 tree rp = arg1r ? arg1r
9634 : build1 (REALPART_EXPR, rtype, arg1);
9635 tree ip = arg0i ? arg0i
9636 : build1 (IMAGPART_EXPR, rtype, arg0);
9637 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9639 else if (arg0iz && arg1r && real_zerop (arg1r))
9641 tree rp = arg0r ? arg0r
9642 : build1 (REALPART_EXPR, rtype, arg0);
9643 tree ip = arg1i ? arg1i
9644 : build1 (IMAGPART_EXPR, rtype, arg1);
9645 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9650 if (flag_unsafe_math_optimizations
9651 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9652 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9653 && (tem = distribute_real_division (code, type, arg0, arg1)))
9654 return tem;
9656 /* Convert x+x into x*2.0. */
9657 if (operand_equal_p (arg0, arg1, 0)
9658 && SCALAR_FLOAT_TYPE_P (type))
9659 return fold_build2 (MULT_EXPR, type, arg0,
9660 build_real (type, dconst2));
9662 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9663 We associate floats only if the user has specified
9664 -fassociative-math. */
9665 if (flag_associative_math
9666 && TREE_CODE (arg1) == PLUS_EXPR
9667 && TREE_CODE (arg0) != MULT_EXPR)
9669 tree tree10 = TREE_OPERAND (arg1, 0);
9670 tree tree11 = TREE_OPERAND (arg1, 1);
9671 if (TREE_CODE (tree11) == MULT_EXPR
9672 && TREE_CODE (tree10) == MULT_EXPR)
9674 tree tree0;
9675 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9676 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9679 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9680 We associate floats only if the user has specified
9681 -fassociative-math. */
9682 if (flag_associative_math
9683 && TREE_CODE (arg0) == PLUS_EXPR
9684 && TREE_CODE (arg1) != MULT_EXPR)
9686 tree tree00 = TREE_OPERAND (arg0, 0);
9687 tree tree01 = TREE_OPERAND (arg0, 1);
9688 if (TREE_CODE (tree01) == MULT_EXPR
9689 && TREE_CODE (tree00) == MULT_EXPR)
9691 tree tree0;
9692 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9693 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9699 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9700 is a rotate of A by C1 bits. */
9701 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9702 is a rotate of A by B bits. */
9704 enum tree_code code0, code1;
9705 tree rtype;
9706 code0 = TREE_CODE (arg0);
9707 code1 = TREE_CODE (arg1);
9708 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9709 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9710 && operand_equal_p (TREE_OPERAND (arg0, 0),
9711 TREE_OPERAND (arg1, 0), 0)
9712 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9713 TYPE_UNSIGNED (rtype))
9714 /* Only create rotates in complete modes. Other cases are not
9715 expanded properly. */
9716 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9718 tree tree01, tree11;
9719 enum tree_code code01, code11;
9721 tree01 = TREE_OPERAND (arg0, 1);
9722 tree11 = TREE_OPERAND (arg1, 1);
9723 STRIP_NOPS (tree01);
9724 STRIP_NOPS (tree11);
9725 code01 = TREE_CODE (tree01);
9726 code11 = TREE_CODE (tree11);
9727 if (code01 == INTEGER_CST
9728 && code11 == INTEGER_CST
9729 && TREE_INT_CST_HIGH (tree01) == 0
9730 && TREE_INT_CST_HIGH (tree11) == 0
9731 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9732 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9733 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9734 code0 == LSHIFT_EXPR ? tree01 : tree11);
9735 else if (code11 == MINUS_EXPR)
9737 tree tree110, tree111;
9738 tree110 = TREE_OPERAND (tree11, 0);
9739 tree111 = TREE_OPERAND (tree11, 1);
9740 STRIP_NOPS (tree110);
9741 STRIP_NOPS (tree111);
9742 if (TREE_CODE (tree110) == INTEGER_CST
9743 && 0 == compare_tree_int (tree110,
9744 TYPE_PRECISION
9745 (TREE_TYPE (TREE_OPERAND
9746 (arg0, 0))))
9747 && operand_equal_p (tree01, tree111, 0))
9748 return build2 ((code0 == LSHIFT_EXPR
9749 ? LROTATE_EXPR
9750 : RROTATE_EXPR),
9751 type, TREE_OPERAND (arg0, 0), tree01);
9753 else if (code01 == MINUS_EXPR)
9755 tree tree010, tree011;
9756 tree010 = TREE_OPERAND (tree01, 0);
9757 tree011 = TREE_OPERAND (tree01, 1);
9758 STRIP_NOPS (tree010);
9759 STRIP_NOPS (tree011);
9760 if (TREE_CODE (tree010) == INTEGER_CST
9761 && 0 == compare_tree_int (tree010,
9762 TYPE_PRECISION
9763 (TREE_TYPE (TREE_OPERAND
9764 (arg0, 0))))
9765 && operand_equal_p (tree11, tree011, 0))
9766 return build2 ((code0 != LSHIFT_EXPR
9767 ? LROTATE_EXPR
9768 : RROTATE_EXPR),
9769 type, TREE_OPERAND (arg0, 0), tree11);
9775 /* In most languages, we can't associate operations on floats through
9776 parentheses. Rather than remember where the parentheses were, we
9777 don't associate floats at all, unless the user has specified
9778 -fassociative-math.
9779 And, we need to make sure type is not saturating. */
9781 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9782 && !TYPE_SATURATING (type))
9784 tree var0, con0, lit0, minus_lit0;
9785 tree var1, con1, lit1, minus_lit1;
9786 bool ok = true;
9788 /* Split both trees into variables, constants, and literals. Then
9789 associate each group together, the constants with literals,
9790 then the result with variables. This increases the chances of
9791 literals being recombined later and of generating relocatable
9792 expressions for the sum of a constant and literal. */
9793 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9794 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9795 code == MINUS_EXPR);
9797 /* With undefined overflow we can only associate constants
9798 with one variable. */
9799 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9800 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9801 && var0 && var1)
9802 {
9803 tree tmp0 = var0;
9804 tree tmp1 = var1;
9806 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9807 tmp0 = TREE_OPERAND (tmp0, 0);
9808 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9809 tmp1 = TREE_OPERAND (tmp1, 0);
9810 /* The only case we can still associate with two variables
9811 is if they are the same, modulo negation. */
9812 if (!operand_equal_p (tmp0, tmp1, 0))
9813 ok = false;
9814 }
9816 /* Only do something if we found more than two objects. Otherwise,
9817 nothing has changed and we risk infinite recursion. */
9818 if (ok
9819 && (2 < ((var0 != 0) + (var1 != 0)
9820 + (con0 != 0) + (con1 != 0)
9821 + (lit0 != 0) + (lit1 != 0)
9822 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9824 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9825 if (code == MINUS_EXPR)
9826 code = PLUS_EXPR;
9828 var0 = associate_trees (var0, var1, code, type);
9829 con0 = associate_trees (con0, con1, code, type);
9830 lit0 = associate_trees (lit0, lit1, code, type);
9831 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9833 /* Preserve the MINUS_EXPR if the negative part of the literal is
9834 greater than the positive part. Otherwise, the multiplicative
9835 folding code (i.e. extract_muldiv) may be fooled in case
9836 unsigned constants are subtracted, like in the following
9837 example: ((X*2 + 4) - 8U)/2. */
9838 if (minus_lit0 && lit0)
9840 if (TREE_CODE (lit0) == INTEGER_CST
9841 && TREE_CODE (minus_lit0) == INTEGER_CST
9842 && tree_int_cst_lt (lit0, minus_lit0))
9844 minus_lit0 = associate_trees (minus_lit0, lit0,
9845 MINUS_EXPR, type);
9846 lit0 = 0;
9847 }
9848 else
9849 {
9850 lit0 = associate_trees (lit0, minus_lit0,
9851 MINUS_EXPR, type);
9852 minus_lit0 = 0;
9853 }
9854 }
9855 if (minus_lit0)
9856 {
9857 if (con0 == 0)
9858 return fold_convert (type,
9859 associate_trees (var0, minus_lit0,
9860 MINUS_EXPR, type));
9861 else
9862 {
9863 con0 = associate_trees (con0, minus_lit0,
9864 MINUS_EXPR, type);
9865 return fold_convert (type,
9866 associate_trees (var0, con0,
9867 MINUS_EXPR, type));
9868 }
9869 }
9871 con0 = associate_trees (con0, lit0, code, type);
9872 return fold_convert (type, associate_trees (var0, con0,
9873 code, type));
9874 }
9875 }
9877 return NULL_TREE;
9879 case MINUS_EXPR:
9880 /* Pointer simplifications for subtraction, simple reassociations. */
9881 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9883 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9884 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9885 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9887 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9888 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9889 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9890 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9891 return fold_build2 (PLUS_EXPR, type,
9892 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9893 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9895 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9896 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9898 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9899 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9900 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9901 if (tmp)
9902 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9905 /* A - (-B) -> A + B */
9906 if (TREE_CODE (arg1) == NEGATE_EXPR)
9907 return fold_build2 (PLUS_EXPR, type, op0,
9908 fold_convert (type, TREE_OPERAND (arg1, 0)));
9909 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9910 if (TREE_CODE (arg0) == NEGATE_EXPR
9911 && (FLOAT_TYPE_P (type)
9912 || INTEGRAL_TYPE_P (type))
9913 && negate_expr_p (arg1)
9914 && reorder_operands_p (arg0, arg1))
9915 return fold_build2 (MINUS_EXPR, type,
9916 fold_convert (type, negate_expr (arg1)),
9917 fold_convert (type, TREE_OPERAND (arg0, 0)));
9918 /* Convert -A - 1 to ~A. */
9919 if (INTEGRAL_TYPE_P (type)
9920 && TREE_CODE (arg0) == NEGATE_EXPR
9921 && integer_onep (arg1)
9922 && !TYPE_OVERFLOW_TRAPS (type))
9923 return fold_build1 (BIT_NOT_EXPR, type,
9924 fold_convert (type, TREE_OPERAND (arg0, 0)));
9926 /* Convert -1 - A to ~A. */
9927 if (INTEGRAL_TYPE_P (type)
9928 && integer_all_onesp (arg0))
9929 return fold_build1 (BIT_NOT_EXPR, type, op1);
9932 /* X - (X / CST) * CST is X % CST. */
9933 if (INTEGRAL_TYPE_P (type)
9934 && TREE_CODE (arg1) == MULT_EXPR
9935 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9936 && operand_equal_p (arg0,
9937 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9938 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9939 TREE_OPERAND (arg1, 1), 0))
9940 return fold_convert (type,
9941 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9942 arg0, TREE_OPERAND (arg1, 1)));
9944 if (! FLOAT_TYPE_P (type))
9946 if (integer_zerop (arg0))
9947 return negate_expr (fold_convert (type, arg1));
9948 if (integer_zerop (arg1))
9949 return non_lvalue (fold_convert (type, arg0));
9951 /* Fold A - (A & B) into ~B & A. */
9952 if (!TREE_SIDE_EFFECTS (arg0)
9953 && TREE_CODE (arg1) == BIT_AND_EXPR)
9955 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9957 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9958 return fold_build2 (BIT_AND_EXPR, type,
9959 fold_build1 (BIT_NOT_EXPR, type, arg10),
9960 fold_convert (type, arg0));
9962 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9964 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9965 return fold_build2 (BIT_AND_EXPR, type,
9966 fold_build1 (BIT_NOT_EXPR, type, arg11),
9967 fold_convert (type, arg0));
9971 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9972 any power of 2 minus 1. */
9973 if (TREE_CODE (arg0) == BIT_AND_EXPR
9974 && TREE_CODE (arg1) == BIT_AND_EXPR
9975 && operand_equal_p (TREE_OPERAND (arg0, 0),
9976 TREE_OPERAND (arg1, 0), 0))
9978 tree mask0 = TREE_OPERAND (arg0, 1);
9979 tree mask1 = TREE_OPERAND (arg1, 1);
9980 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9982 if (operand_equal_p (tem, mask1, 0))
9984 tem = fold_build2 (BIT_XOR_EXPR, type,
9985 TREE_OPERAND (arg0, 0), mask1);
9986 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9991 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9992 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9993 return non_lvalue (fold_convert (type, arg0));
9995 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9996 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9997 (-ARG1 + ARG0) reduces to -ARG1. */
9998 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9999 return negate_expr (fold_convert (type, arg1));
10001 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10002 __complex__ ( x, -y ). This is not the same for SNaNs or if
10003 signed zeros are involved. */
10004 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10005 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10006 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10008 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10009 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10010 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10011 bool arg0rz = false, arg0iz = false;
10012 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10013 || (arg0i && (arg0iz = real_zerop (arg0i))))
10015 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10016 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10017 if (arg0rz && arg1i && real_zerop (arg1i))
10019 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10020 arg1r ? arg1r
10021 : build1 (REALPART_EXPR, rtype, arg1));
10022 tree ip = arg0i ? arg0i
10023 : build1 (IMAGPART_EXPR, rtype, arg0);
10024 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10026 else if (arg0iz && arg1r && real_zerop (arg1r))
10028 tree rp = arg0r ? arg0r
10029 : build1 (REALPART_EXPR, rtype, arg0);
10030 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10031 arg1i ? arg1i
10032 : build1 (IMAGPART_EXPR, rtype, arg1));
10033 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10038 /* Fold &x - &x. This can happen from &x.foo - &x.
10039 This is unsafe for certain floats even in non-IEEE formats.
10040 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10041 Also note that operand_equal_p is always false if an operand
10042 is volatile. */
10044 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10045 && operand_equal_p (arg0, arg1, 0))
10046 return fold_convert (type, integer_zero_node);
10048 /* A - B -> A + (-B) if B is easily negatable. */
10049 if (negate_expr_p (arg1)
10050 && ((FLOAT_TYPE_P (type)
10051 /* Avoid this transformation if B is a positive REAL_CST. */
10052 && (TREE_CODE (arg1) != REAL_CST
10053 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10054 || INTEGRAL_TYPE_P (type)))
10055 return fold_build2 (PLUS_EXPR, type,
10056 fold_convert (type, arg0),
10057 fold_convert (type, negate_expr (arg1)));
10059 /* Try folding difference of addresses. */
10061 HOST_WIDE_INT diff;
10063 if ((TREE_CODE (arg0) == ADDR_EXPR
10064 || TREE_CODE (arg1) == ADDR_EXPR)
10065 && ptr_difference_const (arg0, arg1, &diff))
10066 return build_int_cst_type (type, diff);
10069 /* Fold &a[i] - &a[j] to i-j. */
10070 if (TREE_CODE (arg0) == ADDR_EXPR
10071 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10072 && TREE_CODE (arg1) == ADDR_EXPR
10073 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10075 tree aref0 = TREE_OPERAND (arg0, 0);
10076 tree aref1 = TREE_OPERAND (arg1, 0);
10077 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10078 TREE_OPERAND (aref1, 0), 0))
10080 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10081 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10082 tree esz = array_ref_element_size (aref0);
10083 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10084 return fold_build2 (MULT_EXPR, type, diff,
10085 fold_convert (type, esz));
10090 if (flag_unsafe_math_optimizations
10091 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10092 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10093 && (tem = distribute_real_division (code, type, arg0, arg1)))
10094 return tem;
10096 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10097 same or one. Make sure type is not saturating.
10098 fold_plusminus_mult_expr will re-associate. */
10099 if ((TREE_CODE (arg0) == MULT_EXPR
10100 || TREE_CODE (arg1) == MULT_EXPR)
10101 && !TYPE_SATURATING (type)
10102 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10104 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10105 if (tem)
10106 return tem;
10107 }
10109 goto associate;
10111 case MULT_EXPR:
10112 /* (-A) * (-B) -> A * B */
10113 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10114 return fold_build2 (MULT_EXPR, type,
10115 fold_convert (type, TREE_OPERAND (arg0, 0)),
10116 fold_convert (type, negate_expr (arg1)));
10117 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10118 return fold_build2 (MULT_EXPR, type,
10119 fold_convert (type, negate_expr (arg0)),
10120 fold_convert (type, TREE_OPERAND (arg1, 0)));
10122 if (! FLOAT_TYPE_P (type))
10124 if (integer_zerop (arg1))
10125 return omit_one_operand (type, arg1, arg0);
10126 if (integer_onep (arg1))
10127 return non_lvalue (fold_convert (type, arg0));
10128 /* Transform x * -1 into -x. Make sure to do the negation
10129 on the original operand with conversions not stripped
10130 because we can only strip non-sign-changing conversions. */
10131 if (integer_all_onesp (arg1))
10132 return fold_convert (type, negate_expr (op0));
10133 /* Transform x * -C into -x * C if x is easily negatable. */
10134 if (TREE_CODE (arg1) == INTEGER_CST
10135 && tree_int_cst_sgn (arg1) == -1
10136 && negate_expr_p (arg0)
10137 && (tem = negate_expr (arg1)) != arg1
10138 && !TREE_OVERFLOW (tem))
10139 return fold_build2 (MULT_EXPR, type,
10140 fold_convert (type, negate_expr (arg0)), tem);
10142 /* (a * (1 << b)) is (a << b) */
10143 if (TREE_CODE (arg1) == LSHIFT_EXPR
10144 && integer_onep (TREE_OPERAND (arg1, 0)))
10145 return fold_build2 (LSHIFT_EXPR, type, op0,
10146 TREE_OPERAND (arg1, 1));
10147 if (TREE_CODE (arg0) == LSHIFT_EXPR
10148 && integer_onep (TREE_OPERAND (arg0, 0)))
10149 return fold_build2 (LSHIFT_EXPR, type, op1,
10150 TREE_OPERAND (arg0, 1));
10152 /* (A + A) * C -> A * 2 * C */
10153 if (TREE_CODE (arg0) == PLUS_EXPR
10154 && TREE_CODE (arg1) == INTEGER_CST
10155 && operand_equal_p (TREE_OPERAND (arg0, 0),
10156 TREE_OPERAND (arg0, 1), 0))
10157 return fold_build2 (MULT_EXPR, type,
10158 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10159 TREE_OPERAND (arg0, 1)),
10160 fold_build2 (MULT_EXPR, type,
10161 build_int_cst (type, 2) , arg1));
10163 strict_overflow_p = false;
10164 if (TREE_CODE (arg1) == INTEGER_CST
10165 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10166 &strict_overflow_p)))
10168 if (strict_overflow_p)
10169 fold_overflow_warning (("assuming signed overflow does not "
10170 "occur when simplifying "
10172 WARN_STRICT_OVERFLOW_MISC);
10173 return fold_convert (type, tem);
10176 /* Optimize z * conj(z) for integer complex numbers. */
10177 if (TREE_CODE (arg0) == CONJ_EXPR
10178 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10179 return fold_mult_zconjz (type, arg1);
10180 if (TREE_CODE (arg1) == CONJ_EXPR
10181 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10182 return fold_mult_zconjz (type, arg0);
10186 /* Maybe fold x * 0 to 0. The expressions aren't the same
10187 when x is NaN, since x * 0 is also NaN. Nor are they the
10188 same in modes with signed zeros, since multiplying a
10189 negative value by 0 gives -0, not +0. */
10190 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10191 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10192 && real_zerop (arg1))
10193 return omit_one_operand (type, arg1, arg0);
10194 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10195 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10196 && real_onep (arg1))
10197 return non_lvalue (fold_convert (type, arg0));
10199 /* Transform x * -1.0 into -x. */
10200 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10201 && real_minus_onep (arg1))
10202 return fold_convert (type, negate_expr (arg0));
10204 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10205 the result for floating point types due to rounding so it is applied
10206 only if -fassociative-math was specified. */
10207 if (flag_associative_math
10208 && TREE_CODE (arg0) == RDIV_EXPR
10209 && TREE_CODE (arg1) == REAL_CST
10210 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10212 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10213 arg1, 0);
10214 if (tem)
10215 return fold_build2 (RDIV_EXPR, type, tem,
10216 TREE_OPERAND (arg0, 1));
10219 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10220 if (operand_equal_p (arg0, arg1, 0))
10222 tree tem = fold_strip_sign_ops (arg0);
10223 if (tem != NULL_TREE)
10225 tem = fold_convert (type, tem);
10226 return fold_build2 (MULT_EXPR, type, tem, tem);
10230 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10231 This is not the same for NaNs or if signed zeros are
10232 involved. */
10233 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10234 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10235 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10236 && TREE_CODE (arg1) == COMPLEX_CST
10237 && real_zerop (TREE_REALPART (arg1)))
10239 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10240 if (real_onep (TREE_IMAGPART (arg1)))
10241 return fold_build2 (COMPLEX_EXPR, type,
10242 negate_expr (fold_build1 (IMAGPART_EXPR,
10243 rtype, arg0)),
10244 fold_build1 (REALPART_EXPR, rtype, arg0));
10245 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10246 return fold_build2 (COMPLEX_EXPR, type,
10247 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10248 negate_expr (fold_build1 (REALPART_EXPR,
10249 rtype, arg0)));
10250 }
10252 /* Optimize z * conj(z) for floating point complex numbers.
10253 Guarded by flag_unsafe_math_optimizations as non-finite
10254 imaginary components don't produce scalar results. */
10255 if (flag_unsafe_math_optimizations
10256 && TREE_CODE (arg0) == CONJ_EXPR
10257 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10258 return fold_mult_zconjz (type, arg1);
10259 if (flag_unsafe_math_optimizations
10260 && TREE_CODE (arg1) == CONJ_EXPR
10261 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10262 return fold_mult_zconjz (type, arg0);
10264 if (flag_unsafe_math_optimizations)
10266 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10267 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10269 /* Optimizations of root(...)*root(...). */
10270 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10271 {
10272 tree rootfn, arg;
10273 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10274 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10276 /* Optimize sqrt(x)*sqrt(x) as x. */
10277 if (BUILTIN_SQRT_P (fcode0)
10278 && operand_equal_p (arg00, arg10, 0)
10279 && ! HONOR_SNANS (TYPE_MODE (type)))
10280 return arg00;
10282 /* Optimize root(x)*root(y) as root(x*y). */
10283 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10284 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10285 return build_call_expr (rootfn, 1, arg);
10286 }
10288 /* Optimize expN(x)*expN(y) as expN(x+y). */
10289 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10290 {
10291 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10292 tree arg = fold_build2 (PLUS_EXPR, type,
10293 CALL_EXPR_ARG (arg0, 0),
10294 CALL_EXPR_ARG (arg1, 0));
10295 return build_call_expr (expfn, 1, arg);
10296 }
10298 /* Optimizations of pow(...)*pow(...). */
10299 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10300 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10301 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10302 {
10303 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10304 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10305 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10306 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10308 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10309 if (operand_equal_p (arg01, arg11, 0))
10310 {
10311 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10312 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10313 return build_call_expr (powfn, 2, arg, arg01);
10314 }
10316 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10317 if (operand_equal_p (arg00, arg10, 0))
10318 {
10319 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10320 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10321 return build_call_expr (powfn, 2, arg00, arg);
10322 }
10323 }
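/* Worked examples: pow (x, 3.0) * pow (y, 3.0) folds to pow (x*y, 3.0),
   and pow (x, 2.0) * pow (x, 3.0) folds to pow (x, 5.0); either way two
   pow calls collapse into one.  */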
10325 /* Optimize tan(x)*cos(x) as sin(x). */
10326 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10327 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10328 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10329 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10330 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10331 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10332 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10333 CALL_EXPR_ARG (arg1, 0), 0))
10335 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10337 if (sinfn != NULL_TREE)
10338 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10341 /* Optimize x*pow(x,c) as pow(x,c+1). */
10342 if (fcode1 == BUILT_IN_POW
10343 || fcode1 == BUILT_IN_POWF
10344 || fcode1 == BUILT_IN_POWL)
10346 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10347 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10348 if (TREE_CODE (arg11) == REAL_CST
10349 && !TREE_OVERFLOW (arg11)
10350 && operand_equal_p (arg0, arg10, 0))
10351 {
10352 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10353 REAL_VALUE_TYPE c;
10354 tree arg;
10356 c = TREE_REAL_CST (arg11);
10357 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10358 arg = build_real (type, c);
10359 return build_call_expr (powfn, 2, arg0, arg);
10360 }
10361 }
10363 /* Optimize pow(x,c)*x as pow(x,c+1). */
10364 if (fcode0 == BUILT_IN_POW
10365 || fcode0 == BUILT_IN_POWF
10366 || fcode0 == BUILT_IN_POWL)
10368 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10369 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10370 if (TREE_CODE (arg01) == REAL_CST
10371 && !TREE_OVERFLOW (arg01)
10372 && operand_equal_p (arg1, arg00, 0))
10373 {
10374 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10375 REAL_VALUE_TYPE c;
10376 tree arg;
10378 c = TREE_REAL_CST (arg01);
10379 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10380 arg = build_real (type, c);
10381 return build_call_expr (powfn, 2, arg1, arg);
10382 }
10383 }
10385 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10386 if (! optimize_size
10387 && operand_equal_p (arg0, arg1, 0))
10388 {
10389 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10391 if (powfn)
10392 {
10393 tree arg = build_real (type, dconst2);
10394 return build_call_expr (powfn, 2, arg0, arg);
10395 }
10396 }
10397 }
10399 goto associate;
10401 case BIT_IOR_EXPR:
10402 bit_ior:
10403 if (integer_all_onesp (arg1))
10404 return omit_one_operand (type, arg1, arg0);
10405 if (integer_zerop (arg1))
10406 return non_lvalue (fold_convert (type, arg0));
10407 if (operand_equal_p (arg0, arg1, 0))
10408 return non_lvalue (fold_convert (type, arg0));
10410 /* ~X | X is -1. */
10411 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10412 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10413 {
10414 t1 = fold_convert (type, integer_zero_node);
10415 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10416 return omit_one_operand (type, t1, arg1);
10417 }
10419 /* X | ~X is -1. */
10420 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10421 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10422 {
10423 t1 = fold_convert (type, integer_zero_node);
10424 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10425 return omit_one_operand (type, t1, arg0);
10426 }
10428 /* Canonicalize (X & C1) | C2. */
10429 if (TREE_CODE (arg0) == BIT_AND_EXPR
10430 && TREE_CODE (arg1) == INTEGER_CST
10431 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10433 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10434 int width = TYPE_PRECISION (type), w;
10435 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10436 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10437 hi2 = TREE_INT_CST_HIGH (arg1);
10438 lo2 = TREE_INT_CST_LOW (arg1);
10440 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10441 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10442 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
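/* Worked example: in (X & 0x0F) | 0xFF we have C1 & C2 == 0x0F == C1,
   so every bit C1 could contribute is already forced to 1 by C2 and
   the whole expression is just 0xFF (X kept only for side effects).  */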
10444 if (width > HOST_BITS_PER_WIDE_INT)
10445 {
10446 mhi = (unsigned HOST_WIDE_INT) -1
10447 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10448 mlo = -1;
10449 }
10450 else
10451 {
10452 mhi = 0;
10453 mlo = (unsigned HOST_WIDE_INT) -1
10454 >> (HOST_BITS_PER_WIDE_INT - width);
10455 }
10457 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10458 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10459 return fold_build2 (BIT_IOR_EXPR, type,
10460 TREE_OPERAND (arg0, 0), arg1);
10462 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10463 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10464 mode which allows further optimizations. */
10465 hi1 &= mhi;
10466 lo1 &= mlo;
10467 hi2 &= mhi;
10468 lo2 &= mlo;
10469 hi3 = hi1 & ~hi2;
10470 lo3 = lo1 & ~lo2;
10471 for (w = BITS_PER_UNIT;
10472 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10473 w <<= 1)
10474 {
10475 unsigned HOST_WIDE_INT mask
10476 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10477 if (((lo1 | lo2) & mask) == mask
10478 && (lo1 & ~mask) == 0 && hi1 == 0)
10479 {
10480 hi3 = 0;
10481 lo3 = mask;
10482 break;
10483 }
10484 }
10485 if (hi3 != hi1 || lo3 != lo1)
10486 return fold_build2 (BIT_IOR_EXPR, type,
10487 fold_build2 (BIT_AND_EXPR, type,
10488 TREE_OPERAND (arg0, 0),
10489 build_int_cst_wide (type,
10490 lo3, hi3)),
10491 arg1);
10492 }
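/* Worked example: (X & 0x3F) | 0x30 becomes (X & 0x0F) | 0x30, dropping
   the C1 bits that C2 sets anyway; conversely (X & 0xF0) | 0x0F becomes
   (X & 0xFF) | 0x0F because 0xF0 | 0x0F == 0xFF is a mode mask, which
   may let the widened AND fold away entirely.  */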
10494 /* (X & Y) | Y is (X, Y). */
10495 if (TREE_CODE (arg0) == BIT_AND_EXPR
10496 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10497 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10498 /* (X & Y) | X is (Y, X). */
10499 if (TREE_CODE (arg0) == BIT_AND_EXPR
10500 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10501 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10502 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10503 /* X | (X & Y) is (Y, X). */
10504 if (TREE_CODE (arg1) == BIT_AND_EXPR
10505 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10506 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10507 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10508 /* X | (Y & X) is (Y, X). */
10509 if (TREE_CODE (arg1) == BIT_AND_EXPR
10510 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10511 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10512 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10514 t1 = distribute_bit_expr (code, type, arg0, arg1);
10515 if (t1 != NULL_TREE)
10516 return t1;
10518 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10520 This results in more efficient code for machines without a NAND
10521 instruction. Combine will canonicalize to the first form
10522 which will allow use of NAND instructions provided by the
10523 backend if they exist. */
10524 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10525 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10527 return fold_build1 (BIT_NOT_EXPR, type,
10528 build2 (BIT_AND_EXPR, type,
10529 fold_convert (type,
10530 TREE_OPERAND (arg0, 0)),
10531 fold_convert (type,
10532 TREE_OPERAND (arg1, 0))));
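/* This is De Morgan's law: ~a | ~b == ~(a & b).  For a == 0xC and
   b == 0xA, both sides equal ~0x8.  */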
10535 /* See if this can be simplified into a rotate first. If that
10536 is unsuccessful continue in the association code. */
10537 goto bit_rotate;
10539 case BIT_XOR_EXPR:
10540 if (integer_zerop (arg1))
10541 return non_lvalue (fold_convert (type, arg0));
10542 if (integer_all_onesp (arg1))
10543 return fold_build1 (BIT_NOT_EXPR, type, op0);
10544 if (operand_equal_p (arg0, arg1, 0))
10545 return omit_one_operand (type, integer_zero_node, arg0);
10547 /* ~X ^ X is -1. */
10548 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10550 {
10551 t1 = fold_convert (type, integer_zero_node);
10552 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10553 return omit_one_operand (type, t1, arg1);
10554 }
10556 /* X ^ ~X is -1. */
10557 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10558 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10559 {
10560 t1 = fold_convert (type, integer_zero_node);
10561 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10562 return omit_one_operand (type, t1, arg0);
10563 }
10565 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10566 with a constant, and the two constants have no bits in common,
10567 we should treat this as a BIT_IOR_EXPR since this may produce more
10568 simplifications. */
10569 if (TREE_CODE (arg0) == BIT_AND_EXPR
10570 && TREE_CODE (arg1) == BIT_AND_EXPR
10571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10572 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10573 && integer_zerop (const_binop (BIT_AND_EXPR,
10574 TREE_OPERAND (arg0, 1),
10575 TREE_OPERAND (arg1, 1), 0)))
10576 {
10577 code = BIT_IOR_EXPR;
10578 goto bit_ior;
10579 }
10581 /* (X | Y) ^ X -> Y & ~X. */
10582 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10583 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10584 {
10585 tree t2 = TREE_OPERAND (arg0, 1);
10586 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10587 arg1);
10588 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10589 fold_convert (type, t1));
10590 return t1;
10591 }
10593 /* (Y | X) ^ X -> Y & ~X. */
10594 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10595 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10596 {
10597 tree t2 = TREE_OPERAND (arg0, 0);
10598 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10599 arg1);
10600 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10601 fold_convert (type, t1));
10602 return t1;
10603 }
10605 /* X ^ (X | Y) -> Y & ~X. */
10606 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10607 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10608 {
10609 tree t2 = TREE_OPERAND (arg1, 1);
10610 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10611 arg0);
10612 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10613 fold_convert (type, t1));
10614 return t1;
10615 }
10617 /* X ^ (Y | X) -> Y & ~X. */
10618 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10619 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10620 {
10621 tree t2 = TREE_OPERAND (arg1, 0);
10622 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10623 arg0);
10624 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10625 fold_convert (type, t1));
10626 return t1;
10627 }
10629 /* Convert ~X ^ ~Y to X ^ Y. */
10630 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10631 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10632 return fold_build2 (code, type,
10633 fold_convert (type, TREE_OPERAND (arg0, 0)),
10634 fold_convert (type, TREE_OPERAND (arg1, 0)));
10636 /* Convert ~X ^ C to X ^ ~C. */
10637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10638 && TREE_CODE (arg1) == INTEGER_CST)
10639 return fold_build2 (code, type,
10640 fold_convert (type, TREE_OPERAND (arg0, 0)),
10641 fold_build1 (BIT_NOT_EXPR, type, arg1));
10643 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10644 if (TREE_CODE (arg0) == BIT_AND_EXPR
10645 && integer_onep (TREE_OPERAND (arg0, 1))
10646 && integer_onep (arg1))
10647 return fold_build2 (EQ_EXPR, type, arg0,
10648 build_int_cst (TREE_TYPE (arg0), 0));
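/* Worked example: (X & 1) ^ 1 flips the low bit, so it is 1 exactly
   when (X & 1) == 0; the XOR becomes an equality test against zero.  */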
10650 /* Fold (X & Y) ^ Y as ~X & Y. */
10651 if (TREE_CODE (arg0) == BIT_AND_EXPR
10652 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10653 {
10654 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10655 return fold_build2 (BIT_AND_EXPR, type,
10656 fold_build1 (BIT_NOT_EXPR, type, tem),
10657 fold_convert (type, arg1));
10658 }
10659 /* Fold (X & Y) ^ X as ~Y & X. */
10660 if (TREE_CODE (arg0) == BIT_AND_EXPR
10661 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10662 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10663 {
10664 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10665 return fold_build2 (BIT_AND_EXPR, type,
10666 fold_build1 (BIT_NOT_EXPR, type, tem),
10667 fold_convert (type, arg1));
10668 }
10669 /* Fold X ^ (X & Y) as X & ~Y. */
10670 if (TREE_CODE (arg1) == BIT_AND_EXPR
10671 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10672 {
10673 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10674 return fold_build2 (BIT_AND_EXPR, type,
10675 fold_convert (type, arg0),
10676 fold_build1 (BIT_NOT_EXPR, type, tem));
10677 }
10678 /* Fold X ^ (Y & X) as ~Y & X. */
10679 if (TREE_CODE (arg1) == BIT_AND_EXPR
10680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10681 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10682 {
10683 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10684 return fold_build2 (BIT_AND_EXPR, type,
10685 fold_build1 (BIT_NOT_EXPR, type, tem),
10686 fold_convert (type, arg0));
10687 }
10689 /* See if this can be simplified into a rotate first. If that
10690 is unsuccessful continue in the association code. */
10691 goto bit_rotate;
10693 case BIT_AND_EXPR:
10694 if (integer_all_onesp (arg1))
10695 return non_lvalue (fold_convert (type, arg0));
10696 if (integer_zerop (arg1))
10697 return omit_one_operand (type, arg1, arg0);
10698 if (operand_equal_p (arg0, arg1, 0))
10699 return non_lvalue (fold_convert (type, arg0));
10701 /* ~X & X is always zero. */
10702 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10703 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10704 return omit_one_operand (type, integer_zero_node, arg1);
10706 /* X & ~X is always zero. */
10707 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10709 return omit_one_operand (type, integer_zero_node, arg0);
10711 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10712 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10713 && TREE_CODE (arg1) == INTEGER_CST
10714 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10716 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10717 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10718 TREE_OPERAND (arg0, 0), tmp1);
10719 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10720 TREE_OPERAND (arg0, 1), tmp1);
10721 return fold_convert (type,
10722 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10723 tmp2, tmp3));
10724 }
10726 /* (X | Y) & Y is (X, Y). */
10727 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10728 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10729 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10730 /* (X | Y) & X is (Y, X). */
10731 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10732 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10733 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10734 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10735 /* X & (X | Y) is (Y, X). */
10736 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10737 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10738 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10739 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10740 /* X & (Y | X) is (Y, X). */
10741 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10742 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10743 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10744 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10746 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10747 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10748 && integer_onep (TREE_OPERAND (arg0, 1))
10749 && integer_onep (arg1))
10750 {
10751 tem = TREE_OPERAND (arg0, 0);
10752 return fold_build2 (EQ_EXPR, type,
10753 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10754 build_int_cst (TREE_TYPE (tem), 1)),
10755 build_int_cst (TREE_TYPE (tem), 0));
10756 }
10757 /* Fold ~X & 1 as (X & 1) == 0. */
10758 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10759 && integer_onep (arg1))
10760 {
10761 tem = TREE_OPERAND (arg0, 0);
10762 return fold_build2 (EQ_EXPR, type,
10763 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10764 build_int_cst (TREE_TYPE (tem), 1)),
10765 build_int_cst (TREE_TYPE (tem), 0));
10766 }
10768 /* Fold (X ^ Y) & Y as ~X & Y. */
10769 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10770 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10771 {
10772 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10773 return fold_build2 (BIT_AND_EXPR, type,
10774 fold_build1 (BIT_NOT_EXPR, type, tem),
10775 fold_convert (type, arg1));
10776 }
10777 /* Fold (X ^ Y) & X as ~Y & X. */
10778 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10780 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10781 {
10782 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10783 return fold_build2 (BIT_AND_EXPR, type,
10784 fold_build1 (BIT_NOT_EXPR, type, tem),
10785 fold_convert (type, arg1));
10786 }
10787 /* Fold X & (X ^ Y) as X & ~Y. */
10788 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10789 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10790 {
10791 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10792 return fold_build2 (BIT_AND_EXPR, type,
10793 fold_convert (type, arg0),
10794 fold_build1 (BIT_NOT_EXPR, type, tem));
10795 }
10796 /* Fold X & (Y ^ X) as ~Y & X. */
10797 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10798 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10799 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10800 {
10801 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10802 return fold_build2 (BIT_AND_EXPR, type,
10803 fold_build1 (BIT_NOT_EXPR, type, tem),
10804 fold_convert (type, arg0));
10805 }
10807 t1 = distribute_bit_expr (code, type, arg0, arg1);
10808 if (t1 != NULL_TREE)
10809 return t1;
10810 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10811 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10812 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10813 {
10814 unsigned int prec
10815 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10817 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10818 && (~TREE_INT_CST_LOW (arg1)
10819 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10820 return fold_convert (type, TREE_OPERAND (arg0, 0));
10821 }
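/* Worked example: if c is an unsigned char, (int) c lies in [0, 255],
   so masking with 0377 (0xff) cannot clear any set bit and the AND
   folds away.  */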
10823 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10825 This results in more efficient code for machines without a NOR
10826 instruction. Combine will canonicalize to the first form
10827 which will allow use of NOR instructions provided by the
10828 backend if they exist. */
10829 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10830 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10832 return fold_build1 (BIT_NOT_EXPR, type,
10833 build2 (BIT_IOR_EXPR, type,
10834 fold_convert (type,
10835 TREE_OPERAND (arg0, 0)),
10836 fold_convert (type,
10837 TREE_OPERAND (arg1, 0))));
10840 /* If arg0 is derived from the address of an object or function, we may
10841 be able to fold this expression using the object or function's
10842 alignment. */
10843 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10845 unsigned HOST_WIDE_INT modulus, residue;
10846 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10848 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10850 /* This works because modulus is a power of 2. If this weren't the
10851 case, we'd have to replace it by its greatest power-of-2
10852 divisor: modulus & -modulus. */
10853 if (low < modulus)
10854 return build_int_cst (type, residue & low);
10855 }
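/* Worked example: if arg0 is known to be 8-byte aligned (modulus == 8,
   residue == 0), arg0 & 7 folds to the constant 0, because the low bits
   of the address are fully determined by the alignment.  */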
10857 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10858 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10859 if the new mask might be further optimized. */
10860 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10861 || TREE_CODE (arg0) == RSHIFT_EXPR)
10862 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10863 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10864 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10865 < TYPE_PRECISION (TREE_TYPE (arg0))
10866 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10867 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10869 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10870 unsigned HOST_WIDE_INT mask
10871 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10872 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10873 tree shift_type = TREE_TYPE (arg0);
10875 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10876 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10877 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10878 && TYPE_PRECISION (TREE_TYPE (arg0))
10879 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10881 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10882 tree arg00 = TREE_OPERAND (arg0, 0);
10883 /* See if more bits can be proven as zero because of
10884 zero extension. */
10885 if (TREE_CODE (arg00) == NOP_EXPR
10886 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10888 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10889 if (TYPE_PRECISION (inner_type)
10890 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10891 && TYPE_PRECISION (inner_type) < prec)
10892 {
10893 prec = TYPE_PRECISION (inner_type);
10894 /* See if we can shorten the right shift. */
10895 if (shiftc < prec)
10896 shift_type = inner_type;
10897 }
10898 }
10899 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10900 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10901 zerobits <<= prec - shiftc;
10902 /* For arithmetic shift if sign bit could be set, zerobits
10903 can contain actually sign bits, so no transformation is
10904 possible, unless MASK masks them all away. In that
10905 case the shift needs to be converted into logical shift. */
10906 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10907 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10908 {
10909 if ((mask & zerobits) == 0)
10910 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10911 else
10912 zerobits = 0;
10913 }
10914 }
10916 /* ((X << 16) & 0xff00) is (X, 0). */
10917 if ((mask & zerobits) == mask)
10918 return omit_one_operand (type, build_int_cst (type, 0), arg0);
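/* Worked example: every bit of X << 16 sits at position 16 or higher,
   so ANDing with 0xff00 can only yield zero; X survives solely for its
   side effects.  */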
10920 newmask = mask | zerobits;
10921 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10922 {
10923 unsigned int prec;
10925 /* Only do the transformation if NEWMASK is some integer
10926 mode's mask. */
10927 for (prec = BITS_PER_UNIT;
10928 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10929 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10930 break;
10931 if (prec < HOST_BITS_PER_WIDE_INT
10932 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10933 {
10934 if (shift_type != TREE_TYPE (arg0))
10935 {
10936 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10937 fold_convert (shift_type,
10938 TREE_OPERAND (arg0, 0)),
10939 TREE_OPERAND (arg0, 1));
10940 tem = fold_convert (type, tem);
10941 }
10942 else
10943 tem = op0;
10944 return fold_build2 (BIT_AND_EXPR, type, tem,
10945 build_int_cst_type (TREE_TYPE (op1),
10946 newmask));
10947 }
10948 }
10949 }
10951 goto associate;
10953 case RDIV_EXPR:
10954 /* Don't touch a floating-point divide by zero unless the mode
10955 of the constant can represent infinity. */
10956 if (TREE_CODE (arg1) == REAL_CST
10957 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10958 && real_zerop (arg1))
10959 return NULL_TREE;
10961 /* Optimize A / A to 1.0 if we don't care about
10962 NaNs or Infinities. Skip the transformation
10963 for non-real operands. */
10964 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10965 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10966 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10967 && operand_equal_p (arg0, arg1, 0))
10969 tree r = build_real (TREE_TYPE (arg0), dconst1);
10971 return omit_two_operands (type, r, arg0, arg1);
10974 /* The complex version of the above A / A optimization. */
10975 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10976 && operand_equal_p (arg0, arg1, 0))
10978 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10979 if (! HONOR_NANS (TYPE_MODE (elem_type))
10980 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10982 tree r = build_real (elem_type, dconst1);
10983 /* omit_two_operands will call fold_convert for us. */
10984 return omit_two_operands (type, r, arg0, arg1);
10988 /* (-A) / (-B) -> A / B */
10989 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10990 return fold_build2 (RDIV_EXPR, type,
10991 TREE_OPERAND (arg0, 0),
10992 negate_expr (arg1));
10993 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10994 return fold_build2 (RDIV_EXPR, type,
10995 negate_expr (arg0),
10996 TREE_OPERAND (arg1, 0));
10998 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10999 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11000 && real_onep (arg1))
11001 return non_lvalue (fold_convert (type, arg0));
11003 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11004 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11005 && real_minus_onep (arg1))
11006 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11008 /* If ARG1 is a constant, we can convert this to a multiply by the
11009 reciprocal. This does not have the same rounding properties,
11010 so only do this if -freciprocal-math. We can actually
11011 always safely do it if ARG1 is a power of two, but it's hard to
11012 tell if it is or not in a portable manner. */
11013 if (TREE_CODE (arg1) == REAL_CST)
11015 if (flag_reciprocal_math
11016 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11017 arg1, 0)))
11018 return fold_build2 (MULT_EXPR, type, arg0, tem);
11019 /* Find the reciprocal if optimizing and the result is exact. */
11020 if (optimize)
11021 {
11022 REAL_VALUE_TYPE r;
11023 r = TREE_REAL_CST (arg1);
11024 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11026 tem = build_real (type, r);
11027 return fold_build2 (MULT_EXPR, type,
11028 fold_convert (type, arg0), tem);
11032 /* Convert A/B/C to A/(B*C). */
11033 if (flag_reciprocal_math
11034 && TREE_CODE (arg0) == RDIV_EXPR)
11035 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11036 fold_build2 (MULT_EXPR, type,
11037 TREE_OPERAND (arg0, 1), arg1));
11039 /* Convert A/(B/C) to (A/B)*C. */
11040 if (flag_reciprocal_math
11041 && TREE_CODE (arg1) == RDIV_EXPR)
11042 return fold_build2 (MULT_EXPR, type,
11043 fold_build2 (RDIV_EXPR, type, arg0,
11044 TREE_OPERAND (arg1, 0)),
11045 TREE_OPERAND (arg1, 1));
11047 /* Convert C1/(X*C2) into (C1/C2)/X. */
11048 if (flag_reciprocal_math
11049 && TREE_CODE (arg1) == MULT_EXPR
11050 && TREE_CODE (arg0) == REAL_CST
11051 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11052 {
11053 tree tem = const_binop (RDIV_EXPR, arg0,
11054 TREE_OPERAND (arg1, 1), 0);
11055 if (tem)
11056 return fold_build2 (RDIV_EXPR, type, tem,
11057 TREE_OPERAND (arg1, 0));
11058 }
11060 if (flag_unsafe_math_optimizations)
11062 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11063 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11065 /* Optimize sin(x)/cos(x) as tan(x). */
11066 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11067 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11068 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11069 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11070 CALL_EXPR_ARG (arg1, 0), 0))
11072 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11074 if (tanfn != NULL_TREE)
11075 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11078 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11079 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11080 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11081 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11082 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11083 CALL_EXPR_ARG (arg1, 0), 0))
11085 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11087 if (tanfn != NULL_TREE)
11089 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11090 return fold_build2 (RDIV_EXPR, type,
11091 build_real (type, dconst1), tmp);
11095 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11096 NaNs or Infinities. */
11097 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11098 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11099 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11101 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11102 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11104 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11105 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11106 && operand_equal_p (arg00, arg01, 0))
11108 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11110 if (cosfn != NULL_TREE)
11111 return build_call_expr (cosfn, 1, arg00);
11115 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11116 NaNs or Infinities. */
11117 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11118 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11119 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11121 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11122 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11124 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11125 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11126 && operand_equal_p (arg00, arg01, 0))
11128 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11130 if (cosfn != NULL_TREE)
11131 {
11132 tree tmp = build_call_expr (cosfn, 1, arg00);
11133 return fold_build2 (RDIV_EXPR, type,
11134 build_real (type, dconst1),
11135 tmp);
11136 }
11137 }
11138 }
11140 /* Optimize pow(x,c)/x as pow(x,c-1). */
11141 if (fcode0 == BUILT_IN_POW
11142 || fcode0 == BUILT_IN_POWF
11143 || fcode0 == BUILT_IN_POWL)
11145 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11146 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11147 if (TREE_CODE (arg01) == REAL_CST
11148 && !TREE_OVERFLOW (arg01)
11149 && operand_equal_p (arg1, arg00, 0))
11150 {
11151 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11152 REAL_VALUE_TYPE c;
11153 tree arg;
11155 c = TREE_REAL_CST (arg01);
11156 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11157 arg = build_real (type, c);
11158 return build_call_expr (powfn, 2, arg1, arg);
11162 /* Optimize a/root(b/c) into a*root(c/b). */
11163 if (BUILTIN_ROOT_P (fcode1))
11165 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11167 if (TREE_CODE (rootarg) == RDIV_EXPR)
11169 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11170 tree b = TREE_OPERAND (rootarg, 0);
11171 tree c = TREE_OPERAND (rootarg, 1);
11173 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11175 tmp = build_call_expr (rootfn, 1, tmp);
11176 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11180 /* Optimize x/expN(y) into x*expN(-y). */
11181 if (BUILTIN_EXPONENT_P (fcode1))
11183 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11184 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11185 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11186 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11189 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11190 if (fcode1 == BUILT_IN_POW
11191 || fcode1 == BUILT_IN_POWF
11192 || fcode1 == BUILT_IN_POWL)
11194 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11195 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11196 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11197 tree neg11 = fold_convert (type, negate_expr (arg11));
11198 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11199 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11204 case TRUNC_DIV_EXPR:
11205 case FLOOR_DIV_EXPR:
11206 /* Simplify A / (B << N) where A and B are positive and B is
11207 a power of 2, to A >> (N + log2(B)). */
11208 strict_overflow_p = false;
11209 if (TREE_CODE (arg1) == LSHIFT_EXPR
11210 && (TYPE_UNSIGNED (type)
11211 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11213 tree sval = TREE_OPERAND (arg1, 0);
11214 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11216 tree sh_cnt = TREE_OPERAND (arg1, 1);
11217 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11219 if (strict_overflow_p)
11220 fold_overflow_warning (("assuming signed overflow does not "
11221 "occur when simplifying A / (B << N)"),
11222 WARN_STRICT_OVERFLOW_MISC);
11224 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11225 sh_cnt, build_int_cst (NULL_TREE, pow2));
11226 return fold_build2 (RSHIFT_EXPR, type,
11227 fold_convert (type, arg0), sh_cnt);
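/* Worked example: for unsigned A, A / (4 << n) == A >> (n + 2), since
   4 << n == 1 << (n + 2); exact_log2 supplies the constant 2 above.  */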
11231 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11232 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11233 if (INTEGRAL_TYPE_P (type)
11234 && TYPE_UNSIGNED (type)
11235 && code == FLOOR_DIV_EXPR)
11236 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11240 case ROUND_DIV_EXPR:
11241 case CEIL_DIV_EXPR:
11242 case EXACT_DIV_EXPR:
11243 if (integer_onep (arg1))
11244 return non_lvalue (fold_convert (type, arg0));
11245 if (integer_zerop (arg1))
11246 return NULL_TREE;
11247 /* X / -1 is -X. */
11248 if (!TYPE_UNSIGNED (type)
11249 && TREE_CODE (arg1) == INTEGER_CST
11250 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11251 && TREE_INT_CST_HIGH (arg1) == -1)
11252 return fold_convert (type, negate_expr (arg0));
11254 /* Convert -A / -B to A / B when the type is signed and overflow is
11255 undefined. */
11256 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11257 && TREE_CODE (arg0) == NEGATE_EXPR
11258 && negate_expr_p (arg1))
11260 if (INTEGRAL_TYPE_P (type))
11261 fold_overflow_warning (("assuming signed overflow does not occur "
11262 "when distributing negation across "
11264 WARN_STRICT_OVERFLOW_MISC);
11265 return fold_build2 (code, type,
11266 fold_convert (type, TREE_OPERAND (arg0, 0)),
11267 negate_expr (arg1));
11269 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11270 && TREE_CODE (arg1) == NEGATE_EXPR
11271 && negate_expr_p (arg0))
11273 if (INTEGRAL_TYPE_P (type))
11274 fold_overflow_warning (("assuming signed overflow does not occur "
11275 "when distributing negation across "
11277 WARN_STRICT_OVERFLOW_MISC);
11278 return fold_build2 (code, type, negate_expr (arg0),
11279 TREE_OPERAND (arg1, 0));
11282 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11283 operation, EXACT_DIV_EXPR.
11285 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11286 At one time others generated faster code, it's not clear if they do
11287 after the last round of changes to the DIV code in expmed.c. */
11288 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11289 && multiple_of_p (type, arg0, arg1))
11290 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11292 strict_overflow_p = false;
11293 if (TREE_CODE (arg1) == INTEGER_CST
11294 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11295 &strict_overflow_p)))
11296 {
11297 if (strict_overflow_p)
11298 fold_overflow_warning (("assuming signed overflow does not occur "
11299 "when simplifying division"),
11300 WARN_STRICT_OVERFLOW_MISC);
11301 return fold_convert (type, tem);
11302 }
11304 return NULL_TREE;
11306 case CEIL_MOD_EXPR:
11307 case FLOOR_MOD_EXPR:
11308 case ROUND_MOD_EXPR:
11309 case TRUNC_MOD_EXPR:
11310 /* X % 1 is always zero, but be sure to preserve any side
11311 effects in X. */
11312 if (integer_onep (arg1))
11313 return omit_one_operand (type, integer_zero_node, arg0);
11315 /* X % 0, return X % 0 unchanged so that we can get the
11316 proper warnings and errors. */
11317 if (integer_zerop (arg1))
11318 return NULL_TREE;
11320 /* 0 % X is always zero, but be sure to preserve any side
11321 effects in X. Place this after checking for X == 0. */
11322 if (integer_zerop (arg0))
11323 return omit_one_operand (type, integer_zero_node, arg1);
11325 /* X % -1 is zero. */
11326 if (!TYPE_UNSIGNED (type)
11327 && TREE_CODE (arg1) == INTEGER_CST
11328 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11329 && TREE_INT_CST_HIGH (arg1) == -1)
11330 return omit_one_operand (type, integer_zero_node, arg0);
11332 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11333 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11334 strict_overflow_p = false;
11335 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11336 && (TYPE_UNSIGNED (type)
11337 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11338 {
11339 tree c = arg1;
11340 /* Also optimize A % (C << N) where C is a power of 2,
11341 to A & ((C << N) - 1). */
11342 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11343 c = TREE_OPERAND (arg1, 0);
11345 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11346 {
11347 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11348 build_int_cst (TREE_TYPE (arg1), 1));
11349 if (strict_overflow_p)
11350 fold_overflow_warning (("assuming signed overflow does not "
11351 "occur when simplifying "
11352 "X % (power of two)"),
11353 WARN_STRICT_OVERFLOW_MISC);
11354 return fold_build2 (BIT_AND_EXPR, type,
11355 fold_convert (type, arg0),
11356 fold_convert (type, mask));
11357 }
11358 }
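/* Worked example: for nonnegative X, X % 8 == X & 7; likewise
   X % (4 << n) == X & ((4 << n) - 1), the mask built just above.  */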
11360 /* X % -C is the same as X % C. */
11361 if (code == TRUNC_MOD_EXPR
11362 && !TYPE_UNSIGNED (type)
11363 && TREE_CODE (arg1) == INTEGER_CST
11364 && !TREE_OVERFLOW (arg1)
11365 && TREE_INT_CST_HIGH (arg1) < 0
11366 && !TYPE_OVERFLOW_TRAPS (type)
11367 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11368 && !sign_bit_p (arg1, arg1))
11369 return fold_build2 (code, type, fold_convert (type, arg0),
11370 fold_convert (type, negate_expr (arg1)));
11372 /* X % -Y is the same as X % Y. */
11373 if (code == TRUNC_MOD_EXPR
11374 && !TYPE_UNSIGNED (type)
11375 && TREE_CODE (arg1) == NEGATE_EXPR
11376 && !TYPE_OVERFLOW_TRAPS (type))
11377 return fold_build2 (code, type, fold_convert (type, arg0),
11378 fold_convert (type, TREE_OPERAND (arg1, 0)));
11380 if (TREE_CODE (arg1) == INTEGER_CST
11381 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11382 &strict_overflow_p)))
11383 {
11384 if (strict_overflow_p)
11385 fold_overflow_warning (("assuming signed overflow does not occur "
11386 "when simplifying modulos"),
11387 WARN_STRICT_OVERFLOW_MISC);
11388 return fold_convert (type, tem);
11389 }
11391 return NULL_TREE;
11393 case LROTATE_EXPR:
11394 case RROTATE_EXPR:
11395 if (integer_all_onesp (arg0))
11396 return omit_one_operand (type, arg0, arg1);
11399 case RSHIFT_EXPR:
11400 /* Optimize -1 >> x for arithmetic right shifts. */
11401 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11402 return omit_one_operand (type, arg0, arg1);
11403 /* ... fall through ... */
11406 case LSHIFT_EXPR:
11407 if (integer_zerop (arg1))
11408 return non_lvalue (fold_convert (type, arg0));
11409 if (integer_zerop (arg0))
11410 return omit_one_operand (type, arg0, arg1);
11412 /* Since negative shift count is not well-defined,
11413 don't try to compute it in the compiler. */
11414 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11415 return NULL_TREE;
11417 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11418 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11419 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11420 && host_integerp (TREE_OPERAND (arg0, 1), false)
11421 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11423 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11424 + TREE_INT_CST_LOW (arg1));
11426 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11427 being well defined. */
11428 if (low >= TYPE_PRECISION (type))
11429 {
11430 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11431 low = low % TYPE_PRECISION (type);
11432 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11433 return build_int_cst (type, 0);
11434 else
11435 low = TYPE_PRECISION (type) - 1;
11436 }
11438 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11439 build_int_cst (type, low));
11440 }
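/* Worked examples: (x << 3) << 5 becomes x << 8; for a 32-bit type,
   rotating by 20 and then by 30 becomes a single rotate by 18
   (50 mod 32), while an unsigned (x << 20) << 30 folds to 0.  */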
11442 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11443 into x & ((unsigned)-1 >> c) for unsigned types. */
11444 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11445 || (TYPE_UNSIGNED (type)
11446 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11447 && host_integerp (arg1, false)
11448 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11449 && host_integerp (TREE_OPERAND (arg0, 1), false)
11450 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11452 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11453 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11459 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11461 lshift = build_int_cst (type, -1);
11462 lshift = int_const_binop (code, lshift, arg1, 0);
11464 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11465 }
11466 }
11468 /* Rewrite an LROTATE_EXPR by a constant into an
11469 RROTATE_EXPR by a new constant. */
11470 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11471 {
11472 tree tem = build_int_cst (TREE_TYPE (arg1),
11473 TYPE_PRECISION (type));
11474 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11475 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11476 }
11478 /* If we have a rotate of a bit operation with the rotate count and
11479 the second operand of the bit operation both constant,
11480 permute the two operations. */
11481 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11482 && (TREE_CODE (arg0) == BIT_AND_EXPR
11483 || TREE_CODE (arg0) == BIT_IOR_EXPR
11484 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11485 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11486 return fold_build2 (TREE_CODE (arg0), type,
11487 fold_build2 (code, type,
11488 TREE_OPERAND (arg0, 0), arg1),
11489 fold_build2 (code, type,
11490 TREE_OPERAND (arg0, 1), arg1));
11492 /* Two consecutive rotates adding up to the precision of the
11493 type can be ignored. */
11494 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11495 && TREE_CODE (arg0) == RROTATE_EXPR
11496 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11497 && TREE_INT_CST_HIGH (arg1) == 0
11498 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11499 && ((TREE_INT_CST_LOW (arg1)
11500 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11501 == (unsigned int) TYPE_PRECISION (type)))
11502 return TREE_OPERAND (arg0, 0);
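/* Worked example: in a 32-bit type, rotating x right by 12 and then by
   20 moves every bit 32 places, a full cycle, so x itself is returned.  */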
11504 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11505 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11506 if the latter can be further optimized. */
11507 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11508 && TREE_CODE (arg0) == BIT_AND_EXPR
11509 && TREE_CODE (arg1) == INTEGER_CST
11510 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11511 {
11512 tree mask = fold_build2 (code, type,
11513 fold_convert (type, TREE_OPERAND (arg0, 1)),
11514 arg1);
11515 tree shift = fold_build2 (code, type,
11516 fold_convert (type, TREE_OPERAND (arg0, 0)),
11517 arg1);
11518 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11519 if (tem)
11520 return tem;
11521 }
11523 return NULL_TREE;
11525 case MIN_EXPR:
11526 if (operand_equal_p (arg0, arg1, 0))
11527 return omit_one_operand (type, arg0, arg1);
11528 if (INTEGRAL_TYPE_P (type)
11529 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11530 return omit_one_operand (type, arg1, arg0);
11531 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11532 if (tem)
11533 return tem;
11534 goto associate;
11536 case MAX_EXPR:
11537 if (operand_equal_p (arg0, arg1, 0))
11538 return omit_one_operand (type, arg0, arg1);
11539 if (INTEGRAL_TYPE_P (type)
11540 && TYPE_MAX_VALUE (type)
11541 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11542 return omit_one_operand (type, arg1, arg0);
11543 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11544 if (tem)
11545 return tem;
11546 goto associate;
11548 case TRUTH_ANDIF_EXPR:
11549 /* Note that the operands of this must be ints
11550 and their values must be 0 or 1.
11551 ("true" is a fixed value perhaps depending on the language.) */
11552 /* If first arg is constant zero, return it. */
11553 if (integer_zerop (arg0))
11554 return fold_convert (type, arg0);
11555 case TRUTH_AND_EXPR:
11556 /* If either arg is constant true, drop it. */
11557 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11558 return non_lvalue (fold_convert (type, arg1));
11559 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11560 /* Preserve sequence points. */
11561 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11562 return non_lvalue (fold_convert (type, arg0));
11563 /* If second arg is constant zero, result is zero, but first arg
11564 must be evaluated. */
11565 if (integer_zerop (arg1))
11566 return omit_one_operand (type, arg1, arg0);
11567 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11568 case will be handled here. */
11569 if (integer_zerop (arg0))
11570 return omit_one_operand (type, arg0, arg1);
11572 /* !X && X is always false. */
11573 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11574 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11575 return omit_one_operand (type, integer_zero_node, arg1);
11576 /* X && !X is always false. */
11577 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11578 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11579 return omit_one_operand (type, integer_zero_node, arg0);
11581 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11582 means A >= Y && A != MAX, but in this case we know that
11583 A < X <= MAX. */
11585 if (!TREE_SIDE_EFFECTS (arg0)
11586 && !TREE_SIDE_EFFECTS (arg1))
11588 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11589 if (tem && !operand_equal_p (tem, arg0, 0))
11590 return fold_build2 (code, type, tem, arg1);
11592 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11593 if (tem && !operand_equal_p (tem, arg1, 0))
11594 return fold_build2 (code, type, arg0, tem);
11595 }
11598 /* We only do these simplifications if we are optimizing. */
11599 if (!optimize)
11600 return NULL_TREE;
11602 /* Check for things like (A || B) && (A || C). We can convert this
11603 to A || (B && C). Note that either operator can be any of the four
11604 truth and/or operations and the transformation will still be
11605 valid. Also note that we only care about order for the
11606 ANDIF and ORIF operators. If B contains side effects, this
11607 might change the truth-value of A. */
11608 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11609 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11610 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11611 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11612 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11613 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11615 tree a00 = TREE_OPERAND (arg0, 0);
11616 tree a01 = TREE_OPERAND (arg0, 1);
11617 tree a10 = TREE_OPERAND (arg1, 0);
11618 tree a11 = TREE_OPERAND (arg1, 1);
11619 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11620 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11621 && (code == TRUTH_AND_EXPR
11622 || code == TRUTH_OR_EXPR));
11624 if (operand_equal_p (a00, a10, 0))
11625 return fold_build2 (TREE_CODE (arg0), type, a00,
11626 fold_build2 (code, type, a01, a11));
11627 else if (commutative && operand_equal_p (a00, a11, 0))
11628 return fold_build2 (TREE_CODE (arg0), type, a00,
11629 fold_build2 (code, type, a01, a10));
11630 else if (commutative && operand_equal_p (a01, a10, 0))
11631 return fold_build2 (TREE_CODE (arg0), type, a01,
11632 fold_build2 (code, type, a00, a11));
11634 /* This case is tricky because we must either have commutative
11635 operators or else A10 must not have side-effects. */
11637 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11638 && operand_equal_p (a01, a11, 0))
11639 return fold_build2 (TREE_CODE (arg0), type,
11640 fold_build2 (code, type, a00, a10),
11641 a01);
11642 }
11644 /* See if we can build a range comparison. */
11645 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11646 return tem;
11648 /* Check for the possibility of merging component references. If our
11649 lhs is another similar operation, try to merge its rhs with our
11650 rhs. Then try to merge our lhs and rhs. */
11651 if (TREE_CODE (arg0) == code
11652 && 0 != (tem = fold_truthop (code, type,
11653 TREE_OPERAND (arg0, 1), arg1)))
11654 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11656 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11657 return tem;
11659 return NULL_TREE;
11661 case TRUTH_ORIF_EXPR:
11662 /* Note that the operands of this must be ints
11663 and their values must be 0 or true.
11664 ("true" is a fixed value perhaps depending on the language.) */
11665 /* If first arg is constant true, return it. */
11666 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11667 return fold_convert (type, arg0);
11668 case TRUTH_OR_EXPR:
11669 /* If either arg is constant zero, drop it. */
11670 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11671 return non_lvalue (fold_convert (type, arg1));
11672 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11673 /* Preserve sequence points. */
11674 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11675 return non_lvalue (fold_convert (type, arg0));
11676 /* If second arg is constant true, result is true, but we must
11677 evaluate first arg. */
11678 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11679 return omit_one_operand (type, arg1, arg0);
11680 /* Likewise for first arg, but note this only occurs here for
11681 TRUTH_OR_EXPR. */
11682 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11683 return omit_one_operand (type, arg0, arg1);
11685 /* !X || X is always true. */
11686 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11688 return omit_one_operand (type, integer_one_node, arg1);
11689 /* X || !X is always true. */
11690 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11692 return omit_one_operand (type, integer_one_node, arg0);
11696 case TRUTH_XOR_EXPR:
11697 /* If the second arg is constant zero, drop it. */
11698 if (integer_zerop (arg1))
11699 return non_lvalue (fold_convert (type, arg0));
11700 /* If the second arg is constant true, this is a logical inversion. */
11701 if (integer_onep (arg1))
11702 {
11703 /* Only call invert_truthvalue if operand is a truth value. */
11704 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11705 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11706 else
11707 tem = invert_truthvalue (arg0);
11708 return non_lvalue (fold_convert (type, tem));
11709 }
11710 /* Identical arguments cancel to zero. */
11711 if (operand_equal_p (arg0, arg1, 0))
11712 return omit_one_operand (type, integer_zero_node, arg0);
11714 /* !X ^ X is always true. */
11715 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11716 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11717 return omit_one_operand (type, integer_one_node, arg1);
11719 /* X ^ !X is always true. */
11720 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11721 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11722 return omit_one_operand (type, integer_one_node, arg0);
11724 return NULL_TREE;
11726 case EQ_EXPR:
11727 case NE_EXPR:
11728 tem = fold_comparison (code, type, op0, op1);
11729 if (tem != NULL_TREE)
11730 return tem;
11732 /* bool_var != 0 becomes bool_var. */
11733 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11734 && code == NE_EXPR)
11735 return non_lvalue (fold_convert (type, arg0));
11737 /* bool_var == 1 becomes bool_var. */
11738 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11739 && code == EQ_EXPR)
11740 return non_lvalue (fold_convert (type, arg0));
11742 /* bool_var != 1 becomes !bool_var. */
11743 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11744 && code == NE_EXPR)
11745 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11747 /* bool_var == 0 becomes !bool_var. */
11748 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11749 && code == EQ_EXPR)
11750 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11752 /* If this is an equality comparison of the address of two non-weak,
11753 unaliased symbols neither of which are extern (since we do not
11754 have access to attributes for externs), then we know the result. */
11755 if (TREE_CODE (arg0) == ADDR_EXPR
11756 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11757 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11758 && ! lookup_attribute ("alias",
11759 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11760 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11761 && TREE_CODE (arg1) == ADDR_EXPR
11762 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11763 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11764 && ! lookup_attribute ("alias",
11765 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11766 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11768 /* We know that we're looking at the address of two
11769 non-weak, unaliased, static _DECL nodes.
11771 It is both wasteful and incorrect to call operand_equal_p
11772 to compare the two ADDR_EXPR nodes. It is wasteful in that
11773 all we need to do is test pointer equality for the arguments
11774 to the two ADDR_EXPR nodes. It is incorrect to use
11775 operand_equal_p as that function is NOT equivalent to a
11776 C equality test. It can in fact return false for two
11777 objects which would test as equal using the C equality
11778 operator. */
11779 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11780 return constant_boolean_node (equal
11781 ? code == EQ_EXPR : code != EQ_EXPR,
11785 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11786 a MINUS_EXPR of a constant, we can convert it into a comparison with
11787 a revised constant as long as no overflow occurs. */
11788 if (TREE_CODE (arg1) == INTEGER_CST
11789 && (TREE_CODE (arg0) == PLUS_EXPR
11790 || TREE_CODE (arg0) == MINUS_EXPR)
11791 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11792 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11793 ? MINUS_EXPR : PLUS_EXPR,
11794 fold_convert (TREE_TYPE (arg0), arg1),
11795 TREE_OPERAND (arg0, 1), 0))
11796 && !TREE_OVERFLOW (tem))
11797 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11799 /* Similarly for a NEGATE_EXPR. */
11800 if (TREE_CODE (arg0) == NEGATE_EXPR
11801 && TREE_CODE (arg1) == INTEGER_CST
11802 && 0 != (tem = negate_expr (arg1))
11803 && TREE_CODE (tem) == INTEGER_CST
11804 && !TREE_OVERFLOW (tem))
11805 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11807 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11808 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11809 && TREE_CODE (arg1) == INTEGER_CST
11810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11811 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11812 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11813 fold_convert (TREE_TYPE (arg0), arg1),
11814 TREE_OPERAND (arg0, 1)));
11816 /* Transform comparisons of the form X +- C CMP X. */
11817 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11818 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11820 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11821 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11823 tree cst = TREE_OPERAND (arg0, 1);
11825 if (code == EQ_EXPR
11826 && !integer_zerop (cst))
11827 return omit_two_operands (type, boolean_false_node,
11828 TREE_OPERAND (arg0, 0), arg1);
11829 else
11830 return omit_two_operands (type, boolean_true_node,
11831 TREE_OPERAND (arg0, 0), arg1);
11832 }
11834 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11835 for !=. Don't do this for ordered comparisons due to overflow. */
11836 if (TREE_CODE (arg0) == MINUS_EXPR
11837 && integer_zerop (arg1))
11838 return fold_build2 (code, type,
11839 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11841 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11842 if (TREE_CODE (arg0) == ABS_EXPR
11843 && (integer_zerop (arg1) || real_zerop (arg1)))
11844 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11846 /* If this is an EQ or NE comparison with zero and ARG0 is
11847 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11848 two operations, but the latter can be done in one less insn
11849 on machines that have only two-operand insns or on which a
11850 constant cannot be the first operand. */
11851 if (TREE_CODE (arg0) == BIT_AND_EXPR
11852 && integer_zerop (arg1))
11854 tree arg00 = TREE_OPERAND (arg0, 0);
11855 tree arg01 = TREE_OPERAND (arg0, 1);
11856 if (TREE_CODE (arg00) == LSHIFT_EXPR
11857 && integer_onep (TREE_OPERAND (arg00, 0)))
11859 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11860 arg01, TREE_OPERAND (arg00, 1));
11861 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11862 build_int_cst (TREE_TYPE (arg0), 1));
11863 return fold_build2 (code, type,
11864 fold_convert (TREE_TYPE (arg1), tem), arg1);
11866 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11867 && integer_onep (TREE_OPERAND (arg01, 0)))
11869 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11870 arg00, TREE_OPERAND (arg01, 1));
11871 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11872 build_int_cst (TREE_TYPE (arg0), 1));
11873 return fold_build2 (code, type,
11874 fold_convert (TREE_TYPE (arg1), tem), arg1);
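/* Worked example: ((1 << foo) & bar) != 0 and ((bar >> foo) & 1) != 0
   both test bit foo of bar, but the second form never materializes the
   shifted constant, saving an insn on two-operand machines.  */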
11878 /* If this is an NE or EQ comparison of zero against the result of a
11879 signed MOD operation whose second operand is a power of 2, make
11880 the MOD operation unsigned since it is simpler and equivalent. */
11881 if (integer_zerop (arg1)
11882 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11883 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11884 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11885 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11886 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11887 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11889 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11890 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11891 fold_convert (newtype,
11892 TREE_OPERAND (arg0, 0)),
11893 fold_convert (newtype,
11894 TREE_OPERAND (arg0, 1)));
11896 return fold_build2 (code, type, newmod,
11897 fold_convert (newtype, arg1));
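/* Worked example (illustrative annotation, not original GCC code; the
   operand is hypothetical): for signed x,

     x % 4 == 0   =>   (unsigned) x % 4U == 0

   The two tests agree for a power-of-two divisor because only the low
   bits of the two's complement representation decide divisibility, and
   those bits are the same under either signedness.  */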
11900 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11901 C1 is a valid shift constant, and C2 is a power of two, i.e.
11902 a single bit. */
11903 if (TREE_CODE (arg0) == BIT_AND_EXPR
11904 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11905 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11906 == INTEGER_CST
11907 && integer_pow2p (TREE_OPERAND (arg0, 1))
11908 && integer_zerop (arg1))
11910 tree itype = TREE_TYPE (arg0);
11911 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11912 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11914 /* Check for a valid shift count. */
11915 if (TREE_INT_CST_HIGH (arg001) == 0
11916 && TREE_INT_CST_LOW (arg001) < prec)
11918 tree arg01 = TREE_OPERAND (arg0, 1);
11919 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11920 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11921 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11922 can be rewritten as (X & (C2 << C1)) != 0. */
11923 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11925 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11926 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11927 return fold_build2 (code, type, tem, arg1);
11929 /* Otherwise, for signed (arithmetic) shifts,
11930 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11931 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11932 else if (!TYPE_UNSIGNED (itype))
11933 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11934 arg000, build_int_cst (itype, 0));
11935 /* Otherwise, for unsigned (logical) shifts,
11936 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11937 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11939 return omit_one_operand (type,
11940 code == EQ_EXPR ? integer_one_node
11941 : integer_zero_node,
11942 arg000);
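/* Worked examples (illustrative annotation, not original GCC code;
   operands are hypothetical, assuming a 32-bit int):

     ((x >> 3) & 4) != 0    =>   (x & (4 << 3)) != 0   =>   (x & 32) != 0
     ((x >> 31) & 2) != 0   =>   x < 0

   The second form applies because 2 << 31 would overflow, and a signed
   (arithmetic) right shift by 31 yields only 0 or -1, so the AND is
   really a sign test.  */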
11946 /* If this is an NE comparison of zero with an AND of one, remove the
11947 comparison since the AND will give the correct value. */
11948 if (code == NE_EXPR
11949 && integer_zerop (arg1)
11950 && TREE_CODE (arg0) == BIT_AND_EXPR
11951 && integer_onep (TREE_OPERAND (arg0, 1)))
11952 return fold_convert (type, arg0);
11954 /* If we have (A & C) == C where C is a power of 2, convert this into
11955 (A & C) != 0. Similarly for NE_EXPR. */
11956 if (TREE_CODE (arg0) == BIT_AND_EXPR
11957 && integer_pow2p (TREE_OPERAND (arg0, 1))
11958 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11959 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11960 arg0, fold_convert (TREE_TYPE (arg0),
11961 integer_zero_node));
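/* Worked example (illustrative annotation, not original GCC code; the
   operand is hypothetical): since (flags & 8) can only evaluate to 0
   or 8,

     (flags & 8) == 8   =>   (flags & 8) != 0

   which is the canonical single-bit test.  */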
11963 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11964 bit, then fold the expression into A < 0 or A >= 0. */
11965 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11969 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11970 Similarly for NE_EXPR. */
11971 if (TREE_CODE (arg0) == BIT_AND_EXPR
11972 && TREE_CODE (arg1) == INTEGER_CST
11973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11975 tree notc = fold_build1 (BIT_NOT_EXPR,
11976 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11977 TREE_OPERAND (arg0, 1));
11978 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11979 arg1, notc);
11980 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11981 if (integer_nonzerop (dandnotc))
11982 return omit_one_operand (type, rslt, arg0);
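/* Worked example (illustrative annotation, not original GCC code;
   operands are hypothetical):

     (x & 6) == 1   =>   0 (always false)

   because D & ~C = 1 & ~6 is nonzero: bit 0 of the right-hand side can
   never survive the mask.  */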
11985 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11986 Similarly for NE_EXPR. */
11987 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11988 && TREE_CODE (arg1) == INTEGER_CST
11989 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11991 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11992 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11993 TREE_OPERAND (arg0, 1), notd);
11994 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11995 if (integer_nonzerop (candnotd))
11996 return omit_one_operand (type, rslt, arg0);
11999 /* Optimize comparisons of strlen vs zero to a compare of the
12000 first character of the string vs zero. To wit,
12001 strlen(ptr) == 0 => *ptr == 0
12002 strlen(ptr) != 0 => *ptr != 0
12003 Other cases should reduce to one of these two (or a constant)
12004 due to the return value of strlen being unsigned. */
12005 if (TREE_CODE (arg0) == CALL_EXPR
12006 && integer_zerop (arg1))
12008 tree fndecl = get_callee_fndecl (arg0);
12011 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12012 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12013 && call_expr_nargs (arg0) == 1
12014 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12016 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12017 return fold_build2 (code, type, iref,
12018 build_int_cst (TREE_TYPE (iref), 0));
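/* Worked example (illustrative annotation, not original GCC code; the
   pointer is hypothetical):

     strlen (p) != 0   =>   *p != 0

   so an emptiness test never scans the whole string.  */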
12022 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12023 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12024 if (TREE_CODE (arg0) == RSHIFT_EXPR
12025 && integer_zerop (arg1)
12026 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12028 tree arg00 = TREE_OPERAND (arg0, 0);
12029 tree arg01 = TREE_OPERAND (arg0, 1);
12030 tree itype = TREE_TYPE (arg00);
12031 if (TREE_INT_CST_HIGH (arg01) == 0
12032 && TREE_INT_CST_LOW (arg01)
12033 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12035 if (TYPE_UNSIGNED (itype))
12037 itype = signed_type_for (itype);
12038 arg00 = fold_convert (itype, arg00);
12040 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12041 type, arg00, build_int_cst (itype, 0));
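/* Worked example (illustrative annotation, not original GCC code,
   assuming a 32-bit int):

     (x >> 31) != 0   =>   x < 0        (x >> 31) == 0   =>   x >= 0

   For an unsigned x the operand is first converted to the
   corresponding signed type so that the sign test is meaningful.  */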
12045 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12046 if (integer_zerop (arg1)
12047 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12048 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12049 TREE_OPERAND (arg0, 1));
12051 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12052 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12053 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12054 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12055 build_int_cst (TREE_TYPE (arg1), 0));
12056 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12057 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12058 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12059 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12060 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12061 build_int_cst (TREE_TYPE (arg1), 0));
12063 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12064 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12065 && TREE_CODE (arg1) == INTEGER_CST
12066 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12067 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12068 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12069 TREE_OPERAND (arg0, 1), arg1));
12071 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12072 (X & C) == 0 when C is a single bit. */
12073 if (TREE_CODE (arg0) == BIT_AND_EXPR
12074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12075 && integer_zerop (arg1)
12076 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12078 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12079 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12080 TREE_OPERAND (arg0, 1));
12081 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12082 type, tem, arg1);
12085 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12086 constant C is a power of two, i.e. a single bit. */
12087 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12088 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12089 && integer_zerop (arg1)
12090 && integer_pow2p (TREE_OPERAND (arg0, 1))
12091 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12092 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12094 tree arg00 = TREE_OPERAND (arg0, 0);
12095 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12096 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12099 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12100 when C is a power of two, i.e. a single bit. */
12101 if (TREE_CODE (arg0) == BIT_AND_EXPR
12102 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12103 && integer_zerop (arg1)
12104 && integer_pow2p (TREE_OPERAND (arg0, 1))
12105 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12106 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12108 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12109 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12110 arg000, TREE_OPERAND (arg0, 1));
12111 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12112 tem, build_int_cst (TREE_TYPE (tem), 0));
12115 if (integer_zerop (arg1)
12116 && tree_expr_nonzero_p (arg0))
12118 tree res = constant_boolean_node (code == NE_EXPR, type);
12119 return omit_one_operand (type, res, arg0);
12122 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12123 if (TREE_CODE (arg0) == NEGATE_EXPR
12124 && TREE_CODE (arg1) == NEGATE_EXPR)
12125 return fold_build2 (code, type,
12126 TREE_OPERAND (arg0, 0),
12127 TREE_OPERAND (arg1, 0));
12129 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12130 if (TREE_CODE (arg0) == BIT_AND_EXPR
12131 && TREE_CODE (arg1) == BIT_AND_EXPR)
12133 tree arg00 = TREE_OPERAND (arg0, 0);
12134 tree arg01 = TREE_OPERAND (arg0, 1);
12135 tree arg10 = TREE_OPERAND (arg1, 0);
12136 tree arg11 = TREE_OPERAND (arg1, 1);
12137 tree itype = TREE_TYPE (arg0);
12139 if (operand_equal_p (arg01, arg11, 0))
12140 return fold_build2 (code, type,
12141 fold_build2 (BIT_AND_EXPR, itype,
12142 fold_build2 (BIT_XOR_EXPR, itype,
12143 arg00, arg10),
12144 arg01),
12145 build_int_cst (itype, 0));
12147 if (operand_equal_p (arg01, arg10, 0))
12148 return fold_build2 (code, type,
12149 fold_build2 (BIT_AND_EXPR, itype,
12150 fold_build2 (BIT_XOR_EXPR, itype,
12151 arg00, arg11),
12152 arg01),
12153 build_int_cst (itype, 0));
12155 if (operand_equal_p (arg00, arg11, 0))
12156 return fold_build2 (code, type,
12157 fold_build2 (BIT_AND_EXPR, itype,
12158 fold_build2 (BIT_XOR_EXPR, itype,
12159 arg01, arg10),
12160 arg00),
12161 build_int_cst (itype, 0));
12163 if (operand_equal_p (arg00, arg10, 0))
12164 return fold_build2 (code, type,
12165 fold_build2 (BIT_AND_EXPR, itype,
12166 fold_build2 (BIT_XOR_EXPR, itype,
12167 arg01, arg11),
12168 arg00),
12169 build_int_cst (itype, 0));
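/* Worked example (illustrative annotation, not original GCC code;
   operands are hypothetical):

     (x & m) == (y & m)   =>   ((x ^ y) & m) == 0

   Two values agree on every bit selected by a shared mask exactly when
   their XOR has no bit in common with that mask.  */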
12172 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12173 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12175 tree arg00 = TREE_OPERAND (arg0, 0);
12176 tree arg01 = TREE_OPERAND (arg0, 1);
12177 tree arg10 = TREE_OPERAND (arg1, 0);
12178 tree arg11 = TREE_OPERAND (arg1, 1);
12179 tree itype = TREE_TYPE (arg0);
12181 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12182 operand_equal_p guarantees no side-effects so we don't need
12183 to use omit_one_operand on Z. */
12184 if (operand_equal_p (arg01, arg11, 0))
12185 return fold_build2 (code, type, arg00, arg10);
12186 if (operand_equal_p (arg01, arg10, 0))
12187 return fold_build2 (code, type, arg00, arg11);
12188 if (operand_equal_p (arg00, arg11, 0))
12189 return fold_build2 (code, type, arg01, arg10);
12190 if (operand_equal_p (arg00, arg10, 0))
12191 return fold_build2 (code, type, arg01, arg11);
12193 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12194 if (TREE_CODE (arg01) == INTEGER_CST
12195 && TREE_CODE (arg11) == INTEGER_CST)
12196 return fold_build2 (code, type,
12197 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12198 fold_build2 (BIT_XOR_EXPR, itype,
12199 arg01, arg11)),
12200 arg10);
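/* Worked examples (illustrative annotation, not original GCC code;
   operands are hypothetical):

     (x ^ z) == (y ^ z)   =>   x == y
     (x ^ 3) == (y ^ 5)   =>   (x ^ (3 ^ 5)) == y   =>   (x ^ 6) == y

   so at most one XOR, against a folded constant, survives.  */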
12203 /* Attempt to simplify equality/inequality comparisons of complex
12204 values. Only lower the comparison if the result is known or
12205 can be simplified to a single scalar comparison. */
12206 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12207 || TREE_CODE (arg0) == COMPLEX_CST)
12208 && (TREE_CODE (arg1) == COMPLEX_EXPR
12209 || TREE_CODE (arg1) == COMPLEX_CST))
12211 tree real0, imag0, real1, imag1;
12212 tree rcond, icond;
12214 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12216 real0 = TREE_OPERAND (arg0, 0);
12217 imag0 = TREE_OPERAND (arg0, 1);
12221 real0 = TREE_REALPART (arg0);
12222 imag0 = TREE_IMAGPART (arg0);
12225 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12227 real1 = TREE_OPERAND (arg1, 0);
12228 imag1 = TREE_OPERAND (arg1, 1);
12232 real1 = TREE_REALPART (arg1);
12233 imag1 = TREE_IMAGPART (arg1);
12236 rcond = fold_binary (code, type, real0, real1);
12237 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12239 if (integer_zerop (rcond))
12241 if (code == EQ_EXPR)
12242 return omit_two_operands (type, boolean_false_node,
12243 imag0, imag1);
12244 return fold_build2 (NE_EXPR, type, imag0, imag1);
12248 if (code == NE_EXPR)
12249 return omit_two_operands (type, boolean_true_node,
12250 imag0, imag1);
12251 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12255 icond = fold_binary (code, type, imag0, imag1);
12256 if (icond && TREE_CODE (icond) == INTEGER_CST)
12258 if (integer_zerop (icond))
12260 if (code == EQ_EXPR)
12261 return omit_two_operands (type, boolean_false_node,
12262 real0, real1);
12263 return fold_build2 (NE_EXPR, type, real0, real1);
12267 if (code == NE_EXPR)
12268 return omit_two_operands (type, boolean_true_node,
12269 real0, real1);
12270 return fold_build2 (EQ_EXPR, type, real0, real1);
12281 tem = fold_comparison (code, type, op0, op1);
12282 if (tem != NULL_TREE)
12285 /* Transform comparisons of the form X +- C CMP X. */
12286 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12287 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12288 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12289 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12290 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12291 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12293 tree arg01 = TREE_OPERAND (arg0, 1);
12294 enum tree_code code0 = TREE_CODE (arg0);
12297 if (TREE_CODE (arg01) == REAL_CST)
12298 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12300 is_positive = tree_int_cst_sgn (arg01);
12302 /* (X - c) > X becomes false. */
12303 if (code == GT_EXPR
12304 && ((code0 == MINUS_EXPR && is_positive >= 0)
12305 || (code0 == PLUS_EXPR && is_positive <= 0)))
12307 if (TREE_CODE (arg01) == INTEGER_CST
12308 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12309 fold_overflow_warning (("assuming signed overflow does not "
12310 "occur when assuming that (X - c) > X "
12311 "is always false"),
12312 WARN_STRICT_OVERFLOW_ALL);
12313 return constant_boolean_node (0, type);
12316 /* Likewise (X + c) < X becomes false. */
12317 if (code == LT_EXPR
12318 && ((code0 == PLUS_EXPR && is_positive >= 0)
12319 || (code0 == MINUS_EXPR && is_positive <= 0)))
12321 if (TREE_CODE (arg01) == INTEGER_CST
12322 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12323 fold_overflow_warning (("assuming signed overflow does not "
12324 "occur when assuming that "
12325 "(X + c) < X is always false"),
12326 WARN_STRICT_OVERFLOW_ALL);
12327 return constant_boolean_node (0, type);
12330 /* Convert (X - c) <= X to true. */
12331 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12332 && code == LE_EXPR
12333 && ((code0 == MINUS_EXPR && is_positive >= 0)
12334 || (code0 == PLUS_EXPR && is_positive <= 0)))
12336 if (TREE_CODE (arg01) == INTEGER_CST
12337 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12338 fold_overflow_warning (("assuming signed overflow does not "
12339 "occur when assuming that "
12340 "(X - c) <= X is always true"),
12341 WARN_STRICT_OVERFLOW_ALL);
12342 return constant_boolean_node (1, type);
12345 /* Convert (X + c) >= X to true. */
12346 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12347 && code == GE_EXPR
12348 && ((code0 == PLUS_EXPR && is_positive >= 0)
12349 || (code0 == MINUS_EXPR && is_positive <= 0)))
12351 if (TREE_CODE (arg01) == INTEGER_CST
12352 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12353 fold_overflow_warning (("assuming signed overflow does not "
12354 "occur when assuming that "
12355 "(X + c) >= X is always true"),
12356 WARN_STRICT_OVERFLOW_ALL);
12357 return constant_boolean_node (1, type);
12360 if (TREE_CODE (arg01) == INTEGER_CST)
12362 /* Convert X + c > X and X - c < X to true for integers. */
12363 if (code == GT_EXPR
12364 && ((code0 == PLUS_EXPR && is_positive > 0)
12365 || (code0 == MINUS_EXPR && is_positive < 0)))
12367 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12368 fold_overflow_warning (("assuming signed overflow does "
12369 "not occur when assuming that "
12370 "(X + c) > X is always true"),
12371 WARN_STRICT_OVERFLOW_ALL);
12372 return constant_boolean_node (1, type);
12375 if (code == LT_EXPR
12376 && ((code0 == MINUS_EXPR && is_positive > 0)
12377 || (code0 == PLUS_EXPR && is_positive < 0)))
12379 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12380 fold_overflow_warning (("assuming signed overflow does "
12381 "not occur when assuming that "
12382 "(X - c) < X is always true"),
12383 WARN_STRICT_OVERFLOW_ALL);
12384 return constant_boolean_node (1, type);
12387 /* Convert X + c <= X and X - c >= X to false for integers. */
12388 if (code == LE_EXPR
12389 && ((code0 == PLUS_EXPR && is_positive > 0)
12390 || (code0 == MINUS_EXPR && is_positive < 0)))
12392 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12393 fold_overflow_warning (("assuming signed overflow does "
12394 "not occur when assuming that "
12395 "(X + c) <= X is always false"),
12396 WARN_STRICT_OVERFLOW_ALL);
12397 return constant_boolean_node (0, type);
12400 if (code == GE_EXPR
12401 && ((code0 == MINUS_EXPR && is_positive > 0)
12402 || (code0 == PLUS_EXPR && is_positive < 0)))
12404 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12405 fold_overflow_warning (("assuming signed overflow does "
12406 "not occur when assuming that "
12407 "(X - c) >= X is always false"),
12408 WARN_STRICT_OVERFLOW_ALL);
12409 return constant_boolean_node (0, type);
12414 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12415 This transformation affects the cases which are handled in later
12416 optimizations involving comparisons with non-negative constants. */
12417 if (TREE_CODE (arg1) == INTEGER_CST
12418 && TREE_CODE (arg0) != INTEGER_CST
12419 && tree_int_cst_sgn (arg1) > 0)
12421 if (code == GE_EXPR)
12423 arg1 = const_binop (MINUS_EXPR, arg1,
12424 build_int_cst (TREE_TYPE (arg1), 1), 0);
12425 return fold_build2 (GT_EXPR, type, arg0,
12426 fold_convert (TREE_TYPE (arg0), arg1));
12428 if (code == LT_EXPR)
12430 arg1 = const_binop (MINUS_EXPR, arg1,
12431 build_int_cst (TREE_TYPE (arg1), 1), 0);
12432 return fold_build2 (LE_EXPR, type, arg0,
12433 fold_convert (TREE_TYPE (arg0), arg1));
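/* Worked example (illustrative annotation, not original GCC code; the
   operand is hypothetical):

     x >= 5   =>   x > 4            x < 5   =>   x <= 4

   Canonicalizing on GT_EXPR/LE_EXPR means the extreme-value checks
   below only have to recognize one representative form.  */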
12437 /* Comparisons with the highest or lowest possible integer of
12438 the specified precision will have known values. */
12440 tree arg1_type = TREE_TYPE (arg1);
12441 unsigned int width = TYPE_PRECISION (arg1_type);
12443 if (TREE_CODE (arg1) == INTEGER_CST
12444 && !TREE_OVERFLOW (arg1)
12445 && width <= 2 * HOST_BITS_PER_WIDE_INT
12446 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12448 HOST_WIDE_INT signed_max_hi;
12449 unsigned HOST_WIDE_INT signed_max_lo;
12450 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12452 if (width <= HOST_BITS_PER_WIDE_INT)
12454 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12459 if (TYPE_UNSIGNED (arg1_type))
12461 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12467 max_lo = signed_max_lo;
12468 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12474 width -= HOST_BITS_PER_WIDE_INT;
12475 signed_max_lo = -1;
12476 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12481 if (TYPE_UNSIGNED (arg1_type))
12483 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12488 max_hi = signed_max_hi;
12489 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12493 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12494 && TREE_INT_CST_LOW (arg1) == max_lo)
12498 return omit_one_operand (type, integer_zero_node, arg0);
12501 return fold_build2 (EQ_EXPR, type, op0, op1);
12504 return omit_one_operand (type, integer_one_node, arg0);
12507 return fold_build2 (NE_EXPR, type, op0, op1);
12509 /* The GE_EXPR and LT_EXPR cases above are not normally
12510 reached because of previous transformations. */
12515 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12517 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12521 arg1 = const_binop (PLUS_EXPR, arg1,
12522 build_int_cst (TREE_TYPE (arg1), 1), 0);
12523 return fold_build2 (EQ_EXPR, type,
12524 fold_convert (TREE_TYPE (arg1), arg0),
12527 arg1 = const_binop (PLUS_EXPR, arg1,
12528 build_int_cst (TREE_TYPE (arg1), 1), 0);
12529 return fold_build2 (NE_EXPR, type,
12530 fold_convert (TREE_TYPE (arg1), arg0),
12535 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12537 && TREE_INT_CST_LOW (arg1) == min_lo)
12541 return omit_one_operand (type, integer_zero_node, arg0);
12544 return fold_build2 (EQ_EXPR, type, op0, op1);
12547 return omit_one_operand (type, integer_one_node, arg0);
12550 return fold_build2 (NE_EXPR, type, op0, op1);
12555 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12557 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12561 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12562 return fold_build2 (NE_EXPR, type,
12563 fold_convert (TREE_TYPE (arg1), arg0),
12566 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12567 return fold_build2 (EQ_EXPR, type,
12568 fold_convert (TREE_TYPE (arg1), arg0),
12574 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12575 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12576 && TYPE_UNSIGNED (arg1_type)
12577 /* We will flip the signedness of the comparison operator
12578 associated with the mode of arg1, so the sign bit is
12579 specified by this mode. Check that arg1 is the signed
12580 max associated with this sign bit. */
12581 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12582 /* signed_type does not work on pointer types. */
12583 && INTEGRAL_TYPE_P (arg1_type))
12585 /* The following case also applies to X < signed_max+1
12586 and X >= signed_max+1 because of previous transformations. */
12587 if (code == LE_EXPR || code == GT_EXPR)
12590 st = signed_type_for (TREE_TYPE (arg1));
12591 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12592 type, fold_convert (st, arg0),
12593 build_int_cst (st, 0));
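/* Worked example (illustrative annotation, not original GCC code,
   assuming 32-bit types): for unsigned x,

     x <= 0x7fffffff   =>   (int) x >= 0

   i.e. comparing against the signed maximum reduces to a sign test
   once the operand is reinterpreted as signed.  */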
12599 /* If we are comparing an ABS_EXPR with a constant, we can
12600 convert all the cases into explicit comparisons, but they may
12601 well not be faster than doing the ABS and one comparison.
12602 But ABS (X) <= C is a range comparison, which becomes a subtraction
12603 and a comparison, and is probably faster. */
12604 if (code == LE_EXPR
12605 && TREE_CODE (arg1) == INTEGER_CST
12606 && TREE_CODE (arg0) == ABS_EXPR
12607 && ! TREE_SIDE_EFFECTS (arg0)
12608 && (0 != (tem = negate_expr (arg1)))
12609 && TREE_CODE (tem) == INTEGER_CST
12610 && !TREE_OVERFLOW (tem))
12611 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12612 build2 (GE_EXPR, type,
12613 TREE_OPERAND (arg0, 0), tem),
12614 build2 (LE_EXPR, type,
12615 TREE_OPERAND (arg0, 0), arg1));
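/* Worked example (illustrative annotation, not original GCC code; the
   operand is hypothetical):

     abs (x) <= 5   =>   x >= -5 && x <= 5

   which later range-comparison folding can turn into a single unsigned
   check such as (unsigned) (x + 5) <= 10.  */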
12617 /* Convert ABS_EXPR<x> >= 0 to true. */
12618 strict_overflow_p = false;
12619 if (code == GE_EXPR
12620 && (integer_zerop (arg1)
12621 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12622 && real_zerop (arg1)))
12623 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12625 if (strict_overflow_p)
12626 fold_overflow_warning (("assuming signed overflow does not occur "
12627 "when simplifying comparison of "
12628 "absolute value and zero"),
12629 WARN_STRICT_OVERFLOW_CONDITIONAL);
12630 return omit_one_operand (type, integer_one_node, arg0);
12633 /* Convert ABS_EXPR<x> < 0 to false. */
12634 strict_overflow_p = false;
12635 if (code == LT_EXPR
12636 && (integer_zerop (arg1) || real_zerop (arg1))
12637 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12639 if (strict_overflow_p)
12640 fold_overflow_warning (("assuming signed overflow does not occur "
12641 "when simplifying comparison of "
12642 "absolute value and zero"),
12643 WARN_STRICT_OVERFLOW_CONDITIONAL);
12644 return omit_one_operand (type, integer_zero_node, arg0);
12647 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12648 and similarly for >= into !=. */
12649 if ((code == LT_EXPR || code == GE_EXPR)
12650 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12651 && TREE_CODE (arg1) == LSHIFT_EXPR
12652 && integer_onep (TREE_OPERAND (arg1, 0)))
12653 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12654 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12655 TREE_OPERAND (arg1, 1)),
12656 build_int_cst (TREE_TYPE (arg0), 0));
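/* Worked example (illustrative annotation, not original GCC code;
   operands are hypothetical): for unsigned x,

     x < (1 << n)    =>   (x >> n) == 0
     x >= (1 << n)   =>   (x >> n) != 0

   trading a shift of the constant 1 for a shift of x and a test
   against zero.  */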
12658 if ((code == LT_EXPR || code == GE_EXPR)
12659 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12660 && CONVERT_EXPR_P (arg1)
12661 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12662 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12663 return
12664 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12665 fold_convert (TREE_TYPE (arg0),
12666 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12667 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12668 1))),
12669 build_int_cst (TREE_TYPE (arg0), 0));
12673 case UNORDERED_EXPR:
12681 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12683 t1 = fold_relational_const (code, type, arg0, arg1);
12684 if (t1 != NULL_TREE)
12688 /* If the first operand is NaN, the result is constant. */
12689 if (TREE_CODE (arg0) == REAL_CST
12690 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12691 && (code != LTGT_EXPR || ! flag_trapping_math))
12693 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12694 ? integer_zero_node
12695 : integer_one_node;
12696 return omit_one_operand (type, t1, arg1);
12699 /* If the second operand is NaN, the result is constant. */
12700 if (TREE_CODE (arg1) == REAL_CST
12701 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12702 && (code != LTGT_EXPR || ! flag_trapping_math))
12704 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12705 ? integer_zero_node
12706 : integer_one_node;
12707 return omit_one_operand (type, t1, arg0);
12710 /* Simplify unordered comparison of something with itself. */
12711 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12712 && operand_equal_p (arg0, arg1, 0))
12713 return constant_boolean_node (1, type);
12715 if (code == LTGT_EXPR
12716 && !flag_trapping_math
12717 && operand_equal_p (arg0, arg1, 0))
12718 return constant_boolean_node (0, type);
12720 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12722 tree targ0 = strip_float_extensions (arg0);
12723 tree targ1 = strip_float_extensions (arg1);
12724 tree newtype = TREE_TYPE (targ0);
12726 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12727 newtype = TREE_TYPE (targ1);
12729 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12730 return fold_build2 (code, type, fold_convert (newtype, targ0),
12731 fold_convert (newtype, targ1));
12736 case COMPOUND_EXPR:
12737 /* When pedantic, a compound expression can be neither an lvalue
12738 nor an integer constant expression. */
12739 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12742 /* Don't let (0, 0) be a null pointer constant. */
12742 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12743 : fold_convert (type, arg1);
12744 return pedantic_non_lvalue (tem);
12747 if ((TREE_CODE (arg0) == REAL_CST
12748 && TREE_CODE (arg1) == REAL_CST)
12749 || (TREE_CODE (arg0) == INTEGER_CST
12750 && TREE_CODE (arg1) == INTEGER_CST))
12751 return build_complex (type, arg0, arg1);
12755 /* An ASSERT_EXPR should never be passed to fold_binary. */
12756 gcc_unreachable ();
12760 } /* switch (code) */
12763 /* Callback for walk_tree, looking for LABEL_EXPR.
12764 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12765 Do not check the sub-tree of GOTO_EXPR. */
12768 contains_label_1 (tree *tp,
12769 int *walk_subtrees,
12770 void *data ATTRIBUTE_UNUSED)
12772 switch (TREE_CODE (*tp))
12777 *walk_subtrees = 0;
12784 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
12785 accessible from outside the sub-tree. Returns NULL_TREE if no
12786 addressable label is found. */
12789 contains_label_p (tree st)
12791 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12794 /* Fold a ternary expression of code CODE and type TYPE with operands
12795 OP0, OP1, and OP2. Return the folded expression if folding is
12796 successful. Otherwise, return NULL_TREE. */
12799 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12802 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12803 enum tree_code_class kind = TREE_CODE_CLASS (code);
12805 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12806 && TREE_CODE_LENGTH (code) == 3);
12808 /* Strip any conversions that don't change the mode. This is safe
12809 for every expression, except for a comparison expression because
12810 its signedness is derived from its operands. So, in the latter
12811 case, only strip conversions that don't change the signedness.
12813 Note that this is done as an internal manipulation within the
12814 constant folder, in order to find the simplest representation of
12815 the arguments so that their form can be studied. In any case,
12816 the appropriate type conversions should be put back in the tree
12817 that will get out of the constant folder. */
12832 case COMPONENT_REF:
12833 if (TREE_CODE (arg0) == CONSTRUCTOR
12834 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12836 unsigned HOST_WIDE_INT idx;
12838 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12845 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12846 so all simple results must be passed through pedantic_non_lvalue. */
12847 if (TREE_CODE (arg0) == INTEGER_CST)
12849 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12850 tem = integer_zerop (arg0) ? op2 : op1;
12851 /* Only optimize constant conditions when the selected branch
12852 has the same type as the COND_EXPR. This avoids optimizing
12853 away "c ? x : throw", where the throw has a void type.
12854 Avoid throwing away the operand that contains a label. */
12855 if ((!TREE_SIDE_EFFECTS (unused_op)
12856 || !contains_label_p (unused_op))
12857 && (! VOID_TYPE_P (TREE_TYPE (tem))
12858 || VOID_TYPE_P (type)))
12859 return pedantic_non_lvalue (tem);
12862 if (operand_equal_p (arg1, op2, 0))
12863 return pedantic_omit_one_operand (type, arg1, arg0);
12865 /* If we have A op B ? A : C, we may be able to convert this to a
12866 simpler expression, depending on the operation and the values
12867 of B and C. Signed zeros prevent all of these transformations,
12868 for reasons given above each one.
12870 Also try swapping the arguments and inverting the conditional. */
12871 if (COMPARISON_CLASS_P (arg0)
12872 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12873 arg1, TREE_OPERAND (arg0, 1))
12874 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12876 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12881 if (COMPARISON_CLASS_P (arg0)
12882 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12884 TREE_OPERAND (arg0, 1))
12885 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12887 tem = fold_truth_not_expr (arg0);
12888 if (tem && COMPARISON_CLASS_P (tem))
12890 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12896 /* If the second operand is simpler than the third, swap them
12897 since that produces better jump optimization results. */
12898 if (truth_value_p (TREE_CODE (arg0))
12899 && tree_swap_operands_p (op1, op2, false))
12901 /* See if this can be inverted. If it can't, possibly because
12902 it was a floating-point inequality comparison, don't do
12903 anything. */
12904 tem = fold_truth_not_expr (arg0);
12906 return fold_build3 (code, type, tem, op2, op1);
12909 /* Convert A ? 1 : 0 to simply A. */
12910 if (integer_onep (op1)
12911 && integer_zerop (op2)
12912 /* If we try to convert OP0 to our type, the
12913 call to fold will try to move the conversion inside
12914 a COND, which will recurse. In that case, the COND_EXPR
12915 is probably the best choice, so leave it alone. */
12916 && type == TREE_TYPE (arg0))
12917 return pedantic_non_lvalue (arg0);
12919 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12920 over COND_EXPR in cases such as floating point comparisons. */
12921 if (integer_zerop (op1)
12922 && integer_onep (op2)
12923 && truth_value_p (TREE_CODE (arg0)))
12924 return pedantic_non_lvalue (fold_convert (type,
12925 invert_truthvalue (arg0)));
12927 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12928 if (TREE_CODE (arg0) == LT_EXPR
12929 && integer_zerop (TREE_OPERAND (arg0, 1))
12930 && integer_zerop (op2)
12931 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12933 /* sign_bit_p only checks ARG1 bits within A's precision.
12934 If <sign bit of A> has wider type than A, bits outside
12935 of A's precision in <sign bit of A> need to be checked.
12936 If they are all 0, this optimization needs to be done
12937 in unsigned A's type, if they are all 1 in signed A's type,
12938 otherwise this can't be done. */
12939 if (TYPE_PRECISION (TREE_TYPE (tem))
12940 < TYPE_PRECISION (TREE_TYPE (arg1))
12941 && TYPE_PRECISION (TREE_TYPE (tem))
12942 < TYPE_PRECISION (type))
12944 unsigned HOST_WIDE_INT mask_lo;
12945 HOST_WIDE_INT mask_hi;
12946 int inner_width, outer_width;
12949 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12950 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12951 if (outer_width > TYPE_PRECISION (type))
12952 outer_width = TYPE_PRECISION (type);
12954 if (outer_width > HOST_BITS_PER_WIDE_INT)
12956 mask_hi = ((unsigned HOST_WIDE_INT) -1
12957 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12963 mask_lo = ((unsigned HOST_WIDE_INT) -1
12964 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12966 if (inner_width > HOST_BITS_PER_WIDE_INT)
12968 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12969 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12973 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12974 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12976 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12977 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12979 tem_type = signed_type_for (TREE_TYPE (tem));
12980 tem = fold_convert (tem_type, tem);
12982 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12983 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12985 tem_type = unsigned_type_for (TREE_TYPE (tem));
12986 tem = fold_convert (tem_type, tem);
12993 return fold_convert (type,
12994 fold_build2 (BIT_AND_EXPR,
12995 TREE_TYPE (tem), tem,
12996 fold_convert (TREE_TYPE (tem),
12997 arg1)));
13000 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13001 already handled above. */
13002 if (TREE_CODE (arg0) == BIT_AND_EXPR
13003 && integer_onep (TREE_OPERAND (arg0, 1))
13004 && integer_zerop (op2)
13005 && integer_pow2p (arg1))
13007 tree tem = TREE_OPERAND (arg0, 0);
13009 if (TREE_CODE (tem) == RSHIFT_EXPR
13010 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13011 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13012 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13013 return fold_build2 (BIT_AND_EXPR, type,
13014 TREE_OPERAND (tem, 0), arg1);
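/* Worked example (illustrative annotation, not original GCC code;
   operands are hypothetical):

     ((a >> 3) & 1) ? 8 : 0   =>   a & 8

   The tested bit is simply left in its original position instead of
   being shifted down and back up again.  */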
13017 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13018 is probably obsolete because the first operand should be a
13019 truth value (that's why we have the two cases above), but let's
13020 leave it in until we can confirm this for all front-ends. */
13021 if (integer_zerop (op2)
13022 && TREE_CODE (arg0) == NE_EXPR
13023 && integer_zerop (TREE_OPERAND (arg0, 1))
13024 && integer_pow2p (arg1)
13025 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13026 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13027 arg1, OEP_ONLY_CONST))
13028 return pedantic_non_lvalue (fold_convert (type,
13029 TREE_OPERAND (arg0, 0)));
13031 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13032 if (integer_zerop (op2)
13033 && truth_value_p (TREE_CODE (arg0))
13034 && truth_value_p (TREE_CODE (arg1)))
13035 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13036 fold_convert (type, arg0),
13037 fold_convert (type, arg1));
13039 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13040 if (integer_onep (op2)
13041 && truth_value_p (TREE_CODE (arg0))
13042 && truth_value_p (TREE_CODE (arg1)))
13044 /* Only perform transformation if ARG0 is easily inverted. */
13045 tem = fold_truth_not_expr (arg0);
13047 return fold_build2 (TRUTH_ORIF_EXPR, type,
13048 fold_convert (type, tem),
13049 fold_convert (type, arg1));
13052 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13053 if (integer_zerop (arg1)
13054 && truth_value_p (TREE_CODE (arg0))
13055 && truth_value_p (TREE_CODE (op2)))
13057 /* Only perform transformation if ARG0 is easily inverted. */
13058 tem = fold_truth_not_expr (arg0);
13060 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13061 fold_convert (type, tem),
13062 fold_convert (type, op2));
13065 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13066 if (integer_onep (arg1)
13067 && truth_value_p (TREE_CODE (arg0))
13068 && truth_value_p (TREE_CODE (op2)))
13069 return fold_build2 (TRUTH_ORIF_EXPR, type,
13070 fold_convert (type, arg0),
13071 fold_convert (type, op2));
13076 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13077 of fold_ternary on them. */
13078 gcc_unreachable ();
13080 case BIT_FIELD_REF:
13081 if ((TREE_CODE (arg0) == VECTOR_CST
13082 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13083 && type == TREE_TYPE (TREE_TYPE (arg0)))
13085 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13086 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13088 if (width != 0
13089 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13090 && (idx % width) == 0
13091 && (idx = idx / width)
13092 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13094 tree elements = NULL_TREE;
13096 if (TREE_CODE (arg0) == VECTOR_CST)
13097 elements = TREE_VECTOR_CST_ELTS (arg0);
13100 unsigned HOST_WIDE_INT idx;
13103 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13104 elements = tree_cons (NULL_TREE, value, elements);
13106 while (idx-- > 0 && elements)
13107 elements = TREE_CHAIN (elements);
13109 return TREE_VALUE (elements);
13111 return fold_convert (type, integer_zero_node);
13118 } /* switch (code) */
13121 /* Perform constant folding and related simplification of EXPR.
13122 The related simplifications include x*1 => x, x*0 => 0, etc.,
13123 and application of the associative law.
13124 NOP_EXPR conversions may be removed freely (as long as we
13125 are careful not to change the type of the overall expression).
13126 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13127 but we can constant-fold them if they have constant operands. */
13129 #ifdef ENABLE_FOLD_CHECKING
13130 # define fold(x) fold_1 (x)
13131 static tree fold_1 (tree);
13137 const tree t = expr;
13138 enum tree_code code = TREE_CODE (t);
13139 enum tree_code_class kind = TREE_CODE_CLASS (code);
13142 /* Return right away if a constant. */
13143 if (kind == tcc_constant)
13146 /* CALL_EXPR-like objects with variable numbers of operands are
13147 treated specially. */
13148 if (kind == tcc_vl_exp)
13150 if (code == CALL_EXPR)
13152 tem = fold_call_expr (expr, false);
13153 return tem ? tem : expr;
13158 if (IS_EXPR_CODE_CLASS (kind)
13159 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13161 tree type = TREE_TYPE (t);
13162 tree op0, op1, op2;
13164 switch (TREE_CODE_LENGTH (code))
13167 op0 = TREE_OPERAND (t, 0);
13168 tem = fold_unary (code, type, op0);
13169 return tem ? tem : expr;
13171 op0 = TREE_OPERAND (t, 0);
13172 op1 = TREE_OPERAND (t, 1);
13173 tem = fold_binary (code, type, op0, op1);
13174 return tem ? tem : expr;
13176 op0 = TREE_OPERAND (t, 0);
13177 op1 = TREE_OPERAND (t, 1);
13178 op2 = TREE_OPERAND (t, 2);
13179 tem = fold_ternary (code, type, op0, op1, op2);
13180 return tem ? tem : expr;
13190 tree op0 = TREE_OPERAND (t, 0);
13191 tree op1 = TREE_OPERAND (t, 1);
13193 if (TREE_CODE (op1) == INTEGER_CST
13194 && TREE_CODE (op0) == CONSTRUCTOR
13195 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13197 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13198 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13199 unsigned HOST_WIDE_INT begin = 0;
13201 /* Find a matching index by means of a binary search. */
13202 while (begin != end)
13204 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13205 tree index = VEC_index (constructor_elt, elts, middle)->index;
13207 if (TREE_CODE (index) == INTEGER_CST
13208 && tree_int_cst_lt (index, op1))
13209 begin = middle + 1;
13210 else if (TREE_CODE (index) == INTEGER_CST
13211 && tree_int_cst_lt (op1, index))
13213 else if (TREE_CODE (index) == RANGE_EXPR
13214 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13215 begin = middle + 1;
13216 else if (TREE_CODE (index) == RANGE_EXPR
13217 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13220 return VEC_index (constructor_elt, elts, middle)->value;
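/* Illustrative sketch (annotation, not original GCC code; the
   declaration is hypothetical): given

     static const int tbl[4] = { 10, 20, 30, 40 };

   folding the ARRAY_REF tbl[2] binary-searches the CONSTRUCTOR's
   (index, value) pairs and yields the constant 30; the RANGE_EXPR arms
   above cover initializers such as [0 ... 3] = 0.  */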
13228 return fold (DECL_INITIAL (t));
13232 } /* switch (code) */
13235 #ifdef ENABLE_FOLD_CHECKING
13238 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13239 static void fold_check_failed (const_tree, const_tree);
13240 void print_fold_checksum (const_tree);
13242 /* When --enable-checking=fold, compute a digest of expr before
13243 and after the actual fold call to see whether fold accidentally
13244 changed the original expr. */
13250 struct md5_ctx ctx;
13251 unsigned char checksum_before[16], checksum_after[16];
13254 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13255 md5_init_ctx (&ctx);
13256 fold_checksum_tree (expr, &ctx, ht);
13257 md5_finish_ctx (&ctx, checksum_before);
13260 ret = fold_1 (expr);
13262 md5_init_ctx (&ctx);
13263 fold_checksum_tree (expr, &ctx, ht);
13264 md5_finish_ctx (&ctx, checksum_after);
13267 if (memcmp (checksum_before, checksum_after, 16))
13268 fold_check_failed (expr, ret);
13274 print_fold_checksum (const_tree expr)
13276 struct md5_ctx ctx;
13277 unsigned char checksum[16], cnt;
13280 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13281 md5_init_ctx (&ctx);
13282 fold_checksum_tree (expr, &ctx, ht);
13283 md5_finish_ctx (&ctx, checksum);
13285 for (cnt = 0; cnt < 16; ++cnt)
13286 fprintf (stderr, "%02x", checksum[cnt]);
13287 putc ('\n', stderr);
13291 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13293 internal_error ("fold check: original tree changed by fold");
13297 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13300 enum tree_code code;
13301 struct tree_function_decl buf;
13306 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13307 <= sizeof (struct tree_function_decl))
13308 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13311 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13315 code = TREE_CODE (expr);
13316 if (TREE_CODE_CLASS (code) == tcc_declaration
13317 && DECL_ASSEMBLER_NAME_SET_P (expr))
13319 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13320 memcpy ((char *) &buf, expr, tree_size (expr));
13321 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13322 expr = (tree) &buf;
13324 else if (TREE_CODE_CLASS (code) == tcc_type
13325 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13326 || TYPE_CACHED_VALUES_P (expr)
13327 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13329 /* Allow these fields to be modified. */
13331 memcpy ((char *) &buf, expr, tree_size (expr));
13332 expr = tmp = (tree) &buf;
13333 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13334 TYPE_POINTER_TO (tmp) = NULL;
13335 TYPE_REFERENCE_TO (tmp) = NULL;
13336 if (TYPE_CACHED_VALUES_P (tmp))
13338 TYPE_CACHED_VALUES_P (tmp) = 0;
13339 TYPE_CACHED_VALUES (tmp) = NULL;
13342 md5_process_bytes (expr, tree_size (expr), ctx);
13343 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13344 if (TREE_CODE_CLASS (code) != tcc_type
13345 && TREE_CODE_CLASS (code) != tcc_declaration
13346 && code != TREE_LIST
13347 && code != SSA_NAME)
13348 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13349 switch (TREE_CODE_CLASS (code))
13355 md5_process_bytes (TREE_STRING_POINTER (expr),
13356 TREE_STRING_LENGTH (expr), ctx);
13359 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13360 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13363 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13369 case tcc_exceptional:
13373 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13374 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13375 expr = TREE_CHAIN (expr);
13376 goto recursive_label;
13379 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13380 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13386 case tcc_expression:
13387 case tcc_reference:
13388 case tcc_comparison:
13391 case tcc_statement:
13393 len = TREE_OPERAND_LENGTH (expr);
13394 for (i = 0; i < len; ++i)
13395 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13397 case tcc_declaration:
13398 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13399 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13400 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13402 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13403 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13404 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13405 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13406 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13408 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13409 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13411 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13413 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13414 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13415 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13419 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13420 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13421 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13422 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13423 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13424 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13425 if (INTEGRAL_TYPE_P (expr)
13426 || SCALAR_FLOAT_TYPE_P (expr))
13428 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13429 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13431 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13432 if (TREE_CODE (expr) == RECORD_TYPE
13433 || TREE_CODE (expr) == UNION_TYPE
13434 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13435 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13436 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13443 /* Helper function for outputting the checksum of a tree T. When
13444 debugging with gdb, you can "define mynext" to be "next" followed
13445 by "call debug_fold_checksum (op0)", then just trace down till the
13449 debug_fold_checksum (const_tree t)
13452 unsigned char checksum[16];
13453 struct md5_ctx ctx;
13454 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13456 md5_init_ctx (&ctx);
13457 fold_checksum_tree (t, &ctx, ht);
13458 md5_finish_ctx (&ctx, checksum);
13461 for (i = 0; i < 16; i++)
13462 fprintf (stderr, "%d ", checksum[i]);
13464 fprintf (stderr, "\n");
13469 /* Fold a unary tree expression with code CODE of type TYPE with an
13470 operand OP0. Return a folded expression if successful. Otherwise,
13471 return a tree expression with code CODE of type TYPE with an
13472 operand OP0. */
13475 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13478 #ifdef ENABLE_FOLD_CHECKING
13479 unsigned char checksum_before[16], checksum_after[16];
13480 struct md5_ctx ctx;
13483 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13484 md5_init_ctx (&ctx);
13485 fold_checksum_tree (op0, &ctx, ht);
13486 md5_finish_ctx (&ctx, checksum_before);
13490 tem = fold_unary (code, type, op0);
13492 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13494 #ifdef ENABLE_FOLD_CHECKING
13495 md5_init_ctx (&ctx);
13496 fold_checksum_tree (op0, &ctx, ht);
13497 md5_finish_ctx (&ctx, checksum_after);
13500 if (memcmp (checksum_before, checksum_after, 16))
13501 fold_check_failed (op0, tem);
13506 /* Fold a binary tree expression with code CODE of type TYPE with
13507 operands OP0 and OP1. Return a folded expression if successful.
13508 Otherwise, return a tree expression with code CODE of type TYPE
13509 with operands OP0 and OP1. */
13512 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13516 #ifdef ENABLE_FOLD_CHECKING
13517 unsigned char checksum_before_op0[16],
13518 checksum_before_op1[16],
13519 checksum_after_op0[16],
13520 checksum_after_op1[16];
13521 struct md5_ctx ctx;
13524 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13525 md5_init_ctx (&ctx);
13526 fold_checksum_tree (op0, &ctx, ht);
13527 md5_finish_ctx (&ctx, checksum_before_op0);
13530 md5_init_ctx (&ctx);
13531 fold_checksum_tree (op1, &ctx, ht);
13532 md5_finish_ctx (&ctx, checksum_before_op1);
13536 tem = fold_binary (code, type, op0, op1);
13538 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13540 #ifdef ENABLE_FOLD_CHECKING
13541 md5_init_ctx (&ctx);
13542 fold_checksum_tree (op0, &ctx, ht);
13543 md5_finish_ctx (&ctx, checksum_after_op0);
13546 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13547 fold_check_failed (op0, tem);
13549 md5_init_ctx (&ctx);
13550 fold_checksum_tree (op1, &ctx, ht);
13551 md5_finish_ctx (&ctx, checksum_after_op1);
13554 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13555 fold_check_failed (op1, tem);
13560 /* Fold a ternary tree expression with code CODE of type TYPE with
13561 operands OP0, OP1, and OP2. Return a folded expression if
13562 successful. Otherwise, return a tree expression with code CODE of
13563 type TYPE with operands OP0, OP1, and OP2. */
13566 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13570 #ifdef ENABLE_FOLD_CHECKING
13571 unsigned char checksum_before_op0[16],
13572 checksum_before_op1[16],
13573 checksum_before_op2[16],
13574 checksum_after_op0[16],
13575 checksum_after_op1[16],
13576 checksum_after_op2[16];
13577 struct md5_ctx ctx;
13580 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13581 md5_init_ctx (&ctx);
13582 fold_checksum_tree (op0, &ctx, ht);
13583 md5_finish_ctx (&ctx, checksum_before_op0);
13586 md5_init_ctx (&ctx);
13587 fold_checksum_tree (op1, &ctx, ht);
13588 md5_finish_ctx (&ctx, checksum_before_op1);
13591 md5_init_ctx (&ctx);
13592 fold_checksum_tree (op2, &ctx, ht);
13593 md5_finish_ctx (&ctx, checksum_before_op2);
13597 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13598 tem = fold_ternary (code, type, op0, op1, op2);
13600 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13602 #ifdef ENABLE_FOLD_CHECKING
13603 md5_init_ctx (&ctx);
13604 fold_checksum_tree (op0, &ctx, ht);
13605 md5_finish_ctx (&ctx, checksum_after_op0);
13608 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13609 fold_check_failed (op0, tem);
13611 md5_init_ctx (&ctx);
13612 fold_checksum_tree (op1, &ctx, ht);
13613 md5_finish_ctx (&ctx, checksum_after_op1);
13616 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13617 fold_check_failed (op1, tem);
13619 md5_init_ctx (&ctx);
13620 fold_checksum_tree (op2, &ctx, ht);
13621 md5_finish_ctx (&ctx, checksum_after_op2);
13624 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13625 fold_check_failed (op2, tem);
13630 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13631 arguments in ARGARRAY, and a null static chain.
13632 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13633 of type TYPE from the given operands as constructed by build_call_array. */
13636 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13639 #ifdef ENABLE_FOLD_CHECKING
13640 unsigned char checksum_before_fn[16],
13641 checksum_before_arglist[16],
13642 checksum_after_fn[16],
13643 checksum_after_arglist[16];
13644 struct md5_ctx ctx;
13648 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13649 md5_init_ctx (&ctx);
13650 fold_checksum_tree (fn, &ctx, ht);
13651 md5_finish_ctx (&ctx, checksum_before_fn);
13654 md5_init_ctx (&ctx);
13655 for (i = 0; i < nargs; i++)
13656 fold_checksum_tree (argarray[i], &ctx, ht);
13657 md5_finish_ctx (&ctx, checksum_before_arglist);
13661 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13663 #ifdef ENABLE_FOLD_CHECKING
13664 md5_init_ctx (&ctx);
13665 fold_checksum_tree (fn, &ctx, ht);
13666 md5_finish_ctx (&ctx, checksum_after_fn);
13669 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13670 fold_check_failed (fn, tem);
13672 md5_init_ctx (&ctx);
13673 for (i = 0; i < nargs; i++)
13674 fold_checksum_tree (argarray[i], &ctx, ht);
13675 md5_finish_ctx (&ctx, checksum_after_arglist);
13678 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13679 fold_check_failed (NULL_TREE, tem);
13684 /* Perform constant folding and related simplification of initializer
13685 expression EXPR. These behave identically to "fold_buildN" but ignore
13686 potential run-time traps and exceptions that fold must preserve. */
13688 #define START_FOLD_INIT \
13689 int saved_signaling_nans = flag_signaling_nans;\
13690 int saved_trapping_math = flag_trapping_math;\
13691 int saved_rounding_math = flag_rounding_math;\
13692 int saved_trapv = flag_trapv;\
13693 int saved_folding_initializer = folding_initializer;\
13694 flag_signaling_nans = 0;\
13695 flag_trapping_math = 0;\
13696 flag_rounding_math = 0;\
13698 folding_initializer = 1;
13700 #define END_FOLD_INIT \
13701 flag_signaling_nans = saved_signaling_nans;\
13702 flag_trapping_math = saved_trapping_math;\
13703 flag_rounding_math = saved_rounding_math;\
13704 flag_trapv = saved_trapv;\
13705 folding_initializer = saved_folding_initializer;
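/* Usage sketch (illustrative annotation, not original GCC code; the
   declaration is hypothetical): the _initializer variants below wrap
   the ordinary fold_buildN entry points with these macros, so that an
   initializer such as

     static const double third = 1.0 / 3.0;

   is still folded at compile time even when -ftrapping-math or
   -frounding-math would otherwise make fold keep the division for run
   time.  */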
13708 fold_build1_initializer (enum tree_code code, tree type, tree op)
13713 result = fold_build1 (code, type, op);
13720 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13725 result = fold_build2 (code, type, op0, op1);
13732 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13738 result = fold_build3 (code, type, op0, op1, op2);
13745 fold_build_call_array_initializer (tree type, tree fn,
13746 int nargs, tree *argarray)
13751 result = fold_build_call_array (type, fn, nargs, argarray);
13757 #undef START_FOLD_INIT
13758 #undef END_FOLD_INIT
13760 /* Determine if first argument is a multiple of second argument. Return 0 if
13761 it is not, or we cannot easily determine it to be.
13763 An example of the sort of thing we care about (at this point; this routine
13764 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13765 fold cases do now) is discovering that
13767 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13769 is a multiple of
13771 SAVE_EXPR (J * 8)
13773 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13775 This code also handles discovering that
13777 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13779 is a multiple of 8 so we don't have to worry about dealing with a
13780 possible remainder.
13782 Note that we *look* inside a SAVE_EXPR only to determine how it was
13783 calculated; it is not safe for fold to do much of anything else with the
13784 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13785 at run time. For example, the latter example above *cannot* be implemented
13786 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13787 evaluation time of the original SAVE_EXPR is not necessarily the same at
13788 the time the new expression is evaluated. The only optimization of this
13789 sort that would be valid is changing
13791 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13793 divided by 8 to
13795 SAVE_EXPR (I) * SAVE_EXPR (J)
13797 (where the same SAVE_EXPR (J) is used in the original and the
13798 transformed version). */
13801 multiple_of_p (tree type, const_tree top, const_tree bottom)
13803 if (operand_equal_p (top, bottom, 0))
13806 if (TREE_CODE (type) != INTEGER_TYPE)
13809 switch (TREE_CODE (top))
13812 /* Bitwise and provides a power of two multiple. If the mask is
13813 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13814 if (!integer_pow2p (bottom))
13819 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13820 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13824 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13825 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13828 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13832 op1 = TREE_OPERAND (top, 1);
13833 /* const_binop may not detect overflow correctly,
13834 so check for it explicitly here. */
13835 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13836 > TREE_INT_CST_LOW (op1)
13837 && TREE_INT_CST_HIGH (op1) == 0
13838 && 0 != (t1 = fold_convert (type,
13839 const_binop (LSHIFT_EXPR,
13842 && !TREE_OVERFLOW (t1))
13843 return multiple_of_p (type, t1, bottom);
13848 /* Can't handle conversions from non-integral or wider integral type. */
13849 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13850 || (TYPE_PRECISION (type)
13851 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13854 /* ... fall through ... */
13857 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13860 if (TREE_CODE (bottom) != INTEGER_CST
13861 || integer_zerop (bottom)
13862 || (TYPE_UNSIGNED (type)
13863 && (tree_int_cst_sgn (top) < 0
13864 || tree_int_cst_sgn (bottom) < 0)))
13866 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
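
/* Editor's sketch (not part of GCC): for the INTEGER_CST case just above,
   the question reduces to a remainder test, guarded against a zero
   divisor much as the code guards the constant case.  Over plain host
   integers:  */

static int
const_multiple_of_p (long top, long bottom)
{
  if (bottom == 0)
    return 0;			/* no meaningful answer for 0 */
  return top % bottom == 0;	/* multiple iff remainder is zero */
}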
13874 /* Return true if CODE or TYPE is known to be non-negative. */
13877 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13879 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13880 && truth_value_p (code))
13881 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13882 have a signed:1 type (where the values are -1 and 0). */
13887 /* Return true if (CODE OP0) is known to be non-negative. If the return
13888 value is based on the assumption that signed overflow is undefined,
13889 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13890 *STRICT_OVERFLOW_P. */
13893 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13894 bool *strict_overflow_p)
13896 if (TYPE_UNSIGNED (type))
13902 /* We can't return 1 if flag_wrapv is set because
13903 ABS_EXPR<INT_MIN> = INT_MIN. */
13904 if (!INTEGRAL_TYPE_P (type))
13906 if (TYPE_OVERFLOW_UNDEFINED (type))
13908 *strict_overflow_p = true;
13913 case NON_LVALUE_EXPR:
13915 case FIX_TRUNC_EXPR:
13916 return tree_expr_nonnegative_warnv_p (op0,
13917 strict_overflow_p);
13921 tree inner_type = TREE_TYPE (op0);
13922 tree outer_type = type;
13924 if (TREE_CODE (outer_type) == REAL_TYPE)
13926 if (TREE_CODE (inner_type) == REAL_TYPE)
13927 return tree_expr_nonnegative_warnv_p (op0,
13928 strict_overflow_p);
13929 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13931 if (TYPE_UNSIGNED (inner_type))
13933 return tree_expr_nonnegative_warnv_p (op0,
13934 strict_overflow_p);
13937 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13939 if (TREE_CODE (inner_type) == REAL_TYPE)
13940 return tree_expr_nonnegative_warnv_p (op0,
13941 strict_overflow_p);
13942 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13943 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13944 && TYPE_UNSIGNED (inner_type);
13950 return tree_simple_nonnegative_warnv_p (code, type);
13953 /* We don't know the sign of `t', so be conservative and return false. */
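
/* Editor's sketch (not part of GCC): the ABS_EXPR caveat above.  With
   wrapping signed arithmetic, negating the most negative integer yields
   itself, so ABS_EXPR alone cannot prove nonnegativity.  Demonstrated
   without signed overflow by going through unsigned:  */

#include <limits.h>

static int
abs_of_int_min_stays_negative (void)
{
  unsigned int u = (unsigned int) INT_MIN;
  return (int) -u == INT_MIN;	/* wrapped negation: evaluates to 1 */
}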
13957 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13958 value is based on the assumption that signed overflow is undefined,
13959 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13960 *STRICT_OVERFLOW_P. */
13963 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13964 tree op1, bool *strict_overflow_p)
13966 if (TYPE_UNSIGNED (type))
13971 case POINTER_PLUS_EXPR:
13973 if (FLOAT_TYPE_P (type))
13974 return (tree_expr_nonnegative_warnv_p (op0,
13976 && tree_expr_nonnegative_warnv_p (op1,
13977 strict_overflow_p));
13979 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13980 both unsigned and at least 2 bits shorter than the result. */
13981 if (TREE_CODE (type) == INTEGER_TYPE
13982 && TREE_CODE (op0) == NOP_EXPR
13983 && TREE_CODE (op1) == NOP_EXPR)
13985 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13986 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13987 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13988 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13990 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13991 TYPE_PRECISION (inner2)) + 1;
13992 return prec < TYPE_PRECISION (type);
13998 if (FLOAT_TYPE_P (type))
14000 /* x * x for floating point x is always non-negative. */
14001 if (operand_equal_p (op0, op1, 0))
14003 return (tree_expr_nonnegative_warnv_p (op0,
14005 && tree_expr_nonnegative_warnv_p (op1,
14006 strict_overflow_p));
14009 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14010 both unsigned and their total width is shorter than the result's. */
14011 if (TREE_CODE (type) == INTEGER_TYPE
14012 && TREE_CODE (op0) == NOP_EXPR
14013 && TREE_CODE (op1) == NOP_EXPR)
14015 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14016 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14017 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14018 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14019 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14020 < TYPE_PRECISION (type);
14026 return (tree_expr_nonnegative_warnv_p (op0,
14028 || tree_expr_nonnegative_warnv_p (op1,
14029 strict_overflow_p));
14035 case TRUNC_DIV_EXPR:
14036 case CEIL_DIV_EXPR:
14037 case FLOOR_DIV_EXPR:
14038 case ROUND_DIV_EXPR:
14039 return (tree_expr_nonnegative_warnv_p (op0,
14041 && tree_expr_nonnegative_warnv_p (op1,
14042 strict_overflow_p));
14044 case TRUNC_MOD_EXPR:
14045 case CEIL_MOD_EXPR:
14046 case FLOOR_MOD_EXPR:
14047 case ROUND_MOD_EXPR:
14048 return tree_expr_nonnegative_warnv_p (op0,
14049 strict_overflow_p);
14051 return tree_simple_nonnegative_warnv_p (code, type);
14054 /* We don't know the sign of `t', so be conservative and return false. */
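
/* Editor's sketch (not part of GCC): the width argument behind the
   PLUS_EXPR case above.  Two zero-extended P-bit values sum to at most
   P+1 bits, so if P+1 is still below the result's precision the sign
   bit can never be set.  Concretely, for 16-bit operands in 32 bits:  */

static int
sum_of_zero_extends_is_nonnegative (unsigned short a, unsigned short b)
{
  int sum = (int) a + (int) b;	/* at most 65535 + 65535 = 131070 */
  return sum >= 0;		/* always true: 17 bits < 32 bits */
}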
14058 /* Return true if T is known to be non-negative. If the return
14059 value is based on the assumption that signed overflow is undefined,
14060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14061 *STRICT_OVERFLOW_P. */
14064 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14066 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14069 switch (TREE_CODE (t))
14072 /* Query VRP to see if it has recorded any information about
14073 the range of this object. */
14074 return ssa_name_nonnegative_p (t);
14077 return tree_int_cst_sgn (t) >= 0;
14080 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14083 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14086 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14088 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14089 strict_overflow_p));
14091 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14094 /* We don't know the sign of `t', so be conservative and return false. */
14098 /* Return true if T is known to be non-negative. If the return
14099 value is based on the assumption that signed overflow is undefined,
14100 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14101 *STRICT_OVERFLOW_P. */
14104 tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
14105 tree arg0, tree arg1, bool *strict_overflow_p)
14107 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14108 switch (DECL_FUNCTION_CODE (fndecl))
14110 CASE_FLT_FN (BUILT_IN_ACOS):
14111 CASE_FLT_FN (BUILT_IN_ACOSH):
14112 CASE_FLT_FN (BUILT_IN_CABS):
14113 CASE_FLT_FN (BUILT_IN_COSH):
14114 CASE_FLT_FN (BUILT_IN_ERFC):
14115 CASE_FLT_FN (BUILT_IN_EXP):
14116 CASE_FLT_FN (BUILT_IN_EXP10):
14117 CASE_FLT_FN (BUILT_IN_EXP2):
14118 CASE_FLT_FN (BUILT_IN_FABS):
14119 CASE_FLT_FN (BUILT_IN_FDIM):
14120 CASE_FLT_FN (BUILT_IN_HYPOT):
14121 CASE_FLT_FN (BUILT_IN_POW10):
14122 CASE_INT_FN (BUILT_IN_FFS):
14123 CASE_INT_FN (BUILT_IN_PARITY):
14124 CASE_INT_FN (BUILT_IN_POPCOUNT):
14125 case BUILT_IN_BSWAP32:
14126 case BUILT_IN_BSWAP64:
14130 CASE_FLT_FN (BUILT_IN_SQRT):
14131 /* sqrt(-0.0) is -0.0. */
14132 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14134 return tree_expr_nonnegative_warnv_p (arg0,
14135 strict_overflow_p);
14137 CASE_FLT_FN (BUILT_IN_ASINH):
14138 CASE_FLT_FN (BUILT_IN_ATAN):
14139 CASE_FLT_FN (BUILT_IN_ATANH):
14140 CASE_FLT_FN (BUILT_IN_CBRT):
14141 CASE_FLT_FN (BUILT_IN_CEIL):
14142 CASE_FLT_FN (BUILT_IN_ERF):
14143 CASE_FLT_FN (BUILT_IN_EXPM1):
14144 CASE_FLT_FN (BUILT_IN_FLOOR):
14145 CASE_FLT_FN (BUILT_IN_FMOD):
14146 CASE_FLT_FN (BUILT_IN_FREXP):
14147 CASE_FLT_FN (BUILT_IN_LCEIL):
14148 CASE_FLT_FN (BUILT_IN_LDEXP):
14149 CASE_FLT_FN (BUILT_IN_LFLOOR):
14150 CASE_FLT_FN (BUILT_IN_LLCEIL):
14151 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14152 CASE_FLT_FN (BUILT_IN_LLRINT):
14153 CASE_FLT_FN (BUILT_IN_LLROUND):
14154 CASE_FLT_FN (BUILT_IN_LRINT):
14155 CASE_FLT_FN (BUILT_IN_LROUND):
14156 CASE_FLT_FN (BUILT_IN_MODF):
14157 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14158 CASE_FLT_FN (BUILT_IN_RINT):
14159 CASE_FLT_FN (BUILT_IN_ROUND):
14160 CASE_FLT_FN (BUILT_IN_SCALB):
14161 CASE_FLT_FN (BUILT_IN_SCALBLN):
14162 CASE_FLT_FN (BUILT_IN_SCALBN):
14163 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14164 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14165 CASE_FLT_FN (BUILT_IN_SINH):
14166 CASE_FLT_FN (BUILT_IN_TANH):
14167 CASE_FLT_FN (BUILT_IN_TRUNC):
14168 /* True if the 1st argument is nonnegative. */
14169 return tree_expr_nonnegative_warnv_p (arg0,
14170 strict_overflow_p);
14172 CASE_FLT_FN (BUILT_IN_FMAX):
14173 /* True if the 1st OR 2nd arguments are nonnegative. */
14174 return (tree_expr_nonnegative_warnv_p (arg0,
14176 || (tree_expr_nonnegative_warnv_p (arg1,
14177 strict_overflow_p)));
14179 CASE_FLT_FN (BUILT_IN_FMIN):
14180 /* True if the 1st AND 2nd arguments are nonnegative. */
14181 return (tree_expr_nonnegative_warnv_p (arg0,
14183 && (tree_expr_nonnegative_warnv_p (arg1,
14184 strict_overflow_p)));
14186 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14187 /* True if the 2nd argument is nonnegative. */
14188 return tree_expr_nonnegative_warnv_p (arg1,
14189 strict_overflow_p);
14191 CASE_FLT_FN (BUILT_IN_POWI):
14192 /* True if the 1st argument is nonnegative or the second
14193 argument is an even integer. */
14194 if (TREE_CODE (arg1) == INTEGER_CST
14195 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14197 return tree_expr_nonnegative_warnv_p (arg0,
14198 strict_overflow_p);
14200 CASE_FLT_FN (BUILT_IN_POW):
14201 /* True if the 1st argument is nonnegative or the second
14202 argument is an even integer valued real. */
14203 if (TREE_CODE (arg1) == REAL_CST)
14208 c = TREE_REAL_CST (arg1);
14209 n = real_to_integer (&c);
14212 REAL_VALUE_TYPE cint;
14213 real_from_integer (&cint, VOIDmode, n,
14214 n < 0 ? -1 : 0, 0);
14215 if (real_identical (&c, &cint))
14219 return tree_expr_nonnegative_warnv_p (arg0,
14220 strict_overflow_p);
14225 return tree_simple_nonnegative_warnv_p (code,
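
/* Editor's sketch (not part of GCC): the BUILT_IN_POW/POWI cases accept
   an even integral exponent because pow (x, 2k) == (x^k)^2 >= 0 for
   every real x.  A direct runtime check of that property:  */

#include <math.h>

static int
pow_with_even_exponent_is_nonnegative (double x, int k)
{
  double r = pow (x, 2.0 * (double) k);	/* even integral exponent */
  return !(r < 0.0);			/* nonnegative (or NaN) */
}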
14229 /* Return true if T is known to be non-negative. If the return
14230 value is based on the assumption that signed overflow is undefined,
14231 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14232 *STRICT_OVERFLOW_P. */
14235 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14237 enum tree_code code = TREE_CODE (t);
14238 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14245 tree temp = TARGET_EXPR_SLOT (t);
14246 t = TARGET_EXPR_INITIAL (t);
14248 /* If the initializer is non-void, then it's a normal expression
14249 that will be assigned to the slot. */
14250 if (!VOID_TYPE_P (t))
14251 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14253 /* Otherwise, the initializer sets the slot in some way. One common
14254 way is an assignment statement at the end of the initializer. */
14257 if (TREE_CODE (t) == BIND_EXPR)
14258 t = expr_last (BIND_EXPR_BODY (t));
14259 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14260 || TREE_CODE (t) == TRY_CATCH_EXPR)
14261 t = expr_last (TREE_OPERAND (t, 0));
14262 else if (TREE_CODE (t) == STATEMENT_LIST)
14267 if ((TREE_CODE (t) == MODIFY_EXPR
14268 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14269 && GENERIC_TREE_OPERAND (t, 0) == temp)
14270 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14271 strict_overflow_p);
14278 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14279 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14281 return tree_call_nonnegative_warnv_p (TREE_CODE (t),
14283 get_callee_fndecl (t),
14286 strict_overflow_p);
14288 case COMPOUND_EXPR:
14290 case GIMPLE_MODIFY_STMT:
14291 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14292 strict_overflow_p);
14294 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14295 strict_overflow_p);
14297 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14298 strict_overflow_p);
14301 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14305 /* We don't know the sign of `t', so be conservative and return false. */
14309 /* Return true if T is known to be non-negative. If the return
14310 value is based on the assumption that signed overflow is undefined,
14311 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14312 *STRICT_OVERFLOW_P. */
14315 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14317 enum tree_code code;
14318 if (t == error_mark_node)
14321 code = TREE_CODE (t);
14322 switch (TREE_CODE_CLASS (code))
14325 case tcc_comparison:
14326 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14328 TREE_OPERAND (t, 0),
14329 TREE_OPERAND (t, 1),
14330 strict_overflow_p);
14333 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14335 TREE_OPERAND (t, 0),
14336 strict_overflow_p);
14339 case tcc_declaration:
14340 case tcc_reference:
14341 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14349 case TRUTH_AND_EXPR:
14350 case TRUTH_OR_EXPR:
14351 case TRUTH_XOR_EXPR:
14352 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14354 TREE_OPERAND (t, 0),
14355 TREE_OPERAND (t, 1),
14356 strict_overflow_p);
14357 case TRUTH_NOT_EXPR:
14358 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14360 TREE_OPERAND (t, 0),
14361 strict_overflow_p);
14368 case WITH_SIZE_EXPR:
14372 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14375 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14379 /* Return true if `t' is known to be non-negative. Handle warnings
14380 about undefined signed overflow. */
14383 tree_expr_nonnegative_p (tree t)
14385 bool ret, strict_overflow_p;
14387 strict_overflow_p = false;
14388 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14389 if (strict_overflow_p)
14390 fold_overflow_warning (("assuming signed overflow does not occur when "
14391 "determining that expression is always "
14393 WARN_STRICT_OVERFLOW_MISC);
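
/* Editor's sketch (not part of GCC): the wrapper pattern used by
   tree_expr_nonnegative_p above -- thread a local flag through the
   _warnv worker, then convert it into a diagnostic.  With hypothetical
   stand-ins for the worker and the warning sink:  */

#include <stdio.h>

static int
nonnegative_worker (int x, int *assumed_undefined_overflow_p)
{
  if (x > 0)
    {
      *assumed_undefined_overflow_p = 1;	/* record the assumption */
      return 1;
    }
  return x == 0;
}

static int
nonnegative_and_warn (int x)
{
  int assumed = 0;
  int ret = nonnegative_worker (x, &assumed);
  if (assumed)
    fprintf (stderr, "assuming signed overflow does not occur\n");
  return ret;
}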
14398 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14399 For floating point we further ensure that T is not denormal.
14400 Similar logic is present in nonzero_address in rtlanal.h.
14402 If the return value is based on the assumption that signed overflow
14403 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14404 change *STRICT_OVERFLOW_P. */
14407 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14408 bool *strict_overflow_p)
14413 return tree_expr_nonzero_warnv_p (op0,
14414 strict_overflow_p);
14418 tree inner_type = TREE_TYPE (op0);
14419 tree outer_type = type;
14421 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14422 && tree_expr_nonzero_warnv_p (op0,
14423 strict_overflow_p));
14427 case NON_LVALUE_EXPR:
14428 return tree_expr_nonzero_warnv_p (op0,
14429 strict_overflow_p);
14438 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14439 For floating point we further ensure that T is not denormal.
14440 Similar logic is present in nonzero_address in rtlanal.h.
14442 If the return value is based on the assumption that signed overflow
14443 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14444 change *STRICT_OVERFLOW_P. */
14447 tree_binary_nonzero_warnv_p (enum tree_code code,
14450 tree op1, bool *strict_overflow_p)
14452 bool sub_strict_overflow_p;
14455 case POINTER_PLUS_EXPR:
14457 if (TYPE_OVERFLOW_UNDEFINED (type))
14459 /* In the presence of negative values it is hard
14460 to say anything definite. */
14461 sub_strict_overflow_p = false;
14462 if (!tree_expr_nonnegative_warnv_p (op0,
14463 &sub_strict_overflow_p)
14464 || !tree_expr_nonnegative_warnv_p (op1,
14465 &sub_strict_overflow_p))
14467 /* One of the operands must be positive and the other non-negative. */
14468 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14469 overflows, on a twos-complement machine the sum of two
14470 nonnegative numbers can never be zero. */
14471 return (tree_expr_nonzero_warnv_p (op0,
14473 || tree_expr_nonzero_warnv_p (op1,
14474 strict_overflow_p));
14479 if (TYPE_OVERFLOW_UNDEFINED (type))
14481 if (tree_expr_nonzero_warnv_p (op0,
14483 && tree_expr_nonzero_warnv_p (op1,
14484 strict_overflow_p))
14486 *strict_overflow_p = true;
14493 sub_strict_overflow_p = false;
14494 if (tree_expr_nonzero_warnv_p (op0,
14495 &sub_strict_overflow_p)
14496 && tree_expr_nonzero_warnv_p (op1,
14497 &sub_strict_overflow_p))
14499 if (sub_strict_overflow_p)
14500 *strict_overflow_p = true;
14505 sub_strict_overflow_p = false;
14506 if (tree_expr_nonzero_warnv_p (op0,
14507 &sub_strict_overflow_p))
14509 if (sub_strict_overflow_p)
14510 *strict_overflow_p = true;
14512 /* When both operands are nonzero, then MAX must be too. */
14513 if (tree_expr_nonzero_warnv_p (op1,
14514 strict_overflow_p))
14517 /* MAX where operand 0 is positive is positive. */
14518 return tree_expr_nonnegative_warnv_p (op0,
14519 strict_overflow_p);
14521 /* MAX where operand 1 is positive is positive. */
14522 else if (tree_expr_nonzero_warnv_p (op1,
14523 &sub_strict_overflow_p)
14524 && tree_expr_nonnegative_warnv_p (op1,
14525 &sub_strict_overflow_p))
14527 if (sub_strict_overflow_p)
14528 *strict_overflow_p = true;
14534 return (tree_expr_nonzero_warnv_p (op1,
14536 || tree_expr_nonzero_warnv_p (op0,
14537 strict_overflow_p));
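
/* Editor's sketch (not part of GCC): the PLUS_EXPR reasoning above.
   With N-bit wraparound, a positive plus a nonnegative value is at most
   (2^(N-1) - 1) + (2^(N-1) - 1) = 2^N - 2, so it can never wrap to
   exactly zero.  Exhaustively verifiable at 8 bits:  */

static int
positive_plus_nonnegative_never_zero (void)
{
  int x, y;
  for (x = 1; x <= 127; x++)		/* positive values */
    for (y = 0; y <= 127; y++)		/* nonnegative values */
      if ((unsigned char) (x + y) == 0)	/* 8-bit wrapping sum */
	return 0;			/* would refute the claim */
  return 1;				/* claim holds for all pairs */
}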
14546 /* Return true when T is an address and is known to be nonzero.
14547 For floating point we further ensure that T is not denormal.
14548 Similar logic is present in nonzero_address in rtlanal.h.
14550 If the return value is based on the assumption that signed overflow
14551 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14552 change *STRICT_OVERFLOW_P. */
14555 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14557 bool sub_strict_overflow_p;
14558 switch (TREE_CODE (t))
14561 /* Query VRP to see if it has recorded any information about
14562 the range of this object. */
14563 return ssa_name_nonzero_p (t);
14566 return !integer_zerop (t);
14570 tree base = get_base_address (TREE_OPERAND (t, 0));
14575 /* Weak declarations may link to NULL. */
14576 if (VAR_OR_FUNCTION_DECL_P (base))
14577 return !DECL_WEAK (base);
14579 /* Constants are never weak. */
14580 if (CONSTANT_CLASS_P (base))
14587 sub_strict_overflow_p = false;
14588 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14589 &sub_strict_overflow_p)
14590 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14591 &sub_strict_overflow_p))
14593 if (sub_strict_overflow_p)
14594 *strict_overflow_p = true;
14605 /* Return true when T is an address and is known to be nonzero.
14606 For floating point we further ensure that T is not denormal.
14607 Similar logic is present in nonzero_address in rtlanal.h.
14609 If the return value is based on the assumption that signed overflow
14610 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14611 change *STRICT_OVERFLOW_P. */
14614 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14616 tree type = TREE_TYPE (t);
14617 enum tree_code code;
14619 /* Doing something useful for floating point would need more work. */
14620 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14623 code = TREE_CODE (t);
14624 switch (TREE_CODE_CLASS (code))
14627 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14628 strict_overflow_p);
14630 case tcc_comparison:
14631 return tree_binary_nonzero_warnv_p (code, type,
14632 TREE_OPERAND (t, 0),
14633 TREE_OPERAND (t, 1),
14634 strict_overflow_p);
14636 case tcc_declaration:
14637 case tcc_reference:
14638 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14646 case TRUTH_NOT_EXPR:
14647 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14648 strict_overflow_p);
14650 case TRUTH_AND_EXPR:
14651 case TRUTH_OR_EXPR:
14652 case TRUTH_XOR_EXPR:
14653 return tree_binary_nonzero_warnv_p (code, type,
14654 TREE_OPERAND (t, 0),
14655 TREE_OPERAND (t, 1),
14656 strict_overflow_p);
14663 case WITH_SIZE_EXPR:
14667 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14669 case COMPOUND_EXPR:
14671 case GIMPLE_MODIFY_STMT:
14673 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14674 strict_overflow_p);
14677 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14678 strict_overflow_p);
14681 return alloca_call_p (t);
14689 /* Return true when T is an address and is known to be nonzero.
14690 Handle warnings about undefined signed overflow. */
14693 tree_expr_nonzero_p (tree t)
14695 bool ret, strict_overflow_p;
14697 strict_overflow_p = false;
14698 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14699 if (strict_overflow_p)
14700 fold_overflow_warning (("assuming signed overflow does not occur when "
14701 "determining that expression is always "
14703 WARN_STRICT_OVERFLOW_MISC);
14707 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14708 attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
14711 If the expression could be simplified to a constant, then return
14712 the constant. If the expression would not be simplified to a
14713 constant, then return NULL_TREE. */
14716 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14718 tree tem = fold_binary (code, type, op0, op1);
14719 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14722 /* Given the components of a unary expression CODE, TYPE and OP0,
14723 attempt to fold the expression to a constant without modifying
TYPE or OP0.
14726 If the expression could be simplified to a constant, then return
14727 the constant. If the expression would not be simplified to a
14728 constant, then return NULL_TREE. */
14731 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14733 tree tem = fold_unary (code, type, op0);
14734 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14737 /* If EXP represents referencing an element in a constant string
14738 (either via pointer arithmetic or array indexing), return the
14739 tree representing the value accessed, otherwise return NULL. */
14742 fold_read_from_constant_string (tree exp)
14744 if ((TREE_CODE (exp) == INDIRECT_REF
14745 || TREE_CODE (exp) == ARRAY_REF)
14746 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14748 tree exp1 = TREE_OPERAND (exp, 0);
14752 if (TREE_CODE (exp) == INDIRECT_REF)
14753 string = string_constant (exp1, &index);
14756 tree low_bound = array_ref_low_bound (exp);
14757 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14759 /* Optimize the special case of a zero lower bound.
14761 We convert the low_bound to sizetype to avoid some problems
14762 with constant folding. (E.g. suppose the lower bound is 1,
14763 and its mode is QI. Without the conversion, (ARRAY
14764 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14765 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14766 if (! integer_zerop (low_bound))
14767 index = size_diffop (index, fold_convert (sizetype, low_bound));
14773 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14774 && TREE_CODE (string) == STRING_CST
14775 && TREE_CODE (index) == INTEGER_CST
14776 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14777 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14779 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14780 return build_int_cst_type (TREE_TYPE (exp),
14781 (TREE_STRING_POINTER (string)
14782 [TREE_INT_CST_LOW (index)]));
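
/* Editor's sketch (not part of GCC): the folding above turns a constant
   index into a string literal directly into the addressed character,
   e.g. "abc"[1] becomes 'b'.  The same access spelled out:  */

static int
read_from_constant_string_example (void)
{
  static const char string[] = "abc";
  const int index = 1;		/* constant, in bounds, unit-sized mode */
  return string[index];		/* foldable to the constant 'b' */
}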
14787 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14788 an integer constant, real, or fixed-point constant.
14790 TYPE is the type of the result. */
14793 fold_negate_const (tree arg0, tree type)
14795 tree t = NULL_TREE;
14797 switch (TREE_CODE (arg0))
14801 unsigned HOST_WIDE_INT low;
14802 HOST_WIDE_INT high;
14803 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14804 TREE_INT_CST_HIGH (arg0),
14806 t = force_fit_type_double (type, low, high, 1,
14807 (overflow | TREE_OVERFLOW (arg0))
14808 && !TYPE_UNSIGNED (type));
14813 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14818 FIXED_VALUE_TYPE f;
14819 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14820 &(TREE_FIXED_CST (arg0)), NULL,
14821 TYPE_SATURATING (type));
14822 t = build_fixed (type, f);
14823 /* Propagate overflow flags. */
14824 if (overflow_p | TREE_OVERFLOW (arg0))
14826 TREE_OVERFLOW (t) = 1;
14827 TREE_CONSTANT_OVERFLOW (t) = 1;
14829 else if (TREE_CONSTANT_OVERFLOW (arg0))
14830 TREE_CONSTANT_OVERFLOW (t) = 1;
14835 gcc_unreachable ();
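
/* Editor's sketch (not part of GCC): what neg_double computes above --
   two's-complement negation of the double-word value HIGH:LOW, with the
   carry from the low word rippling into the high word.  At 2 x 32 bits:  */

static void
neg_double_sketch (unsigned int low, unsigned int high,
		   unsigned int *nlow, unsigned int *nhigh)
{
  *nlow = -low;			/* negate the low word mod 2^32 */
  *nhigh = ~high + (low == 0);	/* ~high, plus 1 only when -low carried */
}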
14841 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14842 an integer constant or real constant.
14844 TYPE is the type of the result. */
14847 fold_abs_const (tree arg0, tree type)
14849 tree t = NULL_TREE;
14851 switch (TREE_CODE (arg0))
14854 /* If the value is unsigned, then the absolute value is
14855 the same as the ordinary value. */
14856 if (TYPE_UNSIGNED (type))
14858 /* Similarly, if the value is non-negative. */
14859 else if (INT_CST_LT (integer_minus_one_node, arg0))
14861 /* If the value is negative, then the absolute value is
its negation. */
14865 unsigned HOST_WIDE_INT low;
14866 HOST_WIDE_INT high;
14867 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14868 TREE_INT_CST_HIGH (arg0),
14870 t = force_fit_type_double (type, low, high, -1,
14871 overflow | TREE_OVERFLOW (arg0));
14876 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14877 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14883 gcc_unreachable ();
14889 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14890 constant. TYPE is the type of the result. */
14893 fold_not_const (tree arg0, tree type)
14895 tree t = NULL_TREE;
14897 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14899 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14900 ~TREE_INT_CST_HIGH (arg0), 0,
14901 TREE_OVERFLOW (arg0));
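
/* Editor's sketch (not part of GCC): fold_not_const complements the two
   halves independently, which is exact because ~(HIGH:LOW) is simply
   (~HIGH):(~LOW).  Checkable at 64 = 2 x 32 bits:  */

static int
not_double_matches (unsigned int low, unsigned int high)
{
  unsigned long long whole = ((unsigned long long) high << 32) | low;
  unsigned long long split =
    ((unsigned long long) (unsigned int) ~high << 32) | (unsigned int) ~low;
  return ~whole == split;	/* always 1 */
}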
14906 /* Given CODE, a relational operator, the target type, TYPE and two
14907 constant operands OP0 and OP1, return the result of the
14908 relational operation. If the result is not a compile time
14909 constant, then return NULL_TREE. */
14912 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14914 int result, invert;
14916 /* From here on, the only cases we handle are when the result is
14917 known to be a constant. */
14919 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14921 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14922 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14924 /* Handle the cases where either operand is a NaN. */
14925 if (real_isnan (c0) || real_isnan (c1))
14935 case UNORDERED_EXPR:
14949 if (flag_trapping_math)
14955 gcc_unreachable ();
14958 return constant_boolean_node (result, type);
14961 return constant_boolean_node (real_compare (code, c0, c1), type);
14964 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14966 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14967 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14968 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14971 /* Handle equality/inequality of complex constants. */
14972 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14974 tree rcond = fold_relational_const (code, type,
14975 TREE_REALPART (op0),
14976 TREE_REALPART (op1));
14977 tree icond = fold_relational_const (code, type,
14978 TREE_IMAGPART (op0),
14979 TREE_IMAGPART (op1));
14980 if (code == EQ_EXPR)
14981 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14982 else if (code == NE_EXPR)
14983 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14988 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14990 To compute GT, swap the arguments and do LT.
14991 To compute GE, do LT and invert the result.
14992 To compute LE, swap the arguments, do LT and invert the result.
14993 To compute NE, do EQ and invert the result.
14995 Therefore, the code below must handle only EQ and LT. */
14997 if (code == LE_EXPR || code == GT_EXPR)
15002 code = swap_tree_comparison (code);
15005 /* Note that it is safe to invert for real values here because we
15006 have already handled the one case where it matters. */
15009 if (code == NE_EXPR || code == GE_EXPR)
15012 code = invert_tree_comparison (code, false);
15015 /* Compute a result for LT or EQ if args permit;
15016 otherwise return NULL_TREE. */
15017 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15019 if (code == EQ_EXPR)
15020 result = tree_int_cst_equal (op0, op1);
15021 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15022 result = INT_CST_LT_UNSIGNED (op0, op1);
15024 result = INT_CST_LT (op0, op1);
15031 return constant_boolean_node (result, type);
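
/* Editor's sketch (not part of GCC): the canonicalization table above
   needs only LT and EQ as primitives; the other four comparisons are
   obtained by swapping operands and/or inverting the result:  */

static int lt (int a, int b) { return a < b; }
static int eq (int a, int b) { return a == b; }

static int gt (int a, int b) { return lt (b, a); }	/* swap */
static int ge (int a, int b) { return !lt (a, b); }	/* invert */
static int le (int a, int b) { return !lt (b, a); }	/* swap + invert */
static int ne (int a, int b) { return !eq (a, b); }	/* invert */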
15034 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15035 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
itself. */
15039 fold_build_cleanup_point_expr (tree type, tree expr)
15041 /* If the expression does not have side effects then we don't have to wrap
15042 it with a cleanup point expression. */
15043 if (!TREE_SIDE_EFFECTS (expr))
15046 /* If the expression is a return, check to see if the expression inside the
15047 return, or the right hand side of the modify expression inside the
15048 return, has side effects. If either of them doesn't, we don't need to
15049 wrap the expression in a cleanup point expression. Note we don't check the
15050 left hand side of the modify because it should always be a return decl. */
15051 if (TREE_CODE (expr) == RETURN_EXPR)
15053 tree op = TREE_OPERAND (expr, 0);
15054 if (!op || !TREE_SIDE_EFFECTS (op))
15056 op = TREE_OPERAND (op, 1);
15057 if (!TREE_SIDE_EFFECTS (op))
15061 return build1 (CLEANUP_POINT_EXPR, type, expr);
15064 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15065 of an indirection through OP0, or NULL_TREE if no simplification is
possible. */
15069 fold_indirect_ref_1 (tree type, tree op0)
15075 subtype = TREE_TYPE (sub);
15076 if (!POINTER_TYPE_P (subtype))
15079 if (TREE_CODE (sub) == ADDR_EXPR)
15081 tree op = TREE_OPERAND (sub, 0);
15082 tree optype = TREE_TYPE (op);
15083 /* *&CONST_DECL -> the value of the const decl. */
15084 if (TREE_CODE (op) == CONST_DECL)
15085 return DECL_INITIAL (op);
15086 /* *&p => p; make sure to handle *&"str"[cst] here. */
15087 if (type == optype)
15089 tree fop = fold_read_from_constant_string (op);
15095 /* *(foo *)&fooarray => fooarray[0] */
15096 else if (TREE_CODE (optype) == ARRAY_TYPE
15097 && type == TREE_TYPE (optype))
15099 tree type_domain = TYPE_DOMAIN (optype);
15100 tree min_val = size_zero_node;
15101 if (type_domain && TYPE_MIN_VALUE (type_domain))
15102 min_val = TYPE_MIN_VALUE (type_domain);
15103 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15105 /* *(foo *)&complexfoo => __real__ complexfoo */
15106 else if (TREE_CODE (optype) == COMPLEX_TYPE
15107 && type == TREE_TYPE (optype))
15108 return fold_build1 (REALPART_EXPR, type, op);
15109 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15110 else if (TREE_CODE (optype) == VECTOR_TYPE
15111 && type == TREE_TYPE (optype))
15113 tree part_width = TYPE_SIZE (type);
15114 tree index = bitsize_int (0);
15115 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15119 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15120 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15121 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15123 tree op00 = TREE_OPERAND (sub, 0);
15124 tree op01 = TREE_OPERAND (sub, 1);
15128 op00type = TREE_TYPE (op00);
15129 if (TREE_CODE (op00) == ADDR_EXPR
15130 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15131 && type == TREE_TYPE (TREE_TYPE (op00type)))
15133 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15134 tree part_width = TYPE_SIZE (type);
15135 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15136 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15137 tree index = bitsize_int (indexi);
15139 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15140 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15141 part_width, index);
15147 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15148 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15149 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15151 tree op00 = TREE_OPERAND (sub, 0);
15152 tree op01 = TREE_OPERAND (sub, 1);
15156 op00type = TREE_TYPE (op00);
15157 if (TREE_CODE (op00) == ADDR_EXPR
15158 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15159 && type == TREE_TYPE (TREE_TYPE (op00type)))
15161 tree size = TYPE_SIZE_UNIT (type);
15162 if (tree_int_cst_equal (size, op01))
15163 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15167 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15168 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15169 && type == TREE_TYPE (TREE_TYPE (subtype)))
15172 tree min_val = size_zero_node;
15173 sub = build_fold_indirect_ref (sub);
15174 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15175 if (type_domain && TYPE_MIN_VALUE (type_domain))
15176 min_val = TYPE_MIN_VALUE (type_domain);
15177 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
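
/* Editor's sketch (not part of GCC): two of the rewrites above expressed
   as plain C that the folder can simplify -- *(foo *)&fooarray becomes
   fooarray[0], and reading a complex value through a pointer to its
   element type yields the real part:  */

#include <complex.h>

static double
indirect_ref_examples (void)
{
  static double fooarray[2] = { 1.5, 2.5 };
  double _Complex complexfoo = 3.5 + 4.5 * I;

  double first = *(double *) &fooarray;	/* == fooarray[0] */
  double re = *(double *) &complexfoo;	/* == __real__ complexfoo */
  return first + re;			/* 1.5 + 3.5 */
}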
15183 /* Builds an expression for an indirection through T, simplifying some
cases. */
15187 build_fold_indirect_ref (tree t)
15189 tree type = TREE_TYPE (TREE_TYPE (t));
15190 tree sub = fold_indirect_ref_1 (type, t);
15195 return build1 (INDIRECT_REF, type, t);
15198 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15201 fold_indirect_ref (tree t)
15203 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15211 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15212 whose result is ignored. The type of the returned tree need not be
15213 the same as the original expression. */
15216 fold_ignored_result (tree t)
15218 if (!TREE_SIDE_EFFECTS (t))
15219 return integer_zero_node;
15222 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15225 t = TREE_OPERAND (t, 0);
15229 case tcc_comparison:
15230 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15231 t = TREE_OPERAND (t, 0);
15232 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15233 t = TREE_OPERAND (t, 1);
15238 case tcc_expression:
15239 switch (TREE_CODE (t))
15241 case COMPOUND_EXPR:
15242 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15244 t = TREE_OPERAND (t, 0);
15248 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15249 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15251 t = TREE_OPERAND (t, 0);
15264 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15265 This can only be applied to objects of a sizetype. */
15268 round_up (tree value, int divisor)
15270 tree div = NULL_TREE;
15272 gcc_assert (divisor > 0);
15276 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15277 have to do anything. Only do this when we are not given a const,
15278 because in that case, this check is more expensive than just
doing it. */
15280 if (TREE_CODE (value) != INTEGER_CST)
15282 div = build_int_cst (TREE_TYPE (value), divisor);
15284 if (multiple_of_p (TREE_TYPE (value), value, div))
15288 /* If divisor is a power of two, simplify this to bit manipulation. */
15289 if (divisor == (divisor & -divisor))
15291 if (TREE_CODE (value) == INTEGER_CST)
15293 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15294 unsigned HOST_WIDE_INT high;
15297 if ((low & (divisor - 1)) == 0)
15300 overflow_p = TREE_OVERFLOW (value);
15301 high = TREE_INT_CST_HIGH (value);
15302 low &= ~(divisor - 1);
15311 return force_fit_type_double (TREE_TYPE (value), low, high,
15318 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15319 value = size_binop (PLUS_EXPR, value, t);
15320 t = build_int_cst (TREE_TYPE (value), -divisor);
15321 value = size_binop (BIT_AND_EXPR, value, t);
15327 div = build_int_cst (TREE_TYPE (value), divisor);
15328 value = size_binop (CEIL_DIV_EXPR, value, div);
15329 value = size_binop (MULT_EXPR, value, div);
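
/* Editor's sketch (not part of GCC): the power-of-two branch above in
   ordinary unsigned arithmetic -- adding DIVISOR-1 and clearing the low
   bits rounds up, and ~(DIVISOR-1) equals -DIVISOR for a power of two:  */

static unsigned int
round_up_pow2 (unsigned int value, unsigned int divisor)
{
  /* Precondition: divisor is a power of two, i.e.
     divisor == (divisor & -divisor).  */
  return (value + divisor - 1) & ~(divisor - 1);
}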
15335 /* Likewise, but round down. */
15338 round_down (tree value, int divisor)
15340 tree div = NULL_TREE;
15342 gcc_assert (divisor > 0);
15346 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15347 have to do anything. Only do this when we are not given a const,
15348 because in that case, this check is more expensive than just
doing it. */
15350 if (TREE_CODE (value) != INTEGER_CST)
15352 div = build_int_cst (TREE_TYPE (value), divisor);
15354 if (multiple_of_p (TREE_TYPE (value), value, div))
15358 /* If divisor is a power of two, simplify this to bit manipulation. */
15359 if (divisor == (divisor & -divisor))
15363 t = build_int_cst (TREE_TYPE (value), -divisor);
15364 value = size_binop (BIT_AND_EXPR, value, t);
15369 div = build_int_cst (TREE_TYPE (value), divisor);
15370 value = size_binop (FLOOR_DIV_EXPR, value, div);
15371 value = size_binop (MULT_EXPR, value, div);
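
/* Editor's sketch (not part of GCC): the round-down counterpart only
   clears the low bits, matching the BIT_AND_EXPR with -divisor above:  */

static unsigned int
round_down_pow2 (unsigned int value, unsigned int divisor)
{
  /* Precondition: divisor is a power of two.  */
  return value & ~(divisor - 1);	/* same bits as value & -divisor */
}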
15377 /* Returns the pointer to the base of the object addressed by EXP and
15378 extracts the information about the offset of the access, storing it
15379 to PBITPOS and POFFSET. */
15382 split_address_to_core_and_offset (tree exp,
15383 HOST_WIDE_INT *pbitpos, tree *poffset)
15386 enum machine_mode mode;
15387 int unsignedp, volatilep;
15388 HOST_WIDE_INT bitsize;
15390 if (TREE_CODE (exp) == ADDR_EXPR)
15392 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15393 poffset, &mode, &unsignedp, &volatilep,
15395 core = fold_addr_expr (core);
15401 *poffset = NULL_TREE;
15407 /* Returns true if addresses of E1 and E2 differ by a constant, false
15408 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15411 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15414 HOST_WIDE_INT bitpos1, bitpos2;
15415 tree toffset1, toffset2, tdiff, type;
15417 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15418 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15420 if (bitpos1 % BITS_PER_UNIT != 0
15421 || bitpos2 % BITS_PER_UNIT != 0
15422 || !operand_equal_p (core1, core2, 0))
15425 if (toffset1 && toffset2)
15427 type = TREE_TYPE (toffset1);
15428 if (type != TREE_TYPE (toffset2))
15429 toffset2 = fold_convert (type, toffset2);
15431 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15432 if (!cst_and_fits_in_hwi (tdiff))
15435 *diff = int_cst_value (tdiff);
15437 else if (toffset1 || toffset2)
15439 /* If only one of the offsets is non-constant, the difference cannot
be a constant. */
15446 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
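
/* Editor's sketch (not part of GCC): ptr_difference_const succeeds when
   both addresses decompose to the same core with constant offsets; the
   result is the offset difference plus the byte distance of the bit
   positions.  The analogous source-level fold:  */

static int
ptr_difference_example (void)
{
  static char buf[16];
  char *e1 = &buf[10];		/* core BUF, byte offset 10 */
  char *e2 = &buf[3];		/* core BUF, byte offset 3 */
  return (int) (e1 - e2);	/* constant-folds to 7 */
}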
15450 /* Simplify the floating point expression EXP when the sign of the
15451 result is not significant. Return NULL_TREE if no simplification
is possible. */
15455 fold_strip_sign_ops (tree exp)
15459 switch (TREE_CODE (exp))
15463 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15464 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15468 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15470 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15471 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15472 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15473 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15474 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15475 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15478 case COMPOUND_EXPR:
15479 arg0 = TREE_OPERAND (exp, 0);
15480 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15482 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15486 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15487 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15489 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15490 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15491 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15496 const enum built_in_function fcode = builtin_mathfn_code (exp);
15499 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15500 /* Strip copysign function call, return the 1st argument. */
15501 arg0 = CALL_EXPR_ARG (exp, 0);
15502 arg1 = CALL_EXPR_ARG (exp, 1);
15503 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15506 /* Strip sign ops from the argument of "odd" math functions. */
15507 if (negate_mathfn_p (fcode))
15509 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15511 return build_call_expr (get_callee_fndecl (exp), 1, arg0);