/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "fixed-value.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
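
/* Worked example: with 8-bit two's complement values, 100 + 100 wraps
   to -56; A and B agree in sign, so ~(a ^ b) has the sign bit set,
   a ^ sum also has the sign bit set, and the macro reports overflow.
   For 100 + (-100) = 0 the operands differ in sign, ~(a ^ b) has a
   clear sign bit, and no overflow is reported.  */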
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
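
/* Example: on a host with HOST_BITS_PER_WIDE_INT == 64, BASE is 2^32,
   LOWPART (0x123456789abcdef0) is 0x9abcdef0 and
   HIGHPART (0x123456789abcdef0) is 0x12345678, so a two-word integer
   becomes the four base-2^32 digits
   { LOWPART (low), HIGHPART (low), LOWPART (hi), HIGHPART (hi) },
   least significant digit first.  */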
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
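
/* Round-trip sketch (illustrative only, assuming a 64-bit
   HOST_WIDE_INT host):

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (words, 0x9abcdef012345678, 0x1234);
     decode (words, &lo, &hi);   /- lo and hi hold the original pieces.

   Because every words[i] is less than BASE, the product of any two
   such digits fits in an unsigned HOST_WIDE_INT, which is what the
   multiplication loop in mul_double_with_sign below relies on.  */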
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

bool
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
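
/* Example: fitting the value 0x100 into an 8-bit unsigned type clears
   all bits beyond bit 7, leaving 0, and the function returns true
   because the value changed; fitting -1 into a 16-bit signed type
   truncates to 0xffff and then sign extends back to -1, so the value
   is unchanged and the function returns false.  */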
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
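
/* The carry out of the low word is recovered from the unsigned
   comparison l < l1: the truncated sum l = l1 + l2 wraps around
   exactly when it is smaller than an addend.  E.g. for 64-bit words,
   0xffffffffffffffff + 2 yields l == 1, which is < l1, so 1 is
   carried into the high word.  */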
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
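
/* The 8-digit product holds the full 4-word result: the low half is
   the returned value, the high half (toplow:tophigh) signals overflow.
   For unsigned multiplication any nonzero high half is an overflow.
   For signed multiplication the unsigned high half is first corrected
   by subtracting the other operand once for each negative input
   (when h1 < 0 the unsigned interpretation of L1:H1 exceeds the
   signed value by 2^(2*HOST_BITS_PER_WIDE_INT)); the product then
   fits iff the corrected high half is the sign extension of the low
   half: all zero bits when *hv >= 0, all one bits when *hv < 0.  */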
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
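
/* A rotation by COUNT within PREC bits is composed from the two
   logical shifts in the usual way: (x >> count) | (x << (prec - count))
   for a right rotate, with the left rotate as the mirror image.
   E.g. rotating the 8-bit value 0x81 right by 1 combines
   0x81 >> 1 == 0x40 with (0x81 << 7) & 0xff == 0x80, giving 0xc0.  */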
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];   /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra element for scaling */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      break;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
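
/* Worked example of the rounding adjustments, for -8 / 3: the trial
   quotient and remainder are -2 and -2.  TRUNC_DIV_EXPR keeps -2
   (round toward zero).  FLOOR_DIV_EXPR sees ratio < 0 && rem != 0 and
   adjusts to -3.  CEIL_DIV_EXPR leaves -2 alone, since the ratio is
   not positive.  ROUND_DIV_EXPR finds 2 * |rem| = 4 > |den| = 3, so
   it rounds away from zero to -3.  The *_MOD_EXPR variants recompute
   the remainder from the adjusted quotient in the "true remainder"
   step above, e.g. FLOOR_MOD_EXPR yields -8 - (-3 * 3) = 1.  */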
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
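
/* Typical usage pattern for the deferral machinery (an illustrative
   sketch, not a quotation of any particular caller; useful_p stands
   for whatever hypothetical predicate the caller applies):

     fold_defer_overflow_warnings ();
     t = fold_binary (code, type, op0, op1);
     if (t != NULL_TREE && useful_p (t))
       fold_undefer_overflow_warnings (true, stmt, 0);
     else
       fold_undefer_and_ignore_overflow_warnings ();

   so a warning is only issued when the folded result is actually
   used.  */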
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
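
/* Example: in a 32-bit signed type the minimum value (only the sign
   bit set, so val == 1 << 31 after masking) is exactly the one
   constant whose negation overflows, so the function returns false
   for it and true for every other constant; for an 8-bit signed type
   the same holds for -128.  */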
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (!t)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
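
/* For example, with CODE == PLUS_EXPR, splitting A - CST yields
   *litp = 0, *minus_litp = CST, *conp = 0 and variable part A, while
   splitting CST1 + X with NEGATE_P set yields *minus_litp = CST1 and
   variable part -X.  */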
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
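
/* Example: size_binop (PLUS_EXPR, size_int (4), size_int (8)) yields
   a sizetype constant 12 via int_const_binop, and when both operands
   are constants size_binop (PLUS_EXPR, size_int (0), c) simply
   returns C through the shortcut above; anything non-constant falls
   through to fold_build2.  */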
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting from a pointer,  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1))
                             /* or to a sizetype with same signedness
                                and the precision is unchanged.
                                ??? sizetype is always sign-extended,
                                but its signedness depends on the
                                frontend.  Thus we see spurious overflows
                                here if we do not check this.  */
                             && !((TYPE_PRECISION (TREE_TYPE (arg1))
                                   == TYPE_PRECISION (type))
                                  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                                      == TYPE_UNSIGNED (type))
                                  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
                                       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
                                      || (TREE_CODE (type) == INTEGER_TYPE
                                          && TYPE_IS_SIZETYPE (type)))),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2230 double_int temp, temp_trunc;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0:
2257 if any of the fractional bits are nonzero, add 1 to temp. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2264 temp = double_int_add (temp, one);
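/* Worked example (added commentary): for a signed fixed-point value of
   -2.5, the arithmetic right shift above floors to temp = -3.  Shifting
   back left gives temp_trunc = -3.0, which differs from the original
   -2.5, so the fractional bits were nonzero and the add of 1 above
   yields -2, the value correctly rounded toward zero.  */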
2267 /* Given a fixed-point constant, make a new constant with the new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2408 else if (TREE_CODE (type) == REAL_TYPE)
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2429 /* Construct a vector of zero elements of vector type TYPE. */
2432 build_zero_vector (tree type)
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2446 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2449 fold_convertible_p (const_tree type, const_tree arg)
2451 tree orig = TREE_TYPE (arg);
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2464 switch (TREE_CODE (type))
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2476 case FIXED_POINT_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2491 fold_convert (tree type, tree arg)
2493 tree orig = TREE_TYPE (arg);
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2507 switch (TREE_CODE (type))
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2531 if (TREE_CODE (arg) == INTEGER_CST)
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2537 else if (TREE_CODE (arg) == REAL_CST)
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2543 else if (TREE_CODE (arg) == FIXED_CST)
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2550 switch (TREE_CODE (orig))
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2558 return fold_build1 (NOP_EXPR, type, arg);
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2580 switch (TREE_CODE (orig))
2582 case FIXED_POINT_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2598 switch (TREE_CODE (orig))
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2643 return fold_build1 (NOP_EXPR, type, tem);
2650 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2654 maybe_lvalue_p (const_tree x)
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2671 case ARRAY_RANGE_REF:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2684 case GIMPLE_MODIFY_STMT:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */
2712 if (! maybe_lvalue_p (x))
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a
2737 HONOR_NANS flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
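/* Example (added commentary): assuming -fno-trapping-math, inverting
   LT_EXPR while honoring NaNs yields UNGE_EXPR rather than GE_EXPR,
   because !(x < y) must also be true when either operand is a NaN:

     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false) == GE_EXPR

   When NaNs are honored and -ftrapping-math is in effect, ERROR_MARK is
   returned instead, since the inverted form could hide a trap.  */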
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2784 swap_tree_comparison (enum tree_code code)
2791 case UNORDERED_EXPR:
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
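/* Added commentary, assuming the usual values of the comparison_code
   enum above: the encoding assigns one bit each to "less", "equal",
   "greater" and "unordered" (COMPCODE_LT == 1, COMPCODE_EQ == 2,
   COMPCODE_GT == 4, COMPCODE_UNORD == 8), so bitwise operations on the
   codes combine comparisons directly, e.g.

     COMPCODE_LT | COMPCODE_EQ == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 2 == COMPCODE_EQ  */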
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2843 return COMPCODE_UNLT;
2845 return COMPCODE_UNEQ;
2847 return COMPCODE_UNLE;
2849 return COMPCODE_UNGT;
2851 return COMPCODE_LTGT;
2853 return COMPCODE_UNGE;
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
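/* For instance (added commentary): with integer operands,
   (a < b) && (a > b) combines to COMPCODE_LT & COMPCODE_GT
   == COMPCODE_FALSE and folds to constant false, while
   (a < b) || (a == b) combines to COMPCODE_LE and folds to a <= b.  */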
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
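/* A sketch of the resulting behavior (added commentary; X is a plain
   VAR_DECL and F a pure function):

     operand_equal_p (x + 1, x + 1, 0)              nonzero: same shape,
                                                    no side effects
     operand_equal_p (x++, x++, 0)                  zero: side effects
     operand_equal_p (x, x, OEP_ONLY_CONST)         zero: X is not constant
     operand_equal_p (f (x), f (x), OEP_PURE_SAME)  nonzero: pure call,
                                                    equal arguments  */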
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If the two types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3043 /* If the two types don't have the same precision, then it is not safe to strip NOPs. */
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3093 return tree_int_cst_equal (arg0, arg1);
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3151 if (flags & OEP_ONLY_CONST)
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, then both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3173 case FIX_TRUNC_EXPR:
3174 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3175 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3185 case tcc_comparison:
3187 if (OP_SAME (0) && OP_SAME (1))
3190 /* For commutative ops, allow the other order. */
3191 return (commutative_tree_code (TREE_CODE (arg0))
3192 && operand_equal_p (TREE_OPERAND (arg0, 0),
3193 TREE_OPERAND (arg1, 1), flags)
3194 && operand_equal_p (TREE_OPERAND (arg0, 1),
3195 TREE_OPERAND (arg1, 0), flags));
3198 /* If either of the pointer (or reference) expressions we are
3199 dereferencing contain a side effect, these cannot be equal. */
3200 if (TREE_SIDE_EFFECTS (arg0)
3201 || TREE_SIDE_EFFECTS (arg1))
3204 switch (TREE_CODE (arg0))
3207 case ALIGN_INDIRECT_REF:
3208 case MISALIGNED_INDIRECT_REF:
3214 case ARRAY_RANGE_REF:
3215 /* Operands 2 and 3 may be null.
3216 Compare the array index by value first if it is constant, as we
3217 may have different types but the same value here. */
3219 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3220 TREE_OPERAND (arg1, 1))
3222 && OP_SAME_WITH_NULL (2)
3223 && OP_SAME_WITH_NULL (3));
3226 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3227 may be NULL when we're called to compare MEM_EXPRs. */
3228 return OP_SAME_WITH_NULL (0)
3230 && OP_SAME_WITH_NULL (2);
3233 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3239 case tcc_expression:
3240 switch (TREE_CODE (arg0))
3243 case TRUTH_NOT_EXPR:
3246 case TRUTH_ANDIF_EXPR:
3247 case TRUTH_ORIF_EXPR:
3248 return OP_SAME (0) && OP_SAME (1);
3250 case TRUTH_AND_EXPR:
3252 case TRUTH_XOR_EXPR:
3253 if (OP_SAME (0) && OP_SAME (1))
3256 /* Otherwise take into account this is a commutative operation. */
3257 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3258 TREE_OPERAND (arg1, 1), flags)
3259 && operand_equal_p (TREE_OPERAND (arg0, 1),
3260 TREE_OPERAND (arg1, 0), flags));
3267 switch (TREE_CODE (arg0))
3270 /* If the CALL_EXPRs call different functions, then they
3271 clearly cannot be equal. */
3272 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3277 unsigned int cef = call_expr_flags (arg0);
3278 if (flags & OEP_PURE_SAME)
3279 cef &= ECF_CONST | ECF_PURE;
3286 /* Now see if all the arguments are the same. */
3288 const_call_expr_arg_iterator iter0, iter1;
3290 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3291 a1 = first_const_call_expr_arg (arg1, &iter1);
3293 a0 = next_const_call_expr_arg (&iter0),
3294 a1 = next_const_call_expr_arg (&iter1))
3295 if (! operand_equal_p (a0, a1, flags))
3298 /* If we get here and both argument lists are exhausted
3299 then the CALL_EXPRs are equal. */
3300 return ! (a0 || a1);
3306 case tcc_declaration:
3307 /* Consider __builtin_sqrt equal to sqrt. */
3308 return (TREE_CODE (arg0) == FUNCTION_DECL
3309 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3310 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3311 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3318 #undef OP_SAME_WITH_NULL
3321 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3322 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3324 When in doubt, return 0. */
3327 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3329 int unsignedp1, unsignedpo;
3330 tree primarg0, primarg1, primother;
3331 unsigned int correct_width;
3333 if (operand_equal_p (arg0, arg1, 0))
3336 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3337 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3340 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3341 and see if the inner values are the same. This removes any
3342 signedness comparison, which doesn't matter here. */
3343 primarg0 = arg0, primarg1 = arg1;
3344 STRIP_NOPS (primarg0);
3345 STRIP_NOPS (primarg1);
3346 if (operand_equal_p (primarg0, primarg1, 0))
3349 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3350 actual comparison operand, ARG0.
3352 First throw away any conversions to wider types
3353 already present in the operands. */
3355 primarg1 = get_narrower (arg1, &unsignedp1);
3356 primother = get_narrower (other, &unsignedpo);
3358 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3359 if (unsignedp1 == unsignedpo
3360 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3361 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3363 tree type = TREE_TYPE (arg0);
3365 /* Make sure shorter operand is extended the right way
3366 to match the longer operand. */
3367 primarg1 = fold_convert (signed_or_unsigned_type_for
3368 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3370 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3377 /* See if ARG is an expression that is either a comparison or is performing
3378 arithmetic on comparisons. The comparisons must only be comparing
3379 two different values, which will be stored in *CVAL1 and *CVAL2; if
3380 they are nonzero it means that some operands have already been found.
3381 No variables may be used anywhere else in the expression except in the
3382 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3383 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3385 If this is true, return 1. Otherwise, return zero. */
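/* Example (added commentary): for ARG == (x < y) | (x == y) the walk
   records *CVAL1 == x and *CVAL2 == y and returns 1; for
   (x < y) | (x == z) it returns 0, since three distinct values occur
   in the comparisons.  */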
3388 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3390 enum tree_code code = TREE_CODE (arg);
3391 enum tree_code_class class = TREE_CODE_CLASS (code);
3393 /* We can handle some of the tcc_expression cases here. */
3394 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3396 else if (class == tcc_expression
3397 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3398 || code == COMPOUND_EXPR))
3401 else if (class == tcc_expression && code == SAVE_EXPR
3402 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3404 /* If we've already found a CVAL1 or CVAL2, this expression is
3405 too complex to handle. */
3406 if (*cval1 || *cval2)
3416 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3419 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3420 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3421 cval1, cval2, save_p));
3426 case tcc_expression:
3427 if (code == COND_EXPR)
3428 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3429 cval1, cval2, save_p)
3430 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3431 cval1, cval2, save_p)
3432 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3433 cval1, cval2, save_p));
3436 case tcc_comparison:
3437 /* First see if we can handle the first operand, then the second. For
3438 the second operand, we know *CVAL1 can't be zero. It must be that
3439 one side of the comparison is each of the values; test for the
3440 case where this isn't true by failing if the two operands are the same. */
3443 if (operand_equal_p (TREE_OPERAND (arg, 0),
3444 TREE_OPERAND (arg, 1), 0))
3448 *cval1 = TREE_OPERAND (arg, 0);
3449 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3451 else if (*cval2 == 0)
3452 *cval2 = TREE_OPERAND (arg, 0);
3453 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3458 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3460 else if (*cval2 == 0)
3461 *cval2 = TREE_OPERAND (arg, 1);
3462 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3474 /* ARG is a tree that is known to contain just arithmetic operations and
3475 comparisons. Evaluate the operations in the tree substituting NEW0 for
3476 any occurrence of OLD0 as an operand of a comparison, and likewise NEW1 for OLD1. */
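/* Example (added commentary): with OLD0 == x, NEW0 == 0, OLD1 == y and
   NEW1 == 1, eval_subst rewrites (x < y) && (y == z) into
   (0 < 1) && (1 == z), which subsequent folding can reduce further.  */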
3480 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3482 tree type = TREE_TYPE (arg);
3483 enum tree_code code = TREE_CODE (arg);
3484 enum tree_code_class class = TREE_CODE_CLASS (code);
3486 /* We can handle some of the tcc_expression cases here. */
3487 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3489 else if (class == tcc_expression
3490 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3496 return fold_build1 (code, type,
3497 eval_subst (TREE_OPERAND (arg, 0),
3498 old0, new0, old1, new1));
3501 return fold_build2 (code, type,
3502 eval_subst (TREE_OPERAND (arg, 0),
3503 old0, new0, old1, new1),
3504 eval_subst (TREE_OPERAND (arg, 1),
3505 old0, new0, old1, new1));
3507 case tcc_expression:
3511 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3514 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3517 return fold_build3 (code, type,
3518 eval_subst (TREE_OPERAND (arg, 0),
3519 old0, new0, old1, new1),
3520 eval_subst (TREE_OPERAND (arg, 1),
3521 old0, new0, old1, new1),
3522 eval_subst (TREE_OPERAND (arg, 2),
3523 old0, new0, old1, new1));
3527 /* Fall through - ??? */
3529 case tcc_comparison:
3531 tree arg0 = TREE_OPERAND (arg, 0);
3532 tree arg1 = TREE_OPERAND (arg, 1);
3534 /* We need to check both for exact equality and tree equality. The
3535 former will be true if the operand has a side-effect. In that
3536 case, we know the operand occurred exactly once. */
3538 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3540 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3543 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3545 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3548 return fold_build2 (code, type, arg0, arg1);
3556 /* Return a tree for the case when the result of an expression is RESULT
3557 converted to TYPE and OMITTED was previously an operand of the expression
3558 but is now not needed (e.g., we folded OMITTED * 0).
3560 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3561 the conversion of RESULT to TYPE. */
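/* Example (added commentary): folding f () * 0 can call
   omit_one_operand (type, integer_zero_node, f ()); because the call
   has side effects the result is the COMPOUND_EXPR (f (), 0), keeping
   the call, whereas x * 0 for a plain variable simply folds to 0.  */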
3564 omit_one_operand (tree type, tree result, tree omitted)
3566 tree t = fold_convert (type, result);
3568 /* If the resulting operand is an empty statement, just return the omitted
3569 statement cast to void. */
3570 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3571 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3573 if (TREE_SIDE_EFFECTS (omitted))
3574 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3576 return non_lvalue (t);
3579 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3582 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3584 tree t = fold_convert (type, result);
3586 /* If the resulting operand is an empty statement, just return the omitted
3587 statement cast to void. */
3588 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3589 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3591 if (TREE_SIDE_EFFECTS (omitted))
3592 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3594 return pedantic_non_lvalue (t);
3597 /* Return a tree for the case when the result of an expression is RESULT
3598 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3599 of the expression but are now not needed.
3601 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3602 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3603 evaluated before OMITTED2. Otherwise, if neither has side effects,
3604 just do the conversion of RESULT to TYPE. */
3607 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3609 tree t = fold_convert (type, result);
3611 if (TREE_SIDE_EFFECTS (omitted2))
3612 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3613 if (TREE_SIDE_EFFECTS (omitted1))
3614 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3616 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3620 /* Return a simplified tree node for the truth-negation of ARG. This
3621 never alters ARG itself. We assume that ARG is an operation that
3622 returns a truth value (0 or 1).
3624 FIXME: one would think we would fold the result, but it causes
3625 problems with the dominator optimizer. */
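/* Some of the De Morgan style rewrites performed below (added
   commentary):

     !(a && b)  ->  !a || !b
     !(a || b)  ->  !a && !b
     !(a < b)   ->  a >= b    (only when safe; see invert_tree_comparison)
     !!a        ->  a  */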
3628 fold_truth_not_expr (tree arg)
3630 tree type = TREE_TYPE (arg);
3631 enum tree_code code = TREE_CODE (arg);
3633 /* If this is a comparison, we can simply invert it, except for
3634 floating-point non-equality comparisons, in which case we just
3635 enclose a TRUTH_NOT_EXPR around what we have. */
3637 if (TREE_CODE_CLASS (code) == tcc_comparison)
3639 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3640 if (FLOAT_TYPE_P (op_type)
3641 && flag_trapping_math
3642 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3643 && code != NE_EXPR && code != EQ_EXPR)
3647 code = invert_tree_comparison (code,
3648 HONOR_NANS (TYPE_MODE (op_type)));
3649 if (code == ERROR_MARK)
3652 return build2 (code, type,
3653 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3660 return constant_boolean_node (integer_zerop (arg), type);
3662 case TRUTH_AND_EXPR:
3663 return build2 (TRUTH_OR_EXPR, type,
3664 invert_truthvalue (TREE_OPERAND (arg, 0)),
3665 invert_truthvalue (TREE_OPERAND (arg, 1)));
3668 return build2 (TRUTH_AND_EXPR, type,
3669 invert_truthvalue (TREE_OPERAND (arg, 0)),
3670 invert_truthvalue (TREE_OPERAND (arg, 1)));
3672 case TRUTH_XOR_EXPR:
3673 /* Here we can invert either operand. We invert the first operand
3674 unless the second operand is a TRUTH_NOT_EXPR in which case our
3675 result is the XOR of the first operand with the inside of the
3676 negation of the second operand. */
3678 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3679 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3680 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3682 return build2 (TRUTH_XOR_EXPR, type,
3683 invert_truthvalue (TREE_OPERAND (arg, 0)),
3684 TREE_OPERAND (arg, 1));
3686 case TRUTH_ANDIF_EXPR:
3687 return build2 (TRUTH_ORIF_EXPR, type,
3688 invert_truthvalue (TREE_OPERAND (arg, 0)),
3689 invert_truthvalue (TREE_OPERAND (arg, 1)));
3691 case TRUTH_ORIF_EXPR:
3692 return build2 (TRUTH_ANDIF_EXPR, type,
3693 invert_truthvalue (TREE_OPERAND (arg, 0)),
3694 invert_truthvalue (TREE_OPERAND (arg, 1)));
3696 case TRUTH_NOT_EXPR:
3697 return TREE_OPERAND (arg, 0);
3701 tree arg1 = TREE_OPERAND (arg, 1);
3702 tree arg2 = TREE_OPERAND (arg, 2);
3703 /* A COND_EXPR may have a throw as one operand, which
3704 then has void type. Just leave void operands as they are. */
3706 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3707 VOID_TYPE_P (TREE_TYPE (arg1))
3708 ? arg1 : invert_truthvalue (arg1),
3709 VOID_TYPE_P (TREE_TYPE (arg2))
3710 ? arg2 : invert_truthvalue (arg2));
3714 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3715 invert_truthvalue (TREE_OPERAND (arg, 1)));
3717 case NON_LVALUE_EXPR:
3718 return invert_truthvalue (TREE_OPERAND (arg, 0));
3721 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3722 return build1 (TRUTH_NOT_EXPR, type, arg);
3726 return build1 (TREE_CODE (arg), type,
3727 invert_truthvalue (TREE_OPERAND (arg, 0)));
3730 if (!integer_onep (TREE_OPERAND (arg, 1)))
3732 return build2 (EQ_EXPR, type, arg,
3733 build_int_cst (type, 0));
3736 return build1 (TRUTH_NOT_EXPR, type, arg);
3738 case CLEANUP_POINT_EXPR:
3739 return build1 (CLEANUP_POINT_EXPR, type,
3740 invert_truthvalue (TREE_OPERAND (arg, 0)));
3749 /* Return a simplified tree node for the truth-negation of ARG. This
3750 never alters ARG itself. We assume that ARG is an operation that
3751 returns a truth value (0 or 1).
3753 FIXME: one would think we would fold the result, but it causes
3754 problems with the dominator optimizer. */
3757 invert_truthvalue (tree arg)
3761 if (TREE_CODE (arg) == ERROR_MARK)
3764 tem = fold_truth_not_expr (arg);
3766 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3771 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3772 operands are another bit-wise operation with a common input. If so,
3773 distribute the bit operations to save an operation and possibly two if
3774 constants are involved. For example, convert
3775 (A | B) & (A | C) into A | (B & C)
3776 Further simplification will occur if B and C are constants.
3778 If this optimization cannot be done, 0 will be returned. */
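/* Worked example (added commentary): for (X | 4) & (X | 6), COMMON is X,
   LEFT is 4 and RIGHT is 6, so the result is X | (4 & 6), and constant
   folding reduces that to X | 4, saving two operations.  */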
3781 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3786 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3787 || TREE_CODE (arg0) == code
3788 || (TREE_CODE (arg0) != BIT_AND_EXPR
3789 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3792 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3794 common = TREE_OPERAND (arg0, 0);
3795 left = TREE_OPERAND (arg0, 1);
3796 right = TREE_OPERAND (arg1, 1);
3798 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3800 common = TREE_OPERAND (arg0, 0);
3801 left = TREE_OPERAND (arg0, 1);
3802 right = TREE_OPERAND (arg1, 0);
3804 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3806 common = TREE_OPERAND (arg0, 1);
3807 left = TREE_OPERAND (arg0, 0);
3808 right = TREE_OPERAND (arg1, 1);
3810 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3812 common = TREE_OPERAND (arg0, 1);
3813 left = TREE_OPERAND (arg0, 0);
3814 right = TREE_OPERAND (arg1, 0);
3819 return fold_build2 (TREE_CODE (arg0), type, common,
3820 fold_build2 (code, type, left, right));
3823 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3824 with code CODE. This optimization is unsafe. */
3826 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3828 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3829 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3831 /* (A / C) +- (B / C) -> (A +- B) / C. */
3833 && operand_equal_p (TREE_OPERAND (arg0, 1),
3834 TREE_OPERAND (arg1, 1), 0))
3835 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3836 fold_build2 (code, type,
3837 TREE_OPERAND (arg0, 0),
3838 TREE_OPERAND (arg1, 0)),
3839 TREE_OPERAND (arg0, 1));
3841 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3842 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3843 TREE_OPERAND (arg1, 0), 0)
3844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3845 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3847 REAL_VALUE_TYPE r0, r1;
3848 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3849 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3851 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3853 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3854 real_arithmetic (&r0, code, &r0, &r1);
3855 return fold_build2 (MULT_EXPR, type,
3856 TREE_OPERAND (arg0, 0),
3857 build_real (type, r0));
3863 /* Subroutine for fold_truthop: decode a field reference.
3865 If EXP is a comparison reference, we return the innermost reference.
3867 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3868 set to the starting bit number.
3870 If the innermost field can be completely contained in a mode-sized
3871 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3873 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3874 otherwise it is not changed.
3876 *PUNSIGNEDP is set to the signedness of the field.
3878 *PMASK is set to the mask used. This is either contained in a
3879 BIT_AND_EXPR or derived from the width of the field.
3881 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3883 Return 0 if this is not a component reference or is one that we can't
3884 do anything with. */
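/* Example (added commentary): for a comparison operand such as
   s.b & 0x0f, where s.b is an unsigned 8-bit field starting at bit 16,
   this returns the innermost reference to s and sets *PBITSIZE to 8,
   *PBITPOS to 16, *PAND_MASK to 0x0f, and *PMASK to the field mask 0xff
   merged with 0x0f, i.e. 0x0f.  */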
3887 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3888 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3889 int *punsignedp, int *pvolatilep,
3890 tree *pmask, tree *pand_mask)
3892 tree outer_type = 0;
3894 tree mask, inner, offset;
3896 unsigned int precision;
3898 /* All the optimizations using this function assume integer fields.
3899 There are problems with FP fields since the type_for_size call
3900 below can fail for, e.g., XFmode. */
3901 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 /* We are interested in the bare arrangement of bits, so strip everything
3905 that doesn't affect the machine mode. However, record the type of the
3906 outermost expression if it may matter below. */
3907 if (TREE_CODE (exp) == NOP_EXPR
3908 || TREE_CODE (exp) == CONVERT_EXPR
3909 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3910 outer_type = TREE_TYPE (exp);
3913 if (TREE_CODE (exp) == BIT_AND_EXPR)
3915 and_mask = TREE_OPERAND (exp, 1);
3916 exp = TREE_OPERAND (exp, 0);
3917 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3918 if (TREE_CODE (and_mask) != INTEGER_CST)
3922 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3923 punsignedp, pvolatilep, false);
3924 if ((inner == exp && and_mask == 0)
3925 || *pbitsize < 0 || offset != 0
3926 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3929 /* If the number of bits in the reference is the same as the bitsize of
3930 the outer type, then the outer type gives the signedness. Otherwise
3931 (in case of a small bitfield) the signedness is unchanged. */
3932 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3933 *punsignedp = TYPE_UNSIGNED (outer_type);
3935 /* Compute the mask to access the bitfield. */
3936 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3937 precision = TYPE_PRECISION (unsigned_type);
3939 mask = build_int_cst_type (unsigned_type, -1);
3941 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3942 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3944 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3946 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3947 fold_convert (unsigned_type, and_mask), mask);
3950 *pand_mask = and_mask;
3954 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3955 represents the sign bit of EXP's type. If EXP represents a sign
3956 or zero extension, also test VAL against the unextended type.
3957 The return value is the (sub)expression whose sign bit is VAL,
3958 or NULL_TREE otherwise. */
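/* Example (added commentary): for EXP of type signed char, the function
   returns EXP when VAL is the INTEGER_CST with bit pattern 0x80, the
   sign bit of an 8-bit type; for EXP == (int) c, a widening of a signed
   char C, VAL is additionally tested against 0x80 of the narrower type
   via the recursive call below.  */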
3961 sign_bit_p (tree exp, const_tree val)
3963 unsigned HOST_WIDE_INT mask_lo, lo;
3964 HOST_WIDE_INT mask_hi, hi;
3968 /* Tree EXP must have an integral type. */
3969 t = TREE_TYPE (exp);
3970 if (! INTEGRAL_TYPE_P (t))
3973 /* Tree VAL must be an integer constant. */
3974 if (TREE_CODE (val) != INTEGER_CST
3975 || TREE_OVERFLOW (val))
3978 width = TYPE_PRECISION (t);
3979 if (width > HOST_BITS_PER_WIDE_INT)
3981 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3984 mask_hi = ((unsigned HOST_WIDE_INT) -1
3985 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3991 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3994 mask_lo = ((unsigned HOST_WIDE_INT) -1
3995 >> (HOST_BITS_PER_WIDE_INT - width));
3998 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3999 treat VAL as if it were unsigned. */
4000 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4001 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4004 /* Handle extension from a narrower type. */
4005 if (TREE_CODE (exp) == NOP_EXPR
4006 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4007 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4012 /* Subroutine for fold_truthop: determine if an operand is simple enough
4013 to be evaluated unconditionally. */
4016 simple_operand_p (const_tree exp)
4018 /* Strip any conversions that don't change the machine mode. */
4021 return (CONSTANT_CLASS_P (exp)
4022 || TREE_CODE (exp) == SSA_NAME
4024 && ! TREE_ADDRESSABLE (exp)
4025 && ! TREE_THIS_VOLATILE (exp)
4026 && ! DECL_NONLOCAL (exp)
4027 /* Don't regard global variables as simple. They may be
4028 allocated in ways unknown to the compiler (shared memory,
4029 #pragma weak, etc). */
4030 && ! TREE_PUBLIC (exp)
4031 && ! DECL_EXTERNAL (exp)
4032 /* Loading a static variable is unduly expensive, but global
4033 registers aren't expensive. */
4034 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4037 /* The following functions are subroutines to fold_range_test and allow it to
4038 try to change a logical combination of comparisons into a range test.
4041 For example, X == 2 || X == 3 || X == 4 || X == 5
4045 can be converted to (unsigned) (X - 2) <= 3.
4047 We describe each set of comparisons as being either inside or outside
4048 a range, using a variable named like IN_P, and then describe the
4049 range with a lower and upper bound. If one of the bounds is omitted,
4050 it represents either the highest or lowest value of the type.
4052 In the comments below, we represent a range by two numbers in brackets
4053 preceded by a "+" to designate being inside that range, or a "-" to
4054 designate being outside that range, so the condition can be inverted by
4055 flipping the prefix. An omitted bound is represented by a "-". For
4056 example, "- [-, 10]" means being outside the range starting at the lowest
4057 possible value and ending at 10, in other words, being greater than 10.
4058 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
4061 We set up things so that the missing bounds are handled in a consistent
4062 manner so neither a missing bound nor "true" and "false" need to be
4063 handled using a special case. */
4065 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4066 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4067 and UPPER1_P are nonzero if the respective argument is an upper bound
4068 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4069 must be specified for a comparison. ARG1 will be converted to ARG0's
4070 type if both are specified. */
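/* Example (added commentary): a missing lower bound (ARG0 == 0,
   UPPER0_P == 0) acts as minus infinity, so comparing it against the
   constant 5 with LT_EXPR yields boolean true (SGN0 == -1 sorts below
   every finite value), and two missing upper bounds compare equal
   under EQ_EXPR.  */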
4073 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4074 tree arg1, int upper1_p)
4080 /* If neither arg represents infinity, do the normal operation.
4081 Else, if not a comparison, return infinity. Else handle the special
4082 comparison rules. Note that most of the cases below won't occur, but
4083 are handled for consistency. */
4085 if (arg0 != 0 && arg1 != 0)
4087 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4088 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4090 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4093 if (TREE_CODE_CLASS (code) != tcc_comparison)
4096 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4097 for neither. In real maths, we cannot assume open ended ranges are
4098 the same. But, this is computer arithmetic, where numbers are finite.
4099 We can therefore make the transformation of any unbounded range with
4100 the value Z, Z being greater than any representable number. This permits
4101 us to treat unbounded ranges as equal. */
4102 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4103 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4107 result = sgn0 == sgn1;
4110 result = sgn0 != sgn1;
4113 result = sgn0 < sgn1;
4116 result = sgn0 <= sgn1;
4119 result = sgn0 > sgn1;
4122 result = sgn0 >= sgn1;
4128 return constant_boolean_node (result, type);
4131 /* Given EXP, a logical expression, set the range it is testing into
4132 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4133 actually being tested. *PLOW and *PHIGH will be made of the same
4134 type as the returned expression. If EXP is not a comparison, we
4135 will most likely not be returning a useful value and range. Set
4136 *STRICT_OVERFLOW_P to true if the return value is only valid
4137 because signed overflow is undefined; otherwise, do not change
4138 *STRICT_OVERFLOW_P. */
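/* Worked example (added commentary): for EXP == (x + 1 > 3) with
   signed X, the first iteration records the range "- [-, 3]" for
   x + 1 (the GT_EXPR case), and the next one moves the constant across
   (the PLUS_EXPR case), giving "- [-, 2]", i.e. x > 2.  X is returned
   with *PIN_P == 0, *PLOW == NULL_TREE and *PHIGH == 2, and
   *STRICT_OVERFLOW_P is set since the rearrangement relies on signed
   overflow being undefined.  */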
4141 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4142 bool *strict_overflow_p)
4144 enum tree_code code;
4145 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4146 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4148 tree low, high, n_low, n_high;
4150 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4151 and see if we can refine the range. Some of the cases below may not
4152 happen, but it doesn't seem worth worrying about this. We "continue"
4153 the outer loop when we've changed something; otherwise we "break"
4154 the switch, which will "break" the while. */
4157 low = high = build_int_cst (TREE_TYPE (exp), 0);
4161 code = TREE_CODE (exp);
4162 exp_type = TREE_TYPE (exp);
4164 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4166 if (TREE_OPERAND_LENGTH (exp) > 0)
4167 arg0 = TREE_OPERAND (exp, 0);
4168 if (TREE_CODE_CLASS (code) == tcc_comparison
4169 || TREE_CODE_CLASS (code) == tcc_unary
4170 || TREE_CODE_CLASS (code) == tcc_binary)
4171 arg0_type = TREE_TYPE (arg0);
4172 if (TREE_CODE_CLASS (code) == tcc_binary
4173 || TREE_CODE_CLASS (code) == tcc_comparison
4174 || (TREE_CODE_CLASS (code) == tcc_expression
4175 && TREE_OPERAND_LENGTH (exp) > 1))
4176 arg1 = TREE_OPERAND (exp, 1);
4181 case TRUTH_NOT_EXPR:
4182 in_p = ! in_p, exp = arg0;
4185 case EQ_EXPR: case NE_EXPR:
4186 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4187 /* We can only do something if the range is testing for zero
4188 and if the second operand is an integer constant. Note that
4189 saying something is "in" the range we make is done by
4190 complementing IN_P since it will set in the initial case of
4191 being not equal to zero; "out" is leaving it alone. */
4192 if (low == 0 || high == 0
4193 || ! integer_zerop (low) || ! integer_zerop (high)
4194 || TREE_CODE (arg1) != INTEGER_CST)
4199 case NE_EXPR: /* - [c, c] */
4202 case EQ_EXPR: /* + [c, c] */
4203 in_p = ! in_p, low = high = arg1;
4205 case GT_EXPR: /* - [-, c] */
4206 low = 0, high = arg1;
4208 case GE_EXPR: /* + [c, -] */
4209 in_p = ! in_p, low = arg1, high = 0;
4211 case LT_EXPR: /* - [c, -] */
4212 low = arg1, high = 0;
4214 case LE_EXPR: /* + [-, c] */
4215 in_p = ! in_p, low = 0, high = arg1;
4221 /* If this is an unsigned comparison, we also know that EXP is
4222 greater than or equal to zero. We base the range tests we make
4223 on that fact, so we record it here so we can parse existing
4224 range tests. We test arg0_type since often the return type
4225 of, e.g. EQ_EXPR, is boolean. */
4226 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4228 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4230 build_int_cst (arg0_type, 0),
4234 in_p = n_in_p, low = n_low, high = n_high;
4236 /* If the high bound is missing, but we have a nonzero low
4237 bound, reverse the range so it goes from zero to the low bound minus 1. */
4239 if (high == 0 && low && ! integer_zerop (low))
4242 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4243 integer_one_node, 0);
4244 low = build_int_cst (arg0_type, 0);
4252 /* (-x) IN [a,b] -> x in [-b, -a] */
4253 n_low = range_binop (MINUS_EXPR, exp_type,
4254 build_int_cst (exp_type, 0),
4256 n_high = range_binop (MINUS_EXPR, exp_type,
4257 build_int_cst (exp_type, 0),
4259 low = n_low, high = n_high;
4265 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4266 build_int_cst (exp_type, 1));
4269 case PLUS_EXPR: case MINUS_EXPR:
4270 if (TREE_CODE (arg1) != INTEGER_CST)
4273 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4274 move a constant to the other side. */
4275 if (!TYPE_UNSIGNED (arg0_type)
4276 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4279 /* If EXP is signed, any overflow in the computation is undefined,
4280 so we don't worry about it so long as our computations on
4281 the bounds don't overflow. For unsigned, overflow is defined
4282 and this is exactly the right thing. */
4283 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4284 arg0_type, low, 0, arg1, 0);
4285 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4286 arg0_type, high, 1, arg1, 0);
4287 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4288 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4291 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4292 *strict_overflow_p = true;
4294 /* Check for an unsigned range which has wrapped around the maximum
4295 value thus making n_high < n_low, and normalize it. */
4296 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4298 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4299 integer_one_node, 0);
4300 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4301 integer_one_node, 0);
4303 /* If the range is of the form +/- [ x+1, x ], we won't
4304 be able to normalize it. But then, it represents the
4305 whole range or the empty set, so make it +/- [ -, - ]. */
4307 if (tree_int_cst_equal (n_low, low)
4308 && tree_int_cst_equal (n_high, high))
4314 low = n_low, high = n_high;
4319 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4320 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4323 if (! INTEGRAL_TYPE_P (arg0_type)
4324 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4325 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4328 n_low = low, n_high = high;
4331 n_low = fold_convert (arg0_type, n_low);
4334 n_high = fold_convert (arg0_type, n_high);
4337 /* If we're converting arg0 from an unsigned type, to exp,
4338 a signed type, we will be doing the comparison as unsigned.
4339 The tests above have already verified that LOW and HIGH are both positive.
4342 So we have to ensure that we will handle large unsigned
4343 values the same way that the current signed bounds treat negative values. */
4346 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4350 /* For fixed-point modes, we need to pass the saturating flag
4351 as the 2nd parameter. */
4352 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4353 equiv_type = lang_hooks.types.type_for_mode
4354 (TYPE_MODE (arg0_type),
4355 TYPE_SATURATING (arg0_type));
4357 equiv_type = lang_hooks.types.type_for_mode
4358 (TYPE_MODE (arg0_type), 1);
4360 /* A range without an upper bound is, naturally, unbounded.
4361 Since convert would have cropped a very large value, use
4362 the max value for the destination type. */
4364 high_positive = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4365 : TYPE_MAX_VALUE (arg0_type);
4367 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4368 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4369 fold_convert (arg0_type,
4371 build_int_cst (arg0_type, 1));
4373 /* If the low bound is specified, "and" the range with the
4374 range for which the original unsigned value will be positive. */
4378 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4379 1, n_low, n_high, 1,
4380 fold_convert (arg0_type,
4385 in_p = (n_in_p == in_p);
4389 /* Otherwise, "or" the range with the range of the input
4390 that will be interpreted as negative. */
4391 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4392 0, n_low, n_high, 1,
4393 fold_convert (arg0_type,
4398 in_p = (in_p != n_in_p);
4403 low = n_low, high = n_high;
4413 /* If EXP is a constant, we can evaluate whether this is true or false. */
4414 if (TREE_CODE (exp) == INTEGER_CST)
4416 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4418 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4424 *pin_p = in_p, *plow = low, *phigh = high;
4428 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4429 type, TYPE, return an expression to test if EXP is in (or out of, depending
4430 on IN_P) the range. Return 0 if the test couldn't be created. */
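/* Example (added commentary): testing EXP in [2, 5] on a wrapping
   unsigned type reaches the MINUS_EXPR branch below with
   VALUE == high - low == 3 and recurses on EXP - 2 with the range
   [0, 3], which finally folds to the single comparison

     (unsigned) (EXP - 2) <= 3  */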
4433 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4435 tree etype = TREE_TYPE (exp);
4438 #ifdef HAVE_canonicalize_funcptr_for_compare
4439 /* Disable this optimization for function pointer expressions
4440 on targets that require function pointer canonicalization. */
4441 if (HAVE_canonicalize_funcptr_for_compare
4442 && TREE_CODE (etype) == POINTER_TYPE
4443 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4449 value = build_range_check (type, exp, 1, low, high);
4451 return invert_truthvalue (value);
4456 if (low == 0 && high == 0)
4457 return build_int_cst (type, 1);
4460 return fold_build2 (LE_EXPR, type, exp,
4461 fold_convert (etype, high));
4464 return fold_build2 (GE_EXPR, type, exp,
4465 fold_convert (etype, low));
4467 if (operand_equal_p (low, high, 0))
4468 return fold_build2 (EQ_EXPR, type, exp,
4469 fold_convert (etype, low));
4471 if (integer_zerop (low))
4473 if (! TYPE_UNSIGNED (etype))
4475 etype = unsigned_type_for (etype);
4476 high = fold_convert (etype, high);
4477 exp = fold_convert (etype, exp);
4479 return build_range_check (type, exp, 1, 0, high);
4482 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4483 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4485 unsigned HOST_WIDE_INT lo;
4489 prec = TYPE_PRECISION (etype);
4490 if (prec <= HOST_BITS_PER_WIDE_INT)
4493 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4497 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4498 lo = (unsigned HOST_WIDE_INT) -1;
4501 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4503 if (TYPE_UNSIGNED (etype))
4505 etype = signed_type_for (etype);
4506 exp = fold_convert (etype, exp);
4508 return fold_build2 (GT_EXPR, type, exp,
4509 build_int_cst (etype, 0));
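/* Worked example (illustrative): with ETYPE of precision 8, LOW == 1
   and HIGH == 127 match the constant built above, so the test
   1 <= c && c <= 127 on an unsigned char becomes (signed char) c > 0:
   zero fails, 1..127 pass, and 128..255 compare as negative.  */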
4513 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4514 This requires wrap-around arithmetic for the type of the expression. */
4515 switch (TREE_CODE (etype))
4518 /* There is no requirement that LOW be within the range of ETYPE
4519 if the latter is a subtype. It must, however, be within the base
4520 type of ETYPE. So be sure we do the subtraction in that type. */
4521 if (TREE_TYPE (etype))
4522 etype = TREE_TYPE (etype);
4527 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4528 TYPE_UNSIGNED (etype));
4535 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4536 if (TREE_CODE (etype) == INTEGER_TYPE
4537 && !TYPE_OVERFLOW_WRAPS (etype))
4539 tree utype, minv, maxv;
4541 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4542 for the type in question, as we rely on this here. */
4543 utype = unsigned_type_for (etype);
4544 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4545 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4546 integer_one_node, 1);
4547 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4549 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4556 high = fold_convert (etype, high);
4557 low = fold_convert (etype, low);
4558 exp = fold_convert (etype, exp);
4560 value = const_binop (MINUS_EXPR, high, low, 0);
4563 if (POINTER_TYPE_P (etype))
4565 if (value != 0 && !TREE_OVERFLOW (value))
4567 low = fold_convert (sizetype, low);
4568 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4569 return build_range_check (type,
4570 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4571 1, build_int_cst (etype, 0), value);
4576 if (value != 0 && !TREE_OVERFLOW (value))
4577 return build_range_check (type,
4578 fold_build2 (MINUS_EXPR, etype, exp, low),
4579 1, build_int_cst (etype, 0), value);
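/* Illustrative sketch, not part of GCC: the scalar analogue of the
   wrap-around range check built in tree form above.  The helper name
   is hypothetical; #if 0 keeps it out of the build.  */
#if 0
static int
in_range_example (unsigned int x, unsigned int low, unsigned int high)
{
  /* Relies on unsigned wrap-around: if X < LOW, then X - LOW is huge
     and the single comparison fails, so this is equivalent to
     LOW <= X && X <= HIGH.  */
  return x - low <= high - low;
}
#endif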
4584 /* Return the predecessor of VAL in its type, handling the infinite case. */
4587 range_predecessor (tree val)
4589 tree type = TREE_TYPE (val);
4591 if (INTEGRAL_TYPE_P (type)
4592 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4595 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4598 /* Return the successor of VAL in its type, handling the infinite case. */
4601 range_successor (tree val)
4603 tree type = TREE_TYPE (val);
4605 if (INTEGRAL_TYPE_P (type)
4606 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4609 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4612 /* Given two ranges, see if we can merge them into one. Return 1 if we
4613 can, 0 if we can't. Set the output range into the specified parameters. */
4616 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4617 tree high0, int in1_p, tree low1, tree high1)
4625 int lowequal = ((low0 == 0 && low1 == 0)
4626 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4627 low0, 0, low1, 0)));
4628 int highequal = ((high0 == 0 && high1 == 0)
4629 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4630 high0, 1, high1, 1)));
4632 /* Make range 0 be the range that starts first, or ends last if they
4633 start at the same value. Swap them if it isn't. */
4634 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4637 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4638 high1, 1, high0, 1))))
4640 temp = in0_p, in0_p = in1_p, in1_p = temp;
4641 tem = low0, low0 = low1, low1 = tem;
4642 tem = high0, high0 = high1, high1 = tem;
4645 /* Now flag two cases, whether the ranges are disjoint or whether the
4646 second range is totally subsumed in the first. Note that the tests
4647 below are simplified by the ones above. */
4648 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4649 high0, 1, low1, 0));
4650 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4651 high1, 1, high0, 1));
4653 /* We now have four cases, depending on whether we are including or
4654 excluding the two ranges. */
4657 /* If they don't overlap, the result is false. If the second range
4658 is a subset it is the result. Otherwise, the range is from the start
4659 of the second to the end of the first. */
4661 in_p = 0, low = high = 0;
4663 in_p = 1, low = low1, high = high1;
4665 in_p = 1, low = low1, high = high0;
4668 else if (in0_p && ! in1_p)
4670 /* If they don't overlap, the result is the first range. If they are
4671 equal, the result is false. If the second range is a subset of the
4672 first, and the ranges begin at the same place, we go from just after
4673 the end of the second range to the end of the first. If the second
4674 range is not a subset of the first, or if it is a subset and both
4675 ranges end at the same place, the range starts at the start of the
4676 first range and ends just before the second range.
4677 Otherwise, we can't describe this as a single range. */
4679 in_p = 1, low = low0, high = high0;
4680 else if (lowequal && highequal)
4681 in_p = 0, low = high = 0;
4682 else if (subset && lowequal)
4684 low = range_successor (high1);
4689 /* We are in the weird situation where high0 > high1 but
4690 high1 has no successor. Punt. */
4694 else if (! subset || highequal)
4697 high = range_predecessor (low1);
4701 /* low0 < low1 but low1 has no predecessor. Punt. */
4709 else if (! in0_p && in1_p)
4711 /* If they don't overlap, the result is the second range. If the second
4712 is a subset of the first, the result is false. Otherwise,
4713 the range starts just after the first range and ends at the
4714 end of the second. */
4716 in_p = 1, low = low1, high = high1;
4717 else if (subset || highequal)
4718 in_p = 0, low = high = 0;
4721 low = range_successor (high0);
4726 /* high1 > high0 but high0 has no successor. Punt. */
4734 /* The case where we are excluding both ranges. Here the complex case
4735 is if they don't overlap. In that case, the only time we have a
4736 range is if they are adjacent. If the second is a subset of the
4737 first, the result is the first. Otherwise, the range to exclude
4738 starts at the beginning of the first range and ends at the end of the
4742 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4743 range_successor (high0),
4745 in_p = 0, low = low0, high = high1;
4748 /* Canonicalize - [min, x] into - [-, x]. */
4749 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4750 switch (TREE_CODE (TREE_TYPE (low0)))
4753 if (TYPE_PRECISION (TREE_TYPE (low0))
4754 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4758 if (tree_int_cst_equal (low0,
4759 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4763 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4764 && integer_zerop (low0))
4771 /* Canonicalize - [x, max] into - [x, -]. */
4772 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4773 switch (TREE_CODE (TREE_TYPE (high1)))
4776 if (TYPE_PRECISION (TREE_TYPE (high1))
4777 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4781 if (tree_int_cst_equal (high1,
4782 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4786 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4787 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4789 integer_one_node, 1)))
4796 /* The ranges might also be adjacent between the maximum and
4797 minimum values of the given type. For
4798 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4799 return + [x + 1, y - 1]. */
4800 if (low0 == 0 && high1 == 0)
4802 low = range_successor (high0);
4803 high = range_predecessor (low1);
4804 if (low == 0 || high == 0)
4814 in_p = 0, low = low0, high = high0;
4816 in_p = 0, low = low0, high = high1;
4819 *pin_p = in_p, *plow = low, *phigh = high;
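/* Worked examples (illustrative): merge_ranges computes the "and" of
   the two predicates (callers such as fold_range_test pre-invert both
   sides for "or").  Merging + [0, 9] with + [5, 20] yields + [5, 9];
   merging the disjoint + [0, 4] with + [10, 20] yields in_p == 0 with
   unbounded limits, i.e. the combined test is always false.  */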
4824 /* Subroutine of fold, looking inside expressions of the form
4825 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4826 of the COND_EXPR. This function is also used to optimize
4827 A op B ? C : A by reversing the comparison first.
4829 Return a folded expression whose code is not a COND_EXPR
4830 anymore, or NULL_TREE if no folding opportunity is found. */
4833 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4835 enum tree_code comp_code = TREE_CODE (arg0);
4836 tree arg00 = TREE_OPERAND (arg0, 0);
4837 tree arg01 = TREE_OPERAND (arg0, 1);
4838 tree arg1_type = TREE_TYPE (arg1);
4844 /* If we have A op 0 ? A : -A, consider applying the following
4847 A == 0? A : -A same as -A
4848 A != 0? A : -A same as A
4849 A >= 0? A : -A same as abs (A)
4850 A > 0? A : -A same as abs (A)
4851 A <= 0? A : -A same as -abs (A)
4852 A < 0? A : -A same as -abs (A)
4854 None of these transformations work for modes with signed
4855 zeros. If A is +/-0, the first two transformations will
4856 change the sign of the result (from +0 to -0, or vice
4857 versa). The last four will fix the sign of the result,
4858 even though the original expressions could be positive or
4859 negative, depending on the sign of A.
4861 Note that all these transformations are correct if A is
4862 NaN, since the two alternatives (A and -A) are also NaNs. */
4863 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4864 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4865 ? real_zerop (arg01)
4866 : integer_zerop (arg01))
4867 && ((TREE_CODE (arg2) == NEGATE_EXPR
4868 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4869 /* In the case that A is of the form X-Y, '-A' (arg2) may
4870 have already been folded to Y-X; check for that. */
4871 || (TREE_CODE (arg1) == MINUS_EXPR
4872 && TREE_CODE (arg2) == MINUS_EXPR
4873 && operand_equal_p (TREE_OPERAND (arg1, 0),
4874 TREE_OPERAND (arg2, 1), 0)
4875 && operand_equal_p (TREE_OPERAND (arg1, 1),
4876 TREE_OPERAND (arg2, 0), 0))))
4881 tem = fold_convert (arg1_type, arg1);
4882 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4885 return pedantic_non_lvalue (fold_convert (type, arg1));
4888 if (flag_trapping_math)
4893 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4894 arg1 = fold_convert (signed_type_for
4895 (TREE_TYPE (arg1)), arg1);
4896 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4897 return pedantic_non_lvalue (fold_convert (type, tem));
4900 if (flag_trapping_math)
4904 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4905 arg1 = fold_convert (signed_type_for
4906 (TREE_TYPE (arg1)), arg1);
4907 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4908 return negate_expr (fold_convert (type, tem));
4910 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4914 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4915 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4916 both transformations are correct when A is NaN: A != 0
4917 is then true, and A == 0 is false. */
4919 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4920 && integer_zerop (arg01) && integer_zerop (arg2))
4922 if (comp_code == NE_EXPR)
4923 return pedantic_non_lvalue (fold_convert (type, arg1));
4924 else if (comp_code == EQ_EXPR)
4925 return build_int_cst (type, 0);
4928 /* Try some transformations of A op B ? A : B.
4930 A == B? A : B same as B
4931 A != B? A : B same as A
4932 A >= B? A : B same as max (A, B)
4933 A > B? A : B same as max (B, A)
4934 A <= B? A : B same as min (A, B)
4935 A < B? A : B same as min (B, A)
4937 As above, these transformations don't work in the presence
4938 of signed zeros. For example, if A and B are zeros of
4939 opposite sign, the first two transformations will change
4940 the sign of the result. In the last four, the original
4941 expressions give different results for (A=+0, B=-0) and
4942 (A=-0, B=+0), but the transformed expressions do not.
4944 The first two transformations are correct if either A or B
4945 is a NaN. In the first transformation, the condition will
4946 be false, and B will indeed be chosen. In the case of the
4947 second transformation, the condition A != B will be true,
4948 and A will be chosen.
4950 The conversions to max() and min() are not correct if B is
4951 a number and A is not. The conditions in the original
4952 expressions will be false, so all four give B. The min()
4953 and max() versions would give a NaN instead. */
4954 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4955 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4956 /* Avoid these transformations if the COND_EXPR may be used
4957 as an lvalue in the C++ front-end. PR c++/19199. */
4959 || (strcmp (lang_hooks.name, "GNU C++") != 0
4960 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4961 || ! maybe_lvalue_p (arg1)
4962 || ! maybe_lvalue_p (arg2)))
4964 tree comp_op0 = arg00;
4965 tree comp_op1 = arg01;
4966 tree comp_type = TREE_TYPE (comp_op0);
4968 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4969 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4979 return pedantic_non_lvalue (fold_convert (type, arg2));
4981 return pedantic_non_lvalue (fold_convert (type, arg1));
4986 /* In C++ a ?: expression can be an lvalue, so put the
4987 operand which will be used if they are equal first
4988 so that we can convert this back to the
4989 corresponding COND_EXPR. */
4990 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4992 comp_op0 = fold_convert (comp_type, comp_op0);
4993 comp_op1 = fold_convert (comp_type, comp_op1);
4994 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4995 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4996 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4997 return pedantic_non_lvalue (fold_convert (type, tem));
5004 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5006 comp_op0 = fold_convert (comp_type, comp_op0);
5007 comp_op1 = fold_convert (comp_type, comp_op1);
5008 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5009 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5010 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5011 return pedantic_non_lvalue (fold_convert (type, tem));
5015 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5016 return pedantic_non_lvalue (fold_convert (type, arg2));
5019 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5020 return pedantic_non_lvalue (fold_convert (type, arg1));
5023 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5028 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5029 we might still be able to simplify this. For example,
5030 if C1 is one less or one more than C2, this might have started
5031 out as a MIN or MAX and been transformed by this function.
5032 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5034 if (INTEGRAL_TYPE_P (type)
5035 && TREE_CODE (arg01) == INTEGER_CST
5036 && TREE_CODE (arg2) == INTEGER_CST)
5040 /* We can replace A with C1 in this case. */
5041 arg1 = fold_convert (type, arg01);
5042 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5045 /* If C1 is C2 + 1, this is min(A, C2). */
5046 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5048 && operand_equal_p (arg01,
5049 const_binop (PLUS_EXPR, arg2,
5050 build_int_cst (type, 1), 0),
5052 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5054 fold_convert (type, arg1),
5059 /* If C1 is C2 - 1, this is min(A, C2). */
5060 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5062 && operand_equal_p (arg01,
5063 const_binop (MINUS_EXPR, arg2,
5064 build_int_cst (type, 1), 0),
5066 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5068 fold_convert (type, arg1),
5073 /* If C1 is C2 - 1, this is max(A, C2). */
5074 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5076 && operand_equal_p (arg01,
5077 const_binop (MINUS_EXPR, arg2,
5078 build_int_cst (type, 1), 0),
5080 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5082 fold_convert (type, arg1),
5087 /* If C1 is C2 + 1, this is max(A, C2). */
5088 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5090 && operand_equal_p (arg01,
5091 const_binop (PLUS_EXPR, arg2,
5092 build_int_cst (type, 1), 0),
5094 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5096 fold_convert (type, arg1),
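/* Worked example (illustrative): A > 3 ? A : 4 has C1 == 3 and
   C2 == 4; since C1 is C2 - 1, this folds to MAX (A, 4).  Likewise
   A < 4 ? A : 3 has C1 == C2 + 1 and folds to MIN (A, 3).  */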
5110 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5111 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5114 /* EXP is some logical combination of boolean tests. See if we can
5115 merge it into some range test. Return the new tree if so. */
5118 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5120 int or_op = (code == TRUTH_ORIF_EXPR
5121 || code == TRUTH_OR_EXPR);
5122 int in0_p, in1_p, in_p;
5123 tree low0, low1, low, high0, high1, high;
5124 bool strict_overflow_p = false;
5125 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5126 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5128 const char * const warnmsg = G_("assuming signed overflow does not occur "
5129 "when simplifying range test");
5131 /* If this is an OR operation, invert both sides; we will invert
5132 again at the end. */
5134 in0_p = ! in0_p, in1_p = ! in1_p;
5136 /* If both expressions are the same, if we can merge the ranges, and we
5137 can build the range test, return it or it inverted. If one of the
5138 ranges is always true or always false, consider it to be the same
5139 expression as the other. */
5140 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5141 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5143 && 0 != (tem = (build_range_check (type,
5145 : rhs != 0 ? rhs : integer_zero_node,
5148 if (strict_overflow_p)
5149 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5150 return or_op ? invert_truthvalue (tem) : tem;
5153 /* On machines where the branch cost is expensive, if this is a
5154 short-circuited branch and the underlying object on both sides
5155 is the same, make a non-short-circuit operation. */
5156 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5157 && lhs != 0 && rhs != 0
5158 && (code == TRUTH_ANDIF_EXPR
5159 || code == TRUTH_ORIF_EXPR)
5160 && operand_equal_p (lhs, rhs, 0))
5162 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5163 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5164 which cases we can't do this. */
5165 if (simple_operand_p (lhs))
5166 return build2 (code == TRUTH_ANDIF_EXPR
5167 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5170 else if (lang_hooks.decls.global_bindings_p () == 0
5171 && ! CONTAINS_PLACEHOLDER_P (lhs))
5173 tree common = save_expr (lhs);
5175 if (0 != (lhs = build_range_check (type, common,
5176 or_op ? ! in0_p : in0_p,
5178 && (0 != (rhs = build_range_check (type, common,
5179 or_op ? ! in1_p : in1_p,
5182 if (strict_overflow_p)
5183 fold_overflow_warning (warnmsg,
5184 WARN_STRICT_OVERFLOW_COMPARISON);
5185 return build2 (code == TRUTH_ANDIF_EXPR
5186 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
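/* Worked example (illustrative): for ch >= '0' && ch <= '9',
   make_range yields + ['0', -] and + [-, '9'], merge_ranges combines
   them into + ['0', '9'], and build_range_check emits a single
   unsigned test of the form (unsigned) (ch - '0') <= 9.  */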
5195 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5196 bit value. Arrange things so the extra bits will be set to zero if and
5197 only if C is sign-extended to its full width. If MASK is nonzero,
5198 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5201 unextend (tree c, int p, int unsignedp, tree mask)
5203 tree type = TREE_TYPE (c);
5204 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5207 if (p == modesize || unsignedp)
5210 /* We work by getting just the sign bit into the low-order bit, then
5211 into the high-order bit, then sign-extend. We then XOR that value
5213 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5214 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5216 /* We must use a signed type in order to get an arithmetic right shift.
5217 However, we must also avoid introducing accidental overflows, so that
5218 a subsequent call to integer_zerop will work. Hence we must
5219 do the type conversion here. At this point, the constant is either
5220 zero or one, and the conversion to a signed type can never overflow.
5221 We could get an overflow if this conversion is done anywhere else. */
5222 if (TYPE_UNSIGNED (type))
5223 temp = fold_convert (signed_type_for (type), temp);
5225 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5226 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5228 temp = const_binop (BIT_AND_EXPR, temp,
5229 fold_convert (TREE_TYPE (c), mask), 0);
5230 /* If necessary, convert the type back to match the type of C. */
5231 if (TYPE_UNSIGNED (type))
5232 temp = fold_convert (type, temp);
5234 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
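/* Illustrative sketch, not part of GCC: a classic scalar idiom
   related to the shift sequence above, sign-extending the low P bits
   of C (assuming the higher bits are zero).  The helper name is
   hypothetical; #if 0 keeps it out of the build.  */
#if 0
static long long
sign_extend_example (unsigned long long c, int p)
{
  unsigned long long sign = 1ULL << (p - 1);
  /* XOR flips bit P-1; subtracting SIGN then either restores the
     original value (bit was clear) or borrows into all higher bits
     (bit was set), which is exactly the sign extension.  */
  return (long long) ((c ^ sign) - sign);
}
#endif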
5237 /* Find ways of folding logical expressions of LHS and RHS:
5238 Try to merge two comparisons to the same innermost item.
5239 Look for range tests like "ch >= '0' && ch <= '9'".
5240 Look for combinations of simple terms on machines with expensive branches
5241 and evaluate the RHS unconditionally.
5243 For example, if we have p->a == 2 && p->b == 4 and we can make an
5244 object large enough to span both A and B, we can do this with a comparison
5245 against the object ANDed with a mask.
5247 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5248 operations to do this with one comparison.
5250 We check for both normal comparisons and the BIT_AND_EXPRs made by
5251 this function and the one above.
5253 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5254 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5256 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5259 We return the simplified tree or 0 if no optimization is possible. */
5262 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5264 /* If this is the "or" of two comparisons, we can do something if
5265 the comparisons are NE_EXPR. If this is the "and", we can do something
5266 if the comparisons are EQ_EXPR. I.e.,
5267 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5269 WANTED_CODE is this operation code. For single bit fields, we can
5270 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5271 comparison for one-bit fields. */
5273 enum tree_code wanted_code;
5274 enum tree_code lcode, rcode;
5275 tree ll_arg, lr_arg, rl_arg, rr_arg;
5276 tree ll_inner, lr_inner, rl_inner, rr_inner;
5277 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5278 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5279 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5280 HOST_WIDE_INT lnbitsize, lnbitpos;
5281 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5282 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5283 enum machine_mode lnmode;
5284 tree ll_mask, lr_mask, rl_mask, rr_mask;
5285 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5286 tree l_const, r_const;
5287 tree lntype, result;
5288 int first_bit, end_bit;
5290 tree orig_lhs = lhs, orig_rhs = rhs;
5291 enum tree_code orig_code = code;
5293 /* Start by getting the comparison codes. Fail if anything is volatile.
5294 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5295 it were surrounded with a NE_EXPR. */
5297 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5300 lcode = TREE_CODE (lhs);
5301 rcode = TREE_CODE (rhs);
5303 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5305 lhs = build2 (NE_EXPR, truth_type, lhs,
5306 build_int_cst (TREE_TYPE (lhs), 0));
5310 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5312 rhs = build2 (NE_EXPR, truth_type, rhs,
5313 build_int_cst (TREE_TYPE (rhs), 0));
5317 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5318 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5321 ll_arg = TREE_OPERAND (lhs, 0);
5322 lr_arg = TREE_OPERAND (lhs, 1);
5323 rl_arg = TREE_OPERAND (rhs, 0);
5324 rr_arg = TREE_OPERAND (rhs, 1);
5326 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5327 if (simple_operand_p (ll_arg)
5328 && simple_operand_p (lr_arg))
5331 if (operand_equal_p (ll_arg, rl_arg, 0)
5332 && operand_equal_p (lr_arg, rr_arg, 0))
5334 result = combine_comparisons (code, lcode, rcode,
5335 truth_type, ll_arg, lr_arg);
5339 else if (operand_equal_p (ll_arg, rr_arg, 0)
5340 && operand_equal_p (lr_arg, rl_arg, 0))
5342 result = combine_comparisons (code, lcode,
5343 swap_tree_comparison (rcode),
5344 truth_type, ll_arg, lr_arg);
5350 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5351 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5353 /* If the RHS can be evaluated unconditionally and its operands are
5354 simple, it wins to evaluate the RHS unconditionally on machines
5355 with expensive branches. In this case, this isn't a comparison
5356 that can be merged. Avoid doing this if the RHS is a floating-point
5357 comparison since those can trap. */
5359 if (BRANCH_COST >= 2
5360 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5361 && simple_operand_p (rl_arg)
5362 && simple_operand_p (rr_arg))
5364 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5365 if (code == TRUTH_OR_EXPR
5366 && lcode == NE_EXPR && integer_zerop (lr_arg)
5367 && rcode == NE_EXPR && integer_zerop (rr_arg)
5368 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5369 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5370 return build2 (NE_EXPR, truth_type,
5371 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5373 build_int_cst (TREE_TYPE (ll_arg), 0));
5375 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5376 if (code == TRUTH_AND_EXPR
5377 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5378 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5379 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5380 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5381 return build2 (EQ_EXPR, truth_type,
5382 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5384 build_int_cst (TREE_TYPE (ll_arg), 0));
5386 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5388 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5389 return build2 (code, truth_type, lhs, rhs);
5394 /* See if the comparisons can be merged. Then get all the parameters for
5397 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5398 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5402 ll_inner = decode_field_reference (ll_arg,
5403 &ll_bitsize, &ll_bitpos, &ll_mode,
5404 &ll_unsignedp, &volatilep, &ll_mask,
5406 lr_inner = decode_field_reference (lr_arg,
5407 &lr_bitsize, &lr_bitpos, &lr_mode,
5408 &lr_unsignedp, &volatilep, &lr_mask,
5410 rl_inner = decode_field_reference (rl_arg,
5411 &rl_bitsize, &rl_bitpos, &rl_mode,
5412 &rl_unsignedp, &volatilep, &rl_mask,
5414 rr_inner = decode_field_reference (rr_arg,
5415 &rr_bitsize, &rr_bitpos, &rr_mode,
5416 &rr_unsignedp, &volatilep, &rr_mask,
5419 /* The inner operation on the lhs of each comparison must be the
5420 same if we are to be able to do anything.
5421 Then see if we have constants. If not, the same must be true for
5423 if (volatilep || ll_inner == 0 || rl_inner == 0
5424 || ! operand_equal_p (ll_inner, rl_inner, 0))
5427 if (TREE_CODE (lr_arg) == INTEGER_CST
5428 && TREE_CODE (rr_arg) == INTEGER_CST)
5429 l_const = lr_arg, r_const = rr_arg;
5430 else if (lr_inner == 0 || rr_inner == 0
5431 || ! operand_equal_p (lr_inner, rr_inner, 0))
5434 l_const = r_const = 0;
5436 /* If either comparison code is not correct for our logical operation,
5437 fail. However, we can convert a one-bit comparison against zero into
5438 the opposite comparison against that bit being set in the field. */
5440 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5441 if (lcode != wanted_code)
5443 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5445 /* Make the left operand unsigned, since we are only interested
5446 in the value of one bit. Otherwise we are doing the wrong
5455 /* This is analogous to the code for l_const above. */
5456 if (rcode != wanted_code)
5458 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5467 /* See if we can find a mode that contains both fields being compared on
5468 the left. If we can't, fail. Otherwise, update all constants and masks
5469 to be relative to a field of that size. */
5470 first_bit = MIN (ll_bitpos, rl_bitpos);
5471 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5472 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5473 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5475 if (lnmode == VOIDmode)
5478 lnbitsize = GET_MODE_BITSIZE (lnmode);
5479 lnbitpos = first_bit & ~ (lnbitsize - 1);
5480 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5481 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5483 if (BYTES_BIG_ENDIAN)
5485 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5486 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5489 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5490 size_int (xll_bitpos), 0);
5491 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5492 size_int (xrl_bitpos), 0);
5496 l_const = fold_convert (lntype, l_const);
5497 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5498 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5499 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5500 fold_build1 (BIT_NOT_EXPR,
5504 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5506 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5511 r_const = fold_convert (lntype, r_const);
5512 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5513 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5514 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5515 fold_build1 (BIT_NOT_EXPR,
5519 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5521 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5525 /* Handle the case of comparisons with constants. If there is something in
5526 common between the masks, those bits of the constants must be the same.
5527 If not, the condition is always false. Test for this to avoid generating
5528 incorrect code below. */
5529 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5530 if (! integer_zerop (result)
5531 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5532 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5534 if (wanted_code == NE_EXPR)
5536 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5537 return constant_boolean_node (true, truth_type);
5541 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5542 return constant_boolean_node (false, truth_type);
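/* Worked example (illustrative): given struct { unsigned a : 4;
   unsigned b : 4; } *p, the test p->a == 2 && p->b == 4 can load the
   containing byte once and compare it, under the combined mask,
   against a single constant (0x42 on a typical little-endian
   layout), using the masks and shifted constants built above.  */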
5549 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5553 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5556 enum tree_code op_code;
5559 int consts_equal, consts_lt;
5562 STRIP_SIGN_NOPS (arg0);
5564 op_code = TREE_CODE (arg0);
5565 minmax_const = TREE_OPERAND (arg0, 1);
5566 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5567 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5568 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5569 inner = TREE_OPERAND (arg0, 0);
5571 /* If something does not permit us to optimize, return the original tree. */
5572 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5573 || TREE_CODE (comp_const) != INTEGER_CST
5574 || TREE_OVERFLOW (comp_const)
5575 || TREE_CODE (minmax_const) != INTEGER_CST
5576 || TREE_OVERFLOW (minmax_const))
5579 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5580 and GT_EXPR, doing the rest with recursive calls using logical
5584 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5586 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5589 return invert_truthvalue (tem);
5595 fold_build2 (TRUTH_ORIF_EXPR, type,
5596 optimize_minmax_comparison
5597 (EQ_EXPR, type, arg0, comp_const),
5598 optimize_minmax_comparison
5599 (GT_EXPR, type, arg0, comp_const));
5602 if (op_code == MAX_EXPR && consts_equal)
5603 /* MAX (X, 0) == 0 -> X <= 0 */
5604 return fold_build2 (LE_EXPR, type, inner, comp_const);
5606 else if (op_code == MAX_EXPR && consts_lt)
5607 /* MAX (X, 0) == 5 -> X == 5 */
5608 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5610 else if (op_code == MAX_EXPR)
5611 /* MAX (X, 0) == -1 -> false */
5612 return omit_one_operand (type, integer_zero_node, inner);
5614 else if (consts_equal)
5615 /* MIN (X, 0) == 0 -> X >= 0 */
5616 return fold_build2 (GE_EXPR, type, inner, comp_const);
5619 /* MIN (X, 0) == 5 -> false */
5620 return omit_one_operand (type, integer_zero_node, inner);
5623 /* MIN (X, 0) == -1 -> X == -1 */
5624 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5627 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5628 /* MAX (X, 0) > 0 -> X > 0
5629 MAX (X, 0) > 5 -> X > 5 */
5630 return fold_build2 (GT_EXPR, type, inner, comp_const);
5632 else if (op_code == MAX_EXPR)
5633 /* MAX (X, 0) > -1 -> true */
5634 return omit_one_operand (type, integer_one_node, inner);
5636 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5637 /* MIN (X, 0) > 0 -> false
5638 MIN (X, 0) > 5 -> false */
5639 return omit_one_operand (type, integer_zero_node, inner);
5642 /* MIN (X, 0) > -1 -> X > -1 */
5643 return fold_build2 (GT_EXPR, type, inner, comp_const);
5650 /* T is an integer expression that is being multiplied by, divided by,
5651 or taken modulo (CODE says which, and what kind of divide or modulus)
5652 a constant C. See if we can eliminate that operation by folding it with
5653 other operations already in T. WIDE_TYPE, if non-null, is a type that
5654 should be used for the computation if wider than our type.
5656 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5657 (X * 2) + (Y * 4). We must, however, be assured that either the original
5658 expression would not overflow or that overflow is undefined for the type
5659 in the language in question.
5661 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5662 the machine has a multiply-accumulate insn or that this is part of an
5663 addressing calculation.
5665 If we return a non-null expression, it is an equivalent form of the
5666 original computation, but need not be in the original type.
5668 We set *STRICT_OVERFLOW_P to true if the return value depends on
5669 signed overflow being undefined. Otherwise we do not change
5670 *STRICT_OVERFLOW_P. */
5673 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5674 bool *strict_overflow_p)
5676 /* To avoid exponential search depth, refuse to allow recursion past
5677 three levels. Beyond that (1) it's highly unlikely that we'll find
5678 something interesting and (2) we've probably processed it before
5679 when we built the inner expression. */
5688 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5695 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5696 bool *strict_overflow_p)
5698 tree type = TREE_TYPE (t);
5699 enum tree_code tcode = TREE_CODE (t);
5700 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5701 > GET_MODE_SIZE (TYPE_MODE (type)))
5702 ? wide_type : type);
5704 int same_p = tcode == code;
5705 tree op0 = NULL_TREE, op1 = NULL_TREE;
5706 bool sub_strict_overflow_p;
5708 /* Don't deal with constants of zero here; they confuse the code below. */
5709 if (integer_zerop (c))
5712 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5713 op0 = TREE_OPERAND (t, 0);
5715 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5716 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5718 /* Note that we need not handle conditional operations here since fold
5719 already handles those cases. So just do arithmetic here. */
5723 /* For a constant, we can always simplify if we are a multiply
5724 or (for divide and modulus) if it is a multiple of our constant. */
5725 if (code == MULT_EXPR
5726 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5727 return const_binop (code, fold_convert (ctype, t),
5728 fold_convert (ctype, c), 0);
5731 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5732 /* If op0 is an expression ... */
5733 if ((COMPARISON_CLASS_P (op0)
5734 || UNARY_CLASS_P (op0)
5735 || BINARY_CLASS_P (op0)
5736 || VL_EXP_CLASS_P (op0)
5737 || EXPRESSION_CLASS_P (op0))
5738 /* ... and is unsigned, and its type is smaller than ctype,
5739 then we cannot pass through as widening. */
5740 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5741 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5742 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5743 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5744 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5745 /* ... or this is a truncation (t is narrower than op0),
5746 then we cannot pass through this narrowing. */
5747 || (GET_MODE_SIZE (TYPE_MODE (type))
5748 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5749 /* ... or signedness changes for division or modulus,
5750 then we cannot pass through this conversion. */
5751 || (code != MULT_EXPR
5752 && (TYPE_UNSIGNED (ctype)
5753 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5754 /* ... or has undefined overflow while the converted to
5755 type has not, we cannot do the operation in the inner type
5756 as that would introduce undefined overflow. */
5757 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5758 && !TYPE_OVERFLOW_UNDEFINED (type))))
5761 /* Pass the constant down and see if we can make a simplification. If
5762 we can, replace this expression with the inner simplification for
5763 possible later conversion to our or some other type. */
5764 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5765 && TREE_CODE (t2) == INTEGER_CST
5766 && !TREE_OVERFLOW (t2)
5767 && (0 != (t1 = extract_muldiv (op0, t2, code,
5769 ? ctype : NULL_TREE,
5770 strict_overflow_p))))
5775 /* If widening the type changes it from signed to unsigned, then we
5776 must avoid building ABS_EXPR itself as unsigned. */
5777 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5779 tree cstype = (*signed_type_for) (ctype);
5780 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5783 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5784 return fold_convert (ctype, t1);
5788 /* If the constant is negative, we cannot simplify this. */
5789 if (tree_int_cst_sgn (c) == -1)
5793 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5795 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5798 case MIN_EXPR: case MAX_EXPR:
5799 /* If widening the type changes the signedness, then we can't perform
5800 this optimization as that changes the result. */
5801 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5804 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5805 sub_strict_overflow_p = false;
5806 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5807 &sub_strict_overflow_p)) != 0
5808 && (t2 = extract_muldiv (op1, c, code, wide_type,
5809 &sub_strict_overflow_p)) != 0)
5811 if (tree_int_cst_sgn (c) < 0)
5812 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5813 if (sub_strict_overflow_p)
5814 *strict_overflow_p = true;
5815 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5816 fold_convert (ctype, t2));
5820 case LSHIFT_EXPR: case RSHIFT_EXPR:
5821 /* If the second operand is constant, this is a multiplication
5822 or floor division, by a power of two, so we can treat it that
5823 way unless the multiplier or divisor overflows. Signed
5824 left-shift overflow is implementation-defined rather than
5825 undefined in C90, so do not convert signed left shift into
5827 if (TREE_CODE (op1) == INTEGER_CST
5828 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5829 /* const_binop may not detect overflow correctly,
5830 so check for it explicitly here. */
5831 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5832 && TREE_INT_CST_HIGH (op1) == 0
5833 && 0 != (t1 = fold_convert (ctype,
5834 const_binop (LSHIFT_EXPR,
5837 && !TREE_OVERFLOW (t1))
5838 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5839 ? MULT_EXPR : FLOOR_DIV_EXPR,
5840 ctype, fold_convert (ctype, op0), t1),
5841 c, code, wide_type, strict_overflow_p);
5844 case PLUS_EXPR: case MINUS_EXPR:
5845 /* See if we can eliminate the operation on both sides. If we can, we
5846 can return a new PLUS or MINUS. If we can't, the only remaining
5847 cases where we can do anything are if the second operand is a
5849 sub_strict_overflow_p = false;
5850 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5851 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5852 if (t1 != 0 && t2 != 0
5853 && (code == MULT_EXPR
5854 /* If not multiplication, we can only do this if both operands
5855 are divisible by c. */
5856 || (multiple_of_p (ctype, op0, c)
5857 && multiple_of_p (ctype, op1, c))))
5859 if (sub_strict_overflow_p)
5860 *strict_overflow_p = true;
5861 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5862 fold_convert (ctype, t2));
5865 /* If this was a subtraction, negate OP1 and set it to be an addition.
5866 This simplifies the logic below. */
5867 if (tcode == MINUS_EXPR)
5868 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5870 if (TREE_CODE (op1) != INTEGER_CST)
5873 /* If either OP1 or C is negative, this optimization is not safe for
5874 some of the division and remainder types while for others we need
5875 to change the code. */
5876 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5878 if (code == CEIL_DIV_EXPR)
5879 code = FLOOR_DIV_EXPR;
5880 else if (code == FLOOR_DIV_EXPR)
5881 code = CEIL_DIV_EXPR;
5882 else if (code != MULT_EXPR
5883 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5887 /* If it's a multiply or a division/modulus operation of a multiple
5888 of our constant, do the operation and verify it doesn't overflow. */
5889 if (code == MULT_EXPR
5890 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5892 op1 = const_binop (code, fold_convert (ctype, op1),
5893 fold_convert (ctype, c), 0);
5894 /* We allow the constant to overflow with wrapping semantics. */
5896 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5902 /* If we have an unsigned type that is not a sizetype, we cannot widen
5903 the operation since it will change the result if the original
5904 computation overflowed. */
5905 if (TYPE_UNSIGNED (ctype)
5906 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5910 /* If we were able to eliminate our operation from the first side,
5911 apply our operation to the second side and reform the PLUS. */
5912 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5913 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5915 /* The last case is if we are a multiply. In that case, we can
5916 apply the distributive law to commute the multiply and addition
5917 if the multiplication of the constants doesn't overflow. */
5918 if (code == MULT_EXPR)
5919 return fold_build2 (tcode, ctype,
5920 fold_build2 (code, ctype,
5921 fold_convert (ctype, op0),
5922 fold_convert (ctype, c)),
5928 /* We have a special case here if we are doing something like
5929 (C * 8) % 4 since we know that's zero. */
5930 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5931 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5932 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5933 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5934 return omit_one_operand (type, integer_zero_node, op0);
5936 /* ... fall through ... */
5938 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5939 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5940 /* If we can extract our operation from the LHS, do so and return a
5941 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5942 do something only if the second operand is a constant. */
5944 && (t1 = extract_muldiv (op0, c, code, wide_type,
5945 strict_overflow_p)) != 0)
5946 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5947 fold_convert (ctype, op1));
5948 else if (tcode == MULT_EXPR && code == MULT_EXPR
5949 && (t1 = extract_muldiv (op1, c, code, wide_type,
5950 strict_overflow_p)) != 0)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5952 fold_convert (ctype, t1));
5953 else if (TREE_CODE (op1) != INTEGER_CST)
5956 /* If these are the same operation types, we can associate them
5957 assuming no overflow. */
5959 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5960 fold_convert (ctype, c), 1))
5961 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5962 TREE_INT_CST_HIGH (t1),
5963 (TYPE_UNSIGNED (ctype)
5964 && tcode != MULT_EXPR) ? -1 : 1,
5965 TREE_OVERFLOW (t1)))
5966 && !TREE_OVERFLOW (t1))
5967 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5969 /* If these operations "cancel" each other, we have the main
5970 optimizations of this pass, which occur when either constant is a
5971 multiple of the other, in which case we replace this with an
5972 operation of either CODE or TCODE.
5974 If we have an unsigned type that is not a sizetype, we cannot do
5975 this since it will change the result if the original computation
5977 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5978 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5979 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5980 || (tcode == MULT_EXPR
5981 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5982 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5983 && code != MULT_EXPR)))
5985 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5987 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5988 *strict_overflow_p = true;
5989 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5990 fold_convert (ctype,
5991 const_binop (TRUNC_DIV_EXPR,
5994 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5996 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5997 *strict_overflow_p = true;
5998 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5999 fold_convert (ctype,
6000 const_binop (TRUNC_DIV_EXPR,
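/* Worked example (illustrative): (X * 6) / 2 folds to X * 3 (op1 is
   a multiple of C), and (X * 2) / 6 folds to X / 3 (C is a multiple
   of op1); both directions rely on the overflow guarantees checked
   above.  */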
6013 /* Return a node which has the indicated constant VALUE (either 0 or
6014 1), and is of the indicated TYPE. */
6017 constant_boolean_node (int value, tree type)
6019 if (type == integer_type_node)
6020 return value ? integer_one_node : integer_zero_node;
6021 else if (type == boolean_type_node)
6022 return value ? boolean_true_node : boolean_false_node;
6024 return build_int_cst (type, value);
6028 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6029 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6030 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6031 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6032 COND is the first argument to CODE; otherwise (as in the example
6033 given here), it is the second argument. TYPE is the type of the
6034 original expression. Return NULL_TREE if no simplification is
6038 fold_binary_op_with_conditional_arg (enum tree_code code,
6039 tree type, tree op0, tree op1,
6040 tree cond, tree arg, int cond_first_p)
6042 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6043 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6044 tree test, true_value, false_value;
6045 tree lhs = NULL_TREE;
6046 tree rhs = NULL_TREE;
6048 /* This transformation is only worthwhile if we don't have to wrap
6049 arg in a SAVE_EXPR, and the operation can be simplified on at least
6050 one of the branches once it's pushed inside the COND_EXPR. */
6051 if (!TREE_CONSTANT (arg))
6054 if (TREE_CODE (cond) == COND_EXPR)
6056 test = TREE_OPERAND (cond, 0);
6057 true_value = TREE_OPERAND (cond, 1);
6058 false_value = TREE_OPERAND (cond, 2);
6059 /* If this operand throws an exception, then it does not make
6060 sense to try to perform a logical or arithmetic operation
6062 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6064 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6069 tree testtype = TREE_TYPE (cond);
6071 true_value = constant_boolean_node (true, testtype);
6072 false_value = constant_boolean_node (false, testtype);
6075 arg = fold_convert (arg_type, arg);
6078 true_value = fold_convert (cond_type, true_value);
6080 lhs = fold_build2 (code, type, true_value, arg);
6082 lhs = fold_build2 (code, type, arg, true_value);
6086 false_value = fold_convert (cond_type, false_value);
6088 rhs = fold_build2 (code, type, false_value, arg);
6090 rhs = fold_build2 (code, type, arg, false_value);
6093 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6094 return fold_convert (type, test);
6098 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6100 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6101 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6102 ADDEND is the same as X.
6104 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6105 and finite. The problematic cases are when X is zero, and its mode
6106 has signed zeros. In the case of rounding towards -infinity,
6107 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6108 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6111 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6113 if (!real_zerop (addend))
6116 /* Don't allow the fold with -fsignaling-nans. */
6117 if (HONOR_SNANS (TYPE_MODE (type)))
6120 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6121 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6124 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6125 if (TREE_CODE (addend) == REAL_CST
6126 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6129 /* The mode has signed zeros, and we have to honor their sign.
6130 In this situation, there is only one case we can return true for.
6131 X - 0 is the same as X unless rounding towards -infinity is
6133 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
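/* Illustrative sketch, not part of GCC: why the signed-zero checks
   above matter.  Under the default rounding mode, -0.0 + 0.0 is
   +0.0, so folding X + 0.0 to X would change the result's sign when
   X is -0.0.  #if 0 keeps the demo out of the build.  */
#if 0
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  /* Prints "1 0": x is negative zero, but x + 0.0 is positive zero.  */
  printf ("%d %d\n", signbit (x) != 0, signbit (x + 0.0) != 0);
  return 0;
}
#endif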
6136 /* Subroutine of fold() that checks comparisons of built-in math
6137 functions against real constants.
6139 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6140 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6141 is the type of the result and ARG0 and ARG1 are the operands of the
6142 comparison. ARG1 must be a TREE_REAL_CST.
6144 The function returns the constant folded tree if a simplification
6145 can be made, and NULL_TREE otherwise. */
6148 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6149 tree type, tree arg0, tree arg1)
6153 if (BUILTIN_SQRT_P (fcode))
6155 tree arg = CALL_EXPR_ARG (arg0, 0);
6156 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6158 c = TREE_REAL_CST (arg1);
6159 if (REAL_VALUE_NEGATIVE (c))
6161 /* sqrt(x) < y is always false, if y is negative. */
6162 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6163 return omit_one_operand (type, integer_zero_node, arg);
6165 /* sqrt(x) > y is always true, if y is negative and we
6166 don't care about NaNs, i.e. negative values of x. */
6167 if (code == NE_EXPR || !HONOR_NANS (mode))
6168 return omit_one_operand (type, integer_one_node, arg);
6170 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6171 return fold_build2 (GE_EXPR, type, arg,
6172 build_real (TREE_TYPE (arg), dconst0));
6174 else if (code == GT_EXPR || code == GE_EXPR)
6178 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6179 real_convert (&c2, mode, &c2);
6181 if (REAL_VALUE_ISINF (c2))
6183 /* sqrt(x) > y is x == +Inf, when y is very large. */
6184 if (HONOR_INFINITIES (mode))
6185 return fold_build2 (EQ_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg), c2));
6188 /* sqrt(x) > y is always false, when y is very large
6189 and we don't care about infinities. */
6190 return omit_one_operand (type, integer_zero_node, arg);
6193 /* sqrt(x) > c is the same as x > c*c. */
6194 return fold_build2 (code, type, arg,
6195 build_real (TREE_TYPE (arg), c2));
6197 else if (code == LT_EXPR || code == LE_EXPR)
6201 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6202 real_convert (&c2, mode, &c2);
6204 if (REAL_VALUE_ISINF (c2))
6206 /* sqrt(x) < y is always true, when y is a very large
6207 value and we don't care about NaNs or Infinities. */
6208 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6209 return omit_one_operand (type, integer_one_node, arg);
6211 /* sqrt(x) < y is x != +Inf when y is very large and we
6212 don't care about NaNs. */
6213 if (! HONOR_NANS (mode))
6214 return fold_build2 (NE_EXPR, type, arg,
6215 build_real (TREE_TYPE (arg), c2));
6217 /* sqrt(x) < y is x >= 0 when y is very large and we
6218 don't care about Infinities. */
6219 if (! HONOR_INFINITIES (mode))
6220 return fold_build2 (GE_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg), dconst0));
6223 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6224 if (lang_hooks.decls.global_bindings_p () != 0
6225 || CONTAINS_PLACEHOLDER_P (arg))
6228 arg = save_expr (arg);
6229 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6230 fold_build2 (GE_EXPR, type, arg,
6231 build_real (TREE_TYPE (arg),
6233 fold_build2 (NE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6238 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6239 if (! HONOR_NANS (mode))
6240 return fold_build2 (code, type, arg,
6241 build_real (TREE_TYPE (arg), c2));
6243 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6244 if (lang_hooks.decls.global_bindings_p () == 0
6245 && ! CONTAINS_PLACEHOLDER_P (arg))
6247 arg = save_expr (arg);
6248 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6249 fold_build2 (GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg),
6252 fold_build2 (code, type, arg,
6253 build_real (TREE_TYPE (arg),
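/* Worked example (illustrative): for double X, sqrt(X) > 2.0 folds
   to X > 4.0 since c2 == 4.0 is computed exactly, while sqrt(X) < 2.0
   becomes X >= 0.0 && X < 4.0 when NaNs are honored, because a
   negative X makes the original comparison false.  */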
6262 /* Subroutine of fold() that optimizes comparisons against Infinities,
6263 either +Inf or -Inf.
6265 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6266 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6267 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6269 The function returns the constant folded tree if a simplification
6270 can be made, and NULL_TREE otherwise. */
6273 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6275 enum machine_mode mode;
6276 REAL_VALUE_TYPE max;
6280 mode = TYPE_MODE (TREE_TYPE (arg0));
6282 /* For negative infinity swap the sense of the comparison. */
6283 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6285 code = swap_tree_comparison (code);
6290 /* x > +Inf is always false, if we ignore sNaNs. */
6291 if (HONOR_SNANS (mode))
6293 return omit_one_operand (type, integer_zero_node, arg0);
6296 /* x <= +Inf is always true, if we don't care about NaNs. */
6297 if (! HONOR_NANS (mode))
6298 return omit_one_operand (type, integer_one_node, arg0);
6300 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6301 if (lang_hooks.decls.global_bindings_p () == 0
6302 && ! CONTAINS_PLACEHOLDER_P (arg0))
6304 arg0 = save_expr (arg0);
6305 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6311 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6312 real_maxval (&max, neg, mode);
6313 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6314 arg0, build_real (TREE_TYPE (arg0), max));
6317 /* x < +Inf is always equal to x <= DBL_MAX. */
6318 real_maxval (&max, neg, mode);
6319 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6320 arg0, build_real (TREE_TYPE (arg0), max));
6323 /* x != +Inf is always equal to !(x > DBL_MAX). */
6324 real_maxval (&max, neg, mode);
6325 if (! HONOR_NANS (mode))
6326 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6329 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6331 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6340 /* Subroutine of fold() that optimizes comparisons of a division by
6341 a nonzero integer constant against an integer constant, i.e.
6344 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6345 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6346 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6348 The function returns the constant folded tree if a simplification
6349 can be made, and NULL_TREE otherwise. */
6352 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6354 tree prod, tmp, hi, lo;
6355 tree arg00 = TREE_OPERAND (arg0, 0);
6356 tree arg01 = TREE_OPERAND (arg0, 1);
6357 unsigned HOST_WIDE_INT lpart;
6358 HOST_WIDE_INT hpart;
6359 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6363 /* We have to do this the hard way to detect unsigned overflow.
6364 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6365 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6366 TREE_INT_CST_HIGH (arg01),
6367 TREE_INT_CST_LOW (arg1),
6368 TREE_INT_CST_HIGH (arg1),
6369 &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
6372 neg_overflow = false;
6376 tmp = int_const_binop (MINUS_EXPR, arg01,
6377 build_int_cst (TREE_TYPE (arg01), 1), 0);
6380 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6381 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6382 TREE_INT_CST_HIGH (prod),
6383 TREE_INT_CST_LOW (tmp),
6384 TREE_INT_CST_HIGH (tmp),
6385 &lpart, &hpart, unsigned_p);
6386 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6387 -1, overflow | TREE_OVERFLOW (prod));
6389 else if (tree_int_cst_sgn (arg01) >= 0)
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1), 0);
6393 switch (tree_int_cst_sgn (arg1))
6396 neg_overflow = true;
6397 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6402 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6407 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6417 /* A negative divisor reverses the relational operators. */
6418 code = swap_tree_comparison (code);
6420 tmp = int_const_binop (PLUS_EXPR, arg01,
6421 build_int_cst (TREE_TYPE (arg01), 1), 0);
6422 switch (tree_int_cst_sgn (arg1))
6425 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6430 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6435 neg_overflow = true;
6436 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6448 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6449 return omit_one_operand (type, integer_zero_node, arg00);
6450 if (TREE_OVERFLOW (hi))
6451 return fold_build2 (GE_EXPR, type, arg00, lo);
6452 if (TREE_OVERFLOW (lo))
6453 return fold_build2 (LE_EXPR, type, arg00, hi);
6454 return build_range_check (type, arg00, 1, lo, hi);
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6458 return omit_one_operand (type, integer_one_node, arg00);
6459 if (TREE_OVERFLOW (hi))
6460 return fold_build2 (LT_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (lo))
6462 return fold_build2 (GT_EXPR, type, arg00, hi);
6463 return build_range_check (type, arg00, 0, lo, hi);
6466 if (TREE_OVERFLOW (lo))
6468 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6469 return omit_one_operand (type, tmp, arg00);
6471 return fold_build2 (LT_EXPR, type, arg00, lo);
6474 if (TREE_OVERFLOW (hi))
6476 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6477 return omit_one_operand (type, tmp, arg00);
6479 return fold_build2 (LE_EXPR, type, arg00, hi);
6482 if (TREE_OVERFLOW (hi))
6484 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6485 return omit_one_operand (type, tmp, arg00);
6487 return fold_build2 (GT_EXPR, type, arg00, hi);
6490 if (TREE_OVERFLOW (lo))
6492 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6493 return omit_one_operand (type, tmp, arg00);
6495 return fold_build2 (GE_EXPR, type, arg00, lo);
6505 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6506 equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */
6511 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6514 /* If this is testing a single bit, we can optimize the test. */
6515 if ((code == NE_EXPR || code == EQ_EXPR)
6516 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6517 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6519 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6520 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6521 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6523 if (arg00 != NULL_TREE
6524 /* This is only a win if casting to a signed type is cheap,
6525 i.e. when arg00's type is not a partial mode. */
6526 && TYPE_PRECISION (TREE_TYPE (arg00))
6527 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6529 tree stype = signed_type_for (TREE_TYPE (arg00));
6530 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6531 result_type, fold_convert (stype, arg00),
6532 build_int_cst (stype, 0));
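/* E.g. for a 32-bit int X, (X & 0x80000000) != 0 tests only the sign
   bit and is folded to (int) X < 0; likewise (X & 0x80000000) == 0
   becomes (int) X >= 0.  */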
6539 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6540 equality/inequality test, then return a simplified form of
6541 the test using shifts and logical operations. Otherwise return
6542 NULL. TYPE is the desired result type. */
6545 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6548 /* If this is testing a single bit, we can optimize the test. */
6549 if ((code == NE_EXPR || code == EQ_EXPR)
6550 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6551 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6553 tree inner = TREE_OPERAND (arg0, 0);
6554 tree type = TREE_TYPE (arg0);
6555 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6556 enum machine_mode operand_mode = TYPE_MODE (type);
6558 tree signed_type, unsigned_type, intermediate_type;
      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
6563 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1), where C2 = log2(C).
         Similarly for (A & C) == 0.  */
      /* If INNER is a right shift by a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
6574 if (TREE_CODE (inner) == RSHIFT_EXPR
6575 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6576 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6577 && bitnum < TYPE_PRECISION (type)
6578 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6579 bitnum - TYPE_PRECISION (type)))
6581 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6582 inner = TREE_OPERAND (inner, 0);
6585 /* If we are going to be able to omit the AND below, we must do our
6586 operations as unsigned. If we must use the AND, we have a choice.
6587 Normally unsigned is faster, but for some machines signed is. */
6588 #ifdef LOAD_EXTEND_OP
6589 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                  && !flag_syntax_only) ? 0 : 1;
#else
  ops_unsigned = 1;
#endif
6595 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6596 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6597 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6598 inner = fold_convert (intermediate_type, inner);
6601 inner = build2 (RSHIFT_EXPR, intermediate_type,
6602 inner, size_int (bitnum));
6604 one = build_int_cst (intermediate_type, 1);
6606 if (code == EQ_EXPR)
6607 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6609 /* Put the AND last so it can combine with more things. */
6610 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6612 /* Make sure to return the proper type. */
6613 inner = fold_convert (result_type, inner);
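/* E.g. (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0 becomes
   ((X >> 3) ^ 1) & 1; the AND is kept outermost so that it can
   combine with surrounding code.  */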
6620 /* Check whether we are allowed to reorder operands arg0 and arg1,
6621 such that the evaluation of arg1 occurs before arg0. */
6624 reorder_operands_p (const_tree arg0, const_tree arg1)
6626 if (! flag_evaluation_order)
6628 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6630 return ! TREE_SIDE_EFFECTS (arg0)
6631 && ! TREE_SIDE_EFFECTS (arg1);
/* Test whether it is preferable to swap two operands, ARG0 and
6635 ARG1, for example because ARG0 is an integer constant and ARG1
6636 isn't. If REORDER is true, only recommend swapping if we can
6637 evaluate the operands in reverse order. */
6640 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6642 STRIP_SIGN_NOPS (arg0);
6643 STRIP_SIGN_NOPS (arg1);
6645 if (TREE_CODE (arg1) == INTEGER_CST)
6647 if (TREE_CODE (arg0) == INTEGER_CST)
6650 if (TREE_CODE (arg1) == REAL_CST)
6652 if (TREE_CODE (arg0) == REAL_CST)
6655 if (TREE_CODE (arg1) == FIXED_CST)
6657 if (TREE_CODE (arg0) == FIXED_CST)
6660 if (TREE_CODE (arg1) == COMPLEX_CST)
6662 if (TREE_CODE (arg0) == COMPLEX_CST)
6665 if (TREE_CONSTANT (arg1))
6667 if (TREE_CONSTANT (arg0))
6673 if (reorder && flag_evaluation_order
6674 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6678 for commutative and comparison operators. Ensuring a canonical
6679 form allows the optimizers to find additional redundancies without
6680 having to explicitly check for both orderings. */
6681 if (TREE_CODE (arg0) == SSA_NAME
6682 && TREE_CODE (arg1) == SSA_NAME
6683 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6686 /* Put SSA_NAMEs last. */
6687 if (TREE_CODE (arg1) == SSA_NAME)
6689 if (TREE_CODE (arg0) == SSA_NAME)
6692 /* Put variables last. */
6701 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6702 ARG0 is extended to a wider type. */
6705 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6707 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6709 tree shorter_type, outer_type;
6713 if (arg0_unw == arg0)
6715 shorter_type = TREE_TYPE (arg0_unw);
6717 #ifdef HAVE_canonicalize_funcptr_for_compare
6718 /* Disable this optimization if we're casting a function pointer
6719 type on targets that require function pointer canonicalization. */
6720 if (HAVE_canonicalize_funcptr_for_compare
6721 && TREE_CODE (shorter_type) == POINTER_TYPE
6722 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6726 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6729 arg1_unw = get_unwidened (arg1, NULL_TREE);
6731 /* If possible, express the comparison in the shorter mode. */
6732 if ((code == EQ_EXPR || code == NE_EXPR
6733 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6734 && (TREE_TYPE (arg1_unw) == shorter_type
6735 || (TYPE_PRECISION (shorter_type)
6736 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6737 || ((TYPE_PRECISION (shorter_type)
6738 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6739 && (TYPE_UNSIGNED (shorter_type)
6740 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6741 || (TREE_CODE (arg1_unw) == INTEGER_CST
6742 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6743 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6744 && int_fits_type_p (arg1_unw, shorter_type))))
6745 return fold_build2 (code, type, arg0_unw,
6746 fold_convert (shorter_type, arg1_unw));
6748 if (TREE_CODE (arg1_unw) != INTEGER_CST
6749 || TREE_CODE (shorter_type) != INTEGER_TYPE
6750 || !int_fits_type_p (arg1_unw, shorter_type))
  /* If we are comparing with an integer that does not fit into the range
6754 of the shorter type, the result is known. */
6755 outer_type = TREE_TYPE (arg1_unw);
6756 min = lower_bound_in_type (outer_type, shorter_type);
6757 max = upper_bound_in_type (outer_type, shorter_type);
6759 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6768 return omit_one_operand (type, integer_zero_node, arg0);
6773 return omit_one_operand (type, integer_one_node, arg0);
6779 return omit_one_operand (type, integer_one_node, arg0);
6781 return omit_one_operand (type, integer_zero_node, arg0);
6786 return omit_one_operand (type, integer_zero_node, arg0);
6788 return omit_one_operand (type, integer_one_node, arg0);
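/* For instance, if X has type signed char, then (int) X < 1000 is
   always true because 1000 exceeds the upper bound of signed char,
   so the comparison folds to 1; similarly (int) X == 1000 folds
   to 0.  */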
6797 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6798 ARG0 just the signedness is changed. */
6801 fold_sign_changed_comparison (enum tree_code code, tree type,
6802 tree arg0, tree arg1)
6805 tree inner_type, outer_type;
6807 if (TREE_CODE (arg0) != NOP_EXPR
6808 && TREE_CODE (arg0) != CONVERT_EXPR)
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
  /* If the conversion is from an integral subtype to its basetype,
     leave it alone.  */
6829 if (TREE_TYPE (inner_type) == outer_type)
6832 if (TREE_CODE (arg1) != INTEGER_CST
6833 && !((TREE_CODE (arg1) == NOP_EXPR
6834 || TREE_CODE (arg1) == CONVERT_EXPR)
6835 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6838 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6843 if (TREE_CODE (arg1) == INTEGER_CST)
6844 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6845 TREE_INT_CST_HIGH (arg1), 0,
6846 TREE_OVERFLOW (arg1));
6848 arg1 = fold_convert (inner_type, arg1);
6850 return fold_build2 (code, type, arg0_inner, arg1);
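/* E.g. for int X, the comparison (unsigned int) X == 5U merely
   changes the signedness of X's type, so it is rewritten as X == 5
   with the constant refitted into the inner type.  */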
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */
6861 try_move_mult_to_index (tree addr, tree op1)
6863 tree s, delta, step;
6864 tree ref = TREE_OPERAND (addr, 0), pref;
6869 /* Strip the nops that might be added when converting op1 to sizetype. */
6872 /* Canonicalize op1 into a possibly non-constant delta
6873 and an INTEGER_CST s. */
6874 if (TREE_CODE (op1) == MULT_EXPR)
6876 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6881 if (TREE_CODE (arg0) == INTEGER_CST)
6886 else if (TREE_CODE (arg1) == INTEGER_CST)
6894 else if (TREE_CODE (op1) == INTEGER_CST)
      /* Treat op1 itself as delta, with step s = 1.  */
6903 s = integer_one_node;
6906 for (;; ref = TREE_OPERAND (ref, 0))
6908 if (TREE_CODE (ref) == ARRAY_REF)
6910 /* Remember if this was a multi-dimensional array. */
6911 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6914 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6918 step = array_ref_element_size (ref);
6919 if (TREE_CODE (step) != INTEGER_CST)
6924 if (! tree_int_cst_equal (step, s))
              /* Check whether delta is a multiple of the step instead.  */
6930 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6936 /* Only fold here if we can verify we do not overflow one
6937 dimension of a multi-dimensional array. */
6942 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6943 || !INTEGRAL_TYPE_P (itype)
6944 || !TYPE_MAX_VALUE (itype)
6945 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6948 tmp = fold_binary (PLUS_EXPR, itype,
6949 fold_convert (itype,
6950 TREE_OPERAND (ref, 1)),
6951 fold_convert (itype, delta));
6953 || TREE_CODE (tmp) != INTEGER_CST
6954 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6963 if (!handled_component_p (ref))
  /* We found a suitable array reference.  So copy everything up to it,
6968 and replace the index. */
6970 pref = TREE_OPERAND (addr, 0);
6971 ret = copy_node (pref);
6976 pref = TREE_OPERAND (pref, 0);
6977 TREE_OPERAND (pos, 0) = copy_node (pref);
6978 pos = TREE_OPERAND (pos, 0);
6981 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6982 fold_convert (itype,
6983 TREE_OPERAND (pos, 1)),
6984 fold_convert (itype, delta));
6986 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
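/* E.g. given int A[10], whose element size is 4, the address
   computation &A[2] p+ I * 4 is rewritten as &A[2 + I]; for a
   multi-dimensional array the fold is additionally guarded by the
   overflow check above.  */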
6990 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6991 means A >= Y && A != MAX, but in this case we know that
6992 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6995 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6997 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6999 if (TREE_CODE (bound) == LT_EXPR)
7000 a = TREE_OPERAND (bound, 0);
7001 else if (TREE_CODE (bound) == GT_EXPR)
7002 a = TREE_OPERAND (bound, 1);
7006 typea = TREE_TYPE (a);
7007 if (!INTEGRAL_TYPE_P (typea)
7008 && !POINTER_TYPE_P (typea))
7011 if (TREE_CODE (ineq) == LT_EXPR)
7013 a1 = TREE_OPERAND (ineq, 1);
7014 y = TREE_OPERAND (ineq, 0);
7016 else if (TREE_CODE (ineq) == GT_EXPR)
7018 a1 = TREE_OPERAND (ineq, 0);
7019 y = TREE_OPERAND (ineq, 1);
7024 if (TREE_TYPE (a1) != typea)
7027 if (POINTER_TYPE_P (typea))
      /* Convert the pointers to integers before taking the difference.  */
7030 tree ta = fold_convert (ssizetype, a);
7031 tree ta1 = fold_convert (ssizetype, a1);
7032 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7035 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7037 if (!diff || !integer_onep (diff))
7040 return fold_build2 (GE_EXPR, type, a, y);
/* Fold a sum or difference in which at least one operand is a
   multiplication.  Returns the folded tree or NULL if no
   simplification could be made.  */
7047 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7049 tree arg00, arg01, arg10, arg11;
7050 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7052 /* (A * C) +- (B * C) -> (A+-B) * C.
7053 (A * C) +- A -> A * (C+-1).
7054 We are most concerned about the case where C is a constant,
7055 but other combinations show up during loop reduction. Since
7056 it is not difficult, try all four possibilities. */
7058 if (TREE_CODE (arg0) == MULT_EXPR)
7060 arg00 = TREE_OPERAND (arg0, 0);
7061 arg01 = TREE_OPERAND (arg0, 1);
7063 else if (TREE_CODE (arg0) == INTEGER_CST)
7065 arg00 = build_one_cst (type);
      /* We cannot generate a constant 1 for fixed-point fract modes.  */
7071 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7074 arg01 = build_one_cst (type);
7076 if (TREE_CODE (arg1) == MULT_EXPR)
7078 arg10 = TREE_OPERAND (arg1, 0);
7079 arg11 = TREE_OPERAND (arg1, 1);
7081 else if (TREE_CODE (arg1) == INTEGER_CST)
7083 arg10 = build_one_cst (type);
      /* We cannot generate a constant 1 for fixed-point fract modes.  */
7089 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7092 arg11 = build_one_cst (type);
7096 if (operand_equal_p (arg01, arg11, 0))
7097 same = arg01, alt0 = arg00, alt1 = arg10;
7098 else if (operand_equal_p (arg00, arg10, 0))
7099 same = arg00, alt0 = arg01, alt1 = arg11;
7100 else if (operand_equal_p (arg00, arg11, 0))
7101 same = arg00, alt0 = arg01, alt1 = arg10;
7102 else if (operand_equal_p (arg01, arg10, 0))
7103 same = arg01, alt0 = arg00, alt1 = arg11;
7105 /* No identical multiplicands; see if we can find a common
7106 power-of-two factor in non-power-of-two multiplies. This
7107 can help in multi-dimensional array access. */
7108 else if (host_integerp (arg01, 0)
7109 && host_integerp (arg11, 0))
7111 HOST_WIDE_INT int01, int11, tmp;
7114 int01 = TREE_INT_CST_LOW (arg01);
7115 int11 = TREE_INT_CST_LOW (arg11);
7117 /* Move min of absolute values to int11. */
7118 if ((int01 >= 0 ? int01 : -int01)
7119 < (int11 >= 0 ? int11 : -int11))
7121 tmp = int01, int01 = int11, int11 = tmp;
7122 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7129 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7131 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7132 build_int_cst (TREE_TYPE (arg00),
7137 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7142 return fold_build2 (MULT_EXPR, type,
7143 fold_build2 (code, type,
7144 fold_convert (type, alt0),
7145 fold_convert (type, alt1)),
7146 fold_convert (type, same));
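/* E.g. X * 7 + X * 5 folds to X * 12 and X * 8 - X to X * 7, while
   the common power-of-two case rewrites X * 12 + Y * 4 as
   (X * 3 + Y) * 4, a shape that often arises from multi-dimensional
   array indexing.  */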
7151 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7157 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7159 tree type = TREE_TYPE (expr);
7160 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7161 int byte, offset, word, words;
7162 unsigned char value;
7164 if (total_bytes > len)
7166 words = total_bytes / UNITS_PER_WORD;
7168 for (byte = 0; byte < total_bytes; byte++)
7170 int bitpos = byte * BITS_PER_UNIT;
7171 if (bitpos < HOST_BITS_PER_WIDE_INT)
7172 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7174 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7175 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7177 if (total_bytes > UNITS_PER_WORD)
7179 word = byte / UNITS_PER_WORD;
7180 if (WORDS_BIG_ENDIAN)
7181 word = (words - 1) - word;
7182 offset = word * UNITS_PER_WORD;
7183 if (BYTES_BIG_ENDIAN)
7184 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7186 offset += byte % UNITS_PER_WORD;
7189 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7190 ptr[offset] = value;
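/* E.g. encoding the 32-bit constant 0x01020304 stores the bytes
   04 03 02 01 into PTR on a little-endian target, and 01 02 03 04 on
   a big-endian one.  */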
7196 /* Subroutine of native_encode_expr. Encode the REAL_CST
7197 specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7202 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7204 tree type = TREE_TYPE (expr);
7205 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7206 int byte, offset, word, words, bitpos;
7207 unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7214 if (total_bytes > len)
7216 words = 32 / UNITS_PER_WORD;
7218 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7220 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7221 bitpos += BITS_PER_UNIT)
7223 byte = (bitpos / BITS_PER_UNIT) & 3;
7224 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7226 if (UNITS_PER_WORD < 4)
7228 word = byte / UNITS_PER_WORD;
7229 if (WORDS_BIG_ENDIAN)
7230 word = (words - 1) - word;
7231 offset = word * UNITS_PER_WORD;
7232 if (BYTES_BIG_ENDIAN)
7233 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7235 offset += byte % UNITS_PER_WORD;
7238 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7239 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7244 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7250 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7255 part = TREE_REALPART (expr);
7256 rsize = native_encode_expr (part, ptr, len);
7259 part = TREE_IMAGPART (expr);
7260 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7263 return rsize + isize;
7267 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7273 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7275 int i, size, offset, count;
7276 tree itype, elem, elements;
7279 elements = TREE_VECTOR_CST_ELTS (expr);
7280 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7281 itype = TREE_TYPE (TREE_TYPE (expr));
7282 size = GET_MODE_SIZE (TYPE_MODE (itype));
7283 for (i = 0; i < count; i++)
7287 elem = TREE_VALUE (elements);
7288 elements = TREE_CHAIN (elements);
7295 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7300 if (offset + size > len)
7302 memset (ptr+offset, 0, size);
7310 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7311 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7312 buffer PTR of length LEN bytes. Return the number of bytes
7313 placed in the buffer, or zero upon failure. */
7316 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7318 switch (TREE_CODE (expr))
7321 return native_encode_int (expr, ptr, len);
7324 return native_encode_real (expr, ptr, len);
7327 return native_encode_complex (expr, ptr, len);
7330 return native_encode_vector (expr, ptr, len);
7338 /* Subroutine of native_interpret_expr. Interpret the contents of
7339 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7340 If the buffer cannot be interpreted, return NULL_TREE. */
7343 native_interpret_int (tree type, const unsigned char *ptr, int len)
7345 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7346 int byte, offset, word, words;
7347 unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
7349 HOST_WIDE_INT hi = 0;
7351 if (total_bytes > len)
7353 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7355 words = total_bytes / UNITS_PER_WORD;
7357 for (byte = 0; byte < total_bytes; byte++)
7359 int bitpos = byte * BITS_PER_UNIT;
7360 if (total_bytes > UNITS_PER_WORD)
7362 word = byte / UNITS_PER_WORD;
7363 if (WORDS_BIG_ENDIAN)
7364 word = (words - 1) - word;
7365 offset = word * UNITS_PER_WORD;
7366 if (BYTES_BIG_ENDIAN)
7367 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7369 offset += byte % UNITS_PER_WORD;
7372 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7373 value = ptr[offset];
7375 if (bitpos < HOST_BITS_PER_WIDE_INT)
7376 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7378 hi |= (unsigned HOST_WIDE_INT) value
7379 << (bitpos - HOST_BITS_PER_WIDE_INT);
7382 return build_int_cst_wide_type (type, lo, hi);
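/* E.g. interpreting the bytes 04 03 02 01 on a little-endian target
   yields the 32-bit constant 0x01020304, the exact inverse of
   native_encode_int above.  */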
7386 /* Subroutine of native_interpret_expr. Interpret the contents of
7387 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7388 If the buffer cannot be interpreted, return NULL_TREE. */
7391 native_interpret_real (tree type, const unsigned char *ptr, int len)
7393 enum machine_mode mode = TYPE_MODE (type);
7394 int total_bytes = GET_MODE_SIZE (mode);
7395 int byte, offset, word, words, bitpos;
7396 unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7403 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7404 if (total_bytes > len || total_bytes > 24)
7406 words = 32 / UNITS_PER_WORD;
7408 memset (tmp, 0, sizeof (tmp));
7409 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7410 bitpos += BITS_PER_UNIT)
7412 byte = (bitpos / BITS_PER_UNIT) & 3;
7413 if (UNITS_PER_WORD < 4)
7415 word = byte / UNITS_PER_WORD;
7416 if (WORDS_BIG_ENDIAN)
7417 word = (words - 1) - word;
7418 offset = word * UNITS_PER_WORD;
7419 if (BYTES_BIG_ENDIAN)
7420 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7422 offset += byte % UNITS_PER_WORD;
7425 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7426 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7428 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7431 real_from_target (&r, tmp, mode);
7432 return build_real (type, r);
7436 /* Subroutine of native_interpret_expr. Interpret the contents of
7437 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7438 If the buffer cannot be interpreted, return NULL_TREE. */
7441 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7443 tree etype, rpart, ipart;
7446 etype = TREE_TYPE (type);
7447 size = GET_MODE_SIZE (TYPE_MODE (etype));
7450 rpart = native_interpret_expr (etype, ptr, size);
7453 ipart = native_interpret_expr (etype, ptr+size, size);
7456 return build_complex (type, rpart, ipart);
7460 /* Subroutine of native_interpret_expr. Interpret the contents of
7461 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7462 If the buffer cannot be interpreted, return NULL_TREE. */
7465 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7467 tree etype, elem, elements;
7470 etype = TREE_TYPE (type);
7471 size = GET_MODE_SIZE (TYPE_MODE (etype));
7472 count = TYPE_VECTOR_SUBPARTS (type);
7473 if (size * count > len)
7476 elements = NULL_TREE;
7477 for (i = count - 1; i >= 0; i--)
7479 elem = native_interpret_expr (etype, ptr+(i*size), size);
7482 elements = tree_cons (NULL_TREE, elem, elements);
7484 return build_vector (type, elements);
7488 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a constant of type TYPE. For
7490 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7491 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7492 return NULL_TREE. */
7495 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7497 switch (TREE_CODE (type))
7502 return native_interpret_int (type, ptr, len);
7505 return native_interpret_real (type, ptr, len);
7508 return native_interpret_complex (type, ptr, len);
7511 return native_interpret_vector (type, ptr, len);
7519 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7520 TYPE at compile-time. If we're unable to perform the conversion
7521 return NULL_TREE. */
7524 fold_view_convert_expr (tree type, tree expr)
7526 /* We support up to 512-bit values (for V8DFmode). */
7527 unsigned char buffer[64];
7530 /* Check that the host and target are sane. */
7531 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7534 len = native_encode_expr (expr, buffer, sizeof (buffer));
7538 return native_interpret_expr (type, buffer, len);
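/* E.g. VIEW_CONVERT_EXPR<int>(1.0f) is folded by encoding the float
   into its target byte image and reinterpreting those bytes as an
   int, which yields 0x3f800000 for the usual IEEE single-precision
   format.  */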
7541 /* Build an expression for the address of T. Folds away INDIRECT_REF
7542 to avoid confusing the gimplify process. When IN_FOLD is true
7543 avoid modifications of T. */
7546 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7548 /* The size of the object is not relevant when talking about its address. */
7549 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7550 t = TREE_OPERAND (t, 0);
7552 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7553 if (TREE_CODE (t) == INDIRECT_REF
7554 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7556 t = TREE_OPERAND (t, 0);
7558 if (TREE_TYPE (t) != ptrtype)
7559 t = build1 (NOP_EXPR, ptrtype, t);
7565 while (handled_component_p (base))
7566 base = TREE_OPERAND (base, 0);
7569 TREE_ADDRESSABLE (base) = 1;
7571 t = build1 (ADDR_EXPR, ptrtype, t);
7574 t = build1 (ADDR_EXPR, ptrtype, t);
7579 /* Build an expression for the address of T with type PTRTYPE. This
7580 function modifies the input parameter 'T' by sometimes setting the
7581 TREE_ADDRESSABLE flag. */
7584 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7586 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7589 /* Build an expression for the address of T. This function modifies
7590 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7591 flag. When called from fold functions, use fold_addr_expr instead. */
7594 build_fold_addr_expr (tree t)
7596 return build_fold_addr_expr_with_type_1 (t,
7597 build_pointer_type (TREE_TYPE (t)),
7601 /* Same as build_fold_addr_expr, builds an expression for the address
7602 of T, but avoids touching the input node 't'. Fold functions
7603 should use this version. */
7606 fold_addr_expr (tree t)
7608 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7610 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7613 /* Fold a unary expression of code CODE and type TYPE with operand
7614 OP0. Return the folded expression if folding is successful.
7615 Otherwise, return NULL_TREE. */
7618 fold_unary (enum tree_code code, tree type, tree op0)
7622 enum tree_code_class kind = TREE_CODE_CLASS (code);
7624 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7625 && TREE_CODE_LENGTH (code) == 1);
7630 if (code == NOP_EXPR || code == CONVERT_EXPR
7631 || code == FLOAT_EXPR || code == ABS_EXPR)
      /* Don't use STRIP_NOPS, because signedness of argument type
         matters.  */
7635 STRIP_SIGN_NOPS (arg0);
7639 /* Strip any conversions that don't change the mode. This
7640 is safe for every expression, except for a comparison
     expression because its signedness is derived from its
     operands.  So, in the latter case, strip only conversions that
     don't change the signedness.
7644 Note that this is done as an internal manipulation within
7645 the constant folder, in order to find the simplest
7646 representation of the arguments so that their form can be
     studied.  In any case, the appropriate type conversions should
     be put back in the tree that will get out of the folder.  */
7654 if (TREE_CODE_CLASS (code) == tcc_unary)
7656 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7657 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7658 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7659 else if (TREE_CODE (arg0) == COND_EXPR)
7661 tree arg01 = TREE_OPERAND (arg0, 1);
7662 tree arg02 = TREE_OPERAND (arg0, 2);
7663 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7664 arg01 = fold_build1 (code, type, arg01);
7665 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7666 arg02 = fold_build1 (code, type, arg02);
7667 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
          /* If this was a conversion, and all we did was to move it
             inside the COND_EXPR, bring it back out.  But leave it if
7672 it is a conversion from integer to integer and the
7673 result precision is no wider than a word since such a
7674 conversion is cheap and may be optimized away by combine,
7675 while it couldn't if it were outside the COND_EXPR. Then return
7676 so we don't get into an infinite recursion loop taking the
7677 conversion out and then back in. */
7679 if ((code == NOP_EXPR || code == CONVERT_EXPR
7680 || code == NON_LVALUE_EXPR)
7681 && TREE_CODE (tem) == COND_EXPR
7682 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7683 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7684 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7688 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7690 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7691 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7692 || flag_syntax_only))
7693 tem = build1 (code, type,
7695 TREE_TYPE (TREE_OPERAND
7696 (TREE_OPERAND (tem, 1), 0)),
7697 TREE_OPERAND (tem, 0),
7698 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7699 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7702 else if (COMPARISON_CLASS_P (arg0))
7704 if (TREE_CODE (type) == BOOLEAN_TYPE)
7706 arg0 = copy_node (arg0);
7707 TREE_TYPE (arg0) = type;
7710 else if (TREE_CODE (type) != INTEGER_TYPE)
7711 return fold_build3 (COND_EXPR, type, arg0,
7712 fold_build1 (code, type,
7714 fold_build1 (code, type,
7715 integer_zero_node));
7722 /* Re-association barriers around constants and other re-association
7723 barriers can be removed. */
7724 if (CONSTANT_CLASS_P (op0)
7725 || TREE_CODE (op0) == PAREN_EXPR)
7726 return fold_convert (type, op0);
7732 case FIX_TRUNC_EXPR:
7733 if (TREE_TYPE (op0) == type)
      /* If we have (type) (a CMP b) and type is an integral type, return a
7737 new expression involving the new type. */
7738 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7739 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7740 TREE_OPERAND (op0, 1));
7742 /* Handle cases of two conversions in a row. */
7743 if (TREE_CODE (op0) == NOP_EXPR
7744 || TREE_CODE (op0) == CONVERT_EXPR)
7746 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7747 tree inter_type = TREE_TYPE (op0);
7748 int inside_int = INTEGRAL_TYPE_P (inside_type);
7749 int inside_ptr = POINTER_TYPE_P (inside_type);
7750 int inside_float = FLOAT_TYPE_P (inside_type);
7751 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7752 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7753 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7754 int inter_int = INTEGRAL_TYPE_P (inter_type);
7755 int inter_ptr = POINTER_TYPE_P (inter_type);
7756 int inter_float = FLOAT_TYPE_P (inter_type);
7757 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7758 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7759 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7760 int final_int = INTEGRAL_TYPE_P (type);
7761 int final_ptr = POINTER_TYPE_P (type);
7762 int final_float = FLOAT_TYPE_P (type);
7763 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7764 unsigned int final_prec = TYPE_PRECISION (type);
7765 int final_unsignedp = TYPE_UNSIGNED (type);
7767 /* In addition to the cases of two conversions in a row
7768 handled below, if we are converting something to its own
7769 type via an object of identical or wider precision, neither
7770 conversion is needed. */
7771 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7772 && (((inter_int || inter_ptr) && final_int)
7773 || (inter_float && final_float))
7774 && inter_prec >= final_prec)
7775 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7777 /* Likewise, if the intermediate and final types are either both
7778 float or both integer, we don't need the middle conversion if
7779 it is wider than the final type and doesn't change the signedness
7780 (for integers). Avoid this if the final type is a pointer
7781 since then we sometimes need the inner conversion. Likewise if
7782 the outer has a precision not equal to the size of its mode. */
7783 if (((inter_int && inside_int)
7784 || (inter_float && inside_float)
7785 || (inter_vec && inside_vec))
7786 && inter_prec >= inside_prec
7787 && (inter_float || inter_vec
7788 || inter_unsignedp == inside_unsignedp)
7789 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7790 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7792 && (! final_vec || inter_prec == inside_prec))
7793 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7795 /* If we have a sign-extension of a zero-extended value, we can
7796 replace that by a single zero-extension. */
7797 if (inside_int && inter_int && final_int
7798 && inside_prec < inter_prec && inter_prec < final_prec
7799 && inside_unsignedp && !inter_unsignedp)
7800 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7802 /* Two conversions in a row are not needed unless:
7803 - some conversion is floating-point (overstrict for now), or
7804 - some conversion is a vector (overstrict for now), or
         - the intermediate type is narrower than both initial and
           final types, or
7807 - the intermediate type and innermost type differ in signedness,
7808 and the outermost type is wider than the intermediate, or
7809 - the initial type is a pointer type and the precisions of the
7810 intermediate and final types differ, or
7811 - the final type is a pointer type and the precisions of the
7812 initial and intermediate types differ. */
7813 if (! inside_float && ! inter_float && ! final_float
7814 && ! inside_vec && ! inter_vec && ! final_vec
7815 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7816 && ! (inside_int && inter_int
7817 && inter_unsignedp != inside_unsignedp
7818 && inter_prec < final_prec)
7819 && ((inter_unsignedp && inter_prec > inside_prec)
7820 == (final_unsignedp && final_prec > inter_prec))
7821 && ! (inside_ptr && inter_prec != final_prec)
7822 && ! (final_ptr && inside_prec != inter_prec)
7823 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7824 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7825 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
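/* E.g. for int X, (int) (long) X needs neither conversion, and the
   sign-extension of a zero-extension (int) (short) (unsigned char) X
   collapses to the single zero-extension (int) (unsigned char) X.  */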
7828 /* Handle (T *)&A.B.C for A being of type T and B and C
7829 living at offset zero. This occurs frequently in
7830 C++ upcasting and then accessing the base. */
7831 if (TREE_CODE (op0) == ADDR_EXPR
7832 && POINTER_TYPE_P (type)
7833 && handled_component_p (TREE_OPERAND (op0, 0)))
7835 HOST_WIDE_INT bitsize, bitpos;
7837 enum machine_mode mode;
7838 int unsignedp, volatilep;
7839 tree base = TREE_OPERAND (op0, 0);
7840 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7841 &mode, &unsignedp, &volatilep, false);
7842 /* If the reference was to a (constant) zero offset, we can use
7843 the address of the base if it has the same base type
7844 as the result type. */
7845 if (! offset && bitpos == 0
7846 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7847 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7848 return fold_convert (type, fold_addr_expr (base));
7851 if ((TREE_CODE (op0) == MODIFY_EXPR
7852 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7853 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7854 /* Detect assigning a bitfield. */
7855 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7857 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7859 /* Don't leave an assignment inside a conversion
7860 unless assigning a bitfield. */
7861 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7862 /* First do the assignment, then return converted constant. */
7863 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7864 TREE_NO_WARNING (tem) = 1;
7865 TREE_USED (tem) = 1;
7869 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
7871 in c). This folds extension into the BIT_AND_EXPR. */
7872 if (INTEGRAL_TYPE_P (type)
7873 && TREE_CODE (type) != BOOLEAN_TYPE
7874 && TREE_CODE (op0) == BIT_AND_EXPR
7875 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7878 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7881 if (TYPE_UNSIGNED (TREE_TYPE (and))
7882 || (TYPE_PRECISION (type)
7883 <= TYPE_PRECISION (TREE_TYPE (and))))
7885 else if (TYPE_PRECISION (TREE_TYPE (and1))
7886 <= HOST_BITS_PER_WIDE_INT
7887 && host_integerp (and1, 1))
7889 unsigned HOST_WIDE_INT cst;
7891 cst = tree_low_cst (and1, 1);
7892 cst &= (HOST_WIDE_INT) -1
7893 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7894 change = (cst == 0);
#ifdef LOAD_EXTEND_OP
          if (change
              && !flag_syntax_only
              && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                  == ZERO_EXTEND))
            {
              tree uns = unsigned_type_for (TREE_TYPE (and0));
              and0 = fold_convert (uns, and0);
              and1 = fold_convert (uns, and1);
            }
#endif
7909 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7910 TREE_INT_CST_HIGH (and1), 0,
7911 TREE_OVERFLOW (and1));
7912 return fold_build2 (BIT_AND_EXPR, type,
7913 fold_convert (type, and0), tem);
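/* E.g. for unsigned char X, (unsigned int) (X & 0x0f) becomes
   (unsigned int) X & 0x0f, folding the widening into the masked
   operand.  */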
7917 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7918 when one of the new casts will fold away. Conservatively we assume
7919 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7920 if (POINTER_TYPE_P (type)
7921 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7922 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7923 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7924 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7926 tree arg00 = TREE_OPERAND (arg0, 0);
7927 tree arg01 = TREE_OPERAND (arg0, 1);
7929 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7930 fold_convert (sizetype, arg01));
7933 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and the type of X is an integer type not
         narrower than T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7936 if (INTEGRAL_TYPE_P (type)
7937 && TREE_CODE (op0) == BIT_NOT_EXPR
7938 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7939 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7940 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7941 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7943 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7944 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7945 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7946 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7949 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7950 type of X and Y (integer types only). */
7951 if (INTEGRAL_TYPE_P (type)
7952 && TREE_CODE (op0) == MULT_EXPR
7953 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7954 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7956 /* Be careful not to introduce new overflows. */
7958 if (TYPE_OVERFLOW_WRAPS (type))
7961 mult_type = unsigned_type_for (type);
7963 tem = fold_build2 (MULT_EXPR, mult_type,
7964 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7965 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7966 return fold_convert (type, tem);
7969 tem = fold_convert_const (code, type, op0);
7970 return tem ? tem : NULL_TREE;
7972 case FIXED_CONVERT_EXPR:
7973 tem = fold_convert_const (code, type, arg0);
7974 return tem ? tem : NULL_TREE;
7976 case VIEW_CONVERT_EXPR:
7977 if (TREE_TYPE (op0) == type)
7979 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7980 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7982 /* For integral conversions with the same precision or pointer
7983 conversions use a NOP_EXPR instead. */
7984 if ((INTEGRAL_TYPE_P (type)
7985 || POINTER_TYPE_P (type))
7986 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7987 || POINTER_TYPE_P (TREE_TYPE (op0)))
7988 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7989 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7990 a sub-type to its base type as generated by the Ada FE. */
7991 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7992 && TREE_TYPE (TREE_TYPE (op0))))
7993 return fold_convert (type, op0);
7995 /* Strip inner integral conversions that do not change the precision. */
7996 if ((TREE_CODE (op0) == NOP_EXPR
7997 || TREE_CODE (op0) == CONVERT_EXPR)
7998 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7999 || POINTER_TYPE_P (TREE_TYPE (op0)))
8000 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8001 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8002 && (TYPE_PRECISION (TREE_TYPE (op0))
8003 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8004 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8006 return fold_view_convert_expr (type, op0);
8009 tem = fold_negate_expr (arg0);
8011 return fold_convert (type, tem);
8015 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8016 return fold_abs_const (arg0, type);
8017 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8018 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8019 /* Convert fabs((double)float) into (double)fabsf(float). */
8020 else if (TREE_CODE (arg0) == NOP_EXPR
8021 && TREE_CODE (type) == REAL_TYPE)
8023 tree targ0 = strip_float_extensions (arg0);
8025 return fold_convert (type, fold_build1 (ABS_EXPR,
8029 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8030 else if (TREE_CODE (arg0) == ABS_EXPR)
8032 else if (tree_expr_nonnegative_p (arg0))
8035 /* Strip sign ops from argument. */
8036 if (TREE_CODE (type) == REAL_TYPE)
8038 tem = fold_strip_sign_ops (arg0);
8040 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8045 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8046 return fold_convert (type, arg0);
8047 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8049 tree itype = TREE_TYPE (type);
8050 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8051 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8052 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8054 if (TREE_CODE (arg0) == COMPLEX_CST)
8056 tree itype = TREE_TYPE (type);
8057 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8058 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8059 return build_complex (type, rpart, negate_expr (ipart));
8061 if (TREE_CODE (arg0) == CONJ_EXPR)
8062 return fold_convert (type, TREE_OPERAND (arg0, 0));
8066 if (TREE_CODE (arg0) == INTEGER_CST)
8067 return fold_not_const (arg0, type);
8068 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8069 return fold_convert (type, TREE_OPERAND (arg0, 0));
8070 /* Convert ~ (-A) to A - 1. */
8071 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8072 return fold_build2 (MINUS_EXPR, type,
8073 fold_convert (type, TREE_OPERAND (arg0, 0)),
8074 build_int_cst (type, 1));
8075 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8076 else if (INTEGRAL_TYPE_P (type)
8077 && ((TREE_CODE (arg0) == MINUS_EXPR
8078 && integer_onep (TREE_OPERAND (arg0, 1)))
8079 || (TREE_CODE (arg0) == PLUS_EXPR
8080 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8081 return fold_build1 (NEGATE_EXPR, type,
8082 fold_convert (type, TREE_OPERAND (arg0, 0)));
8083 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8084 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8085 && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
8088 return fold_build2 (BIT_XOR_EXPR, type, tem,
8089 fold_convert (type, TREE_OPERAND (arg0, 1)));
8090 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8091 && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
8094 return fold_build2 (BIT_XOR_EXPR, type,
8095 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8096 /* Perform BIT_NOT_EXPR on each element individually. */
8097 else if (TREE_CODE (arg0) == VECTOR_CST)
8099 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8100 int count = TYPE_VECTOR_SUBPARTS (type), i;
8102 for (i = 0; i < count; i++)
8106 elem = TREE_VALUE (elements);
8107 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8108 if (elem == NULL_TREE)
8110 elements = TREE_CHAIN (elements);
8113 elem = build_int_cst (TREE_TYPE (type), -1);
8114 list = tree_cons (NULL_TREE, elem, list);
8117 return build_vector (type, nreverse (list));
8122 case TRUTH_NOT_EXPR:
8123 /* The argument to invert_truthvalue must have Boolean type. */
8124 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8125 arg0 = fold_convert (boolean_type_node, arg0);
8127 /* Note that the operand of this must be an int
8128 and its values must be 0 or 1.
8129 ("true" is a fixed value perhaps depending on the language,
8130 but we don't handle values other than 1 correctly yet.) */
8131 tem = fold_truth_not_expr (arg0);
8134 return fold_convert (type, tem);
8137 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8138 return fold_convert (type, arg0);
8139 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8140 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8141 TREE_OPERAND (arg0, 1));
8142 if (TREE_CODE (arg0) == COMPLEX_CST)
8143 return fold_convert (type, TREE_REALPART (arg0));
8144 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8146 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8147 tem = fold_build2 (TREE_CODE (arg0), itype,
8148 fold_build1 (REALPART_EXPR, itype,
8149 TREE_OPERAND (arg0, 0)),
8150 fold_build1 (REALPART_EXPR, itype,
8151 TREE_OPERAND (arg0, 1)));
8152 return fold_convert (type, tem);
8154 if (TREE_CODE (arg0) == CONJ_EXPR)
8156 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8157 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8158 return fold_convert (type, tem);
8160 if (TREE_CODE (arg0) == CALL_EXPR)
8162 tree fn = get_callee_fndecl (arg0);
8163 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8164 switch (DECL_FUNCTION_CODE (fn))
8166 CASE_FLT_FN (BUILT_IN_CEXPI):
8167 fn = mathfn_built_in (type, BUILT_IN_COS);
8169 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8179 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8180 return fold_convert (type, integer_zero_node);
8181 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8182 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8183 TREE_OPERAND (arg0, 0));
8184 if (TREE_CODE (arg0) == COMPLEX_CST)
8185 return fold_convert (type, TREE_IMAGPART (arg0));
8186 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8188 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8189 tem = fold_build2 (TREE_CODE (arg0), itype,
8190 fold_build1 (IMAGPART_EXPR, itype,
8191 TREE_OPERAND (arg0, 0)),
8192 fold_build1 (IMAGPART_EXPR, itype,
8193 TREE_OPERAND (arg0, 1)));
8194 return fold_convert (type, tem);
8196 if (TREE_CODE (arg0) == CONJ_EXPR)
8198 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8199 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8200 return fold_convert (type, negate_expr (tem));
8202 if (TREE_CODE (arg0) == CALL_EXPR)
8204 tree fn = get_callee_fndecl (arg0);
8205 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8206 switch (DECL_FUNCTION_CODE (fn))
8208 CASE_FLT_FN (BUILT_IN_CEXPI):
8209 fn = mathfn_built_in (type, BUILT_IN_SIN);
8211 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8222 } /* switch (code) */
8225 /* Fold a binary expression of code CODE and type TYPE with operands
8226 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8227 Return the folded expression if folding is successful. Otherwise,
8228 return NULL_TREE. */
8231 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8233 enum tree_code compl_code;
8235 if (code == MIN_EXPR)
8236 compl_code = MAX_EXPR;
8237 else if (code == MAX_EXPR)
8238 compl_code = MIN_EXPR;
8242 /* MIN (MAX (a, b), b) == b. */
8243 if (TREE_CODE (op0) == compl_code
8244 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8245 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8247 /* MIN (MAX (b, a), b) == b. */
8248 if (TREE_CODE (op0) == compl_code
8249 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8250 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8251 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8253 /* MIN (a, MAX (a, b)) == a. */
8254 if (TREE_CODE (op1) == compl_code
8255 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8256 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8257 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8259 /* MIN (a, MAX (b, a)) == a. */
8260 if (TREE_CODE (op1) == compl_code
8261 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8262 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8263 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
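/* E.g. MIN (MAX (a, b), b) folds to b, since MAX (a, b) >= b always
   holds and the outer MIN therefore selects b.  */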
8268 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8269 by changing CODE to reduce the magnitude of constants involved in
8270 ARG0 of the comparison.
8271 Returns a canonicalized comparison tree if a simplification was
8272 possible, otherwise returns NULL_TREE.
8273 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8274 valid if signed overflow is undefined. */
8277 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8278 tree arg0, tree arg1,
8279 bool *strict_overflow_p)
8281 enum tree_code code0 = TREE_CODE (arg0);
8282 tree t, cst0 = NULL_TREE;
8286 /* Match A +- CST code arg1 and CST code arg1. */
8287 if (!(((code0 == MINUS_EXPR
8288 || code0 == PLUS_EXPR)
8289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8290 || code0 == INTEGER_CST))
8293 /* Identify the constant in arg0 and its sign. */
8294 if (code0 == INTEGER_CST)
8297 cst0 = TREE_OPERAND (arg0, 1);
8298 sgn0 = tree_int_cst_sgn (cst0);
8300 /* Overflowed constants and zero will cause problems. */
8301 if (integer_zerop (cst0)
8302 || TREE_OVERFLOW (cst0))
8305 /* See if we can reduce the magnitude of the constant in
8306 arg0 by changing the comparison code. */
8307 if (code0 == INTEGER_CST)
8309 /* CST <= arg1 -> CST-1 < arg1. */
8310 if (code == LE_EXPR && sgn0 == 1)
8312 /* -CST < arg1 -> -CST-1 <= arg1. */
8313 else if (code == LT_EXPR && sgn0 == -1)
8315 /* CST > arg1 -> CST-1 >= arg1. */
8316 else if (code == GT_EXPR && sgn0 == 1)
8318 /* -CST >= arg1 -> -CST-1 > arg1. */
8319 else if (code == GE_EXPR && sgn0 == -1)
8323 /* arg1 code' CST' might be more canonical. */
8328 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8330 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8332 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8333 else if (code == GT_EXPR
8334 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8336 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8337 else if (code == LE_EXPR
8338 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8340 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8341 else if (code == GE_EXPR
8342 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8346 *strict_overflow_p = true;
8349 /* Now build the constant reduced in magnitude. */
8350 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8351 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8352 if (code0 != INTEGER_CST)
8353 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  /* If swapping might yield a more canonical form, do so.  */
8357 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8359 return fold_build2 (code, type, t, arg1);
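/* E.g. X + 3 <= Y is canonicalized to X + 2 < Y, and the sole
   constant in 5 <= Y is reduced to 4 < Y, which may then be swapped
   to Y > 4; *STRICT_OVERFLOW_P tells the caller when the result
   relies on signed overflow being undefined.  */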
/* Canonicalize the comparison ARG0 CODE ARG1 with result type TYPE
   further when the type of ARG0 has undefined overflow.  Try to
   decrease the magnitude of constants involved
8364 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8365 and put sole constants at the second argument position.
8366 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8369 maybe_canonicalize_comparison (enum tree_code code, tree type,
8370 tree arg0, tree arg1)
8373 bool strict_overflow_p;
8374 const char * const warnmsg = G_("assuming signed overflow does not occur "
8375 "when reducing constant in comparison");
8377 /* In principle pointers also have undefined overflow behavior,
8378 but that causes problems elsewhere. */
8379 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8380 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8383 /* Try canonicalization by simplifying arg0. */
8384 strict_overflow_p = false;
8385 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8386 &strict_overflow_p);
8389 if (strict_overflow_p)
8390 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
8396 code = swap_tree_comparison (code);
8397 strict_overflow_p = false;
8398 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8399 &strict_overflow_p);
8400 if (t && strict_overflow_p)
8401 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8405 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8406 space. This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */
8410 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8412 unsigned HOST_WIDE_INT offset_low, total_low;
8413 HOST_WIDE_INT size, offset_high, total_high;
8415 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8421 if (offset == NULL_TREE)
8426 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8430 offset_low = TREE_INT_CST_LOW (offset);
8431 offset_high = TREE_INT_CST_HIGH (offset);
8434 if (add_double_with_sign (offset_low, offset_high,
8435 bitpos / BITS_PER_UNIT, 0,
8436 &total_low, &total_high,
8440 if (total_high != 0)
8443 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8447 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8449 if (TREE_CODE (base) == ADDR_EXPR)
8451 HOST_WIDE_INT base_size;
8453 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8454 if (base_size > 0 && size < base_size)
8458 return total_low > (unsigned HOST_WIDE_INT) size;
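
/* E.g. for "struct S { int a; int b; } *p" the access &p->b has a
   small positive BITPOS that stays within the size of *p, so the sum
   cannot wrap and the function returns false; an unknown or huge
   offset makes it conservatively return true.  */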
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should go through fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
                         TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand (type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand (type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning (("assuming signed overflow does not occur "
                                  "when changing X +- C1 cmp C2 to "
                                  "X cmp C2 -+ C1"),
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type, variable, lhs);
        }
    }
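
  /* Concretely: with undefined signed overflow, "x - 5 == 10" becomes
     "x == 15".  If computing C2 -+ C1 overflows, the comparison is
     decided outright; e.g. "x + 10 < INT_MIN + 5" can only hold via
     overflow, so it folds to constant false.  */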
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          offset0 = TREE_OPERAND (arg0, 1);
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          offset1 = TREE_OPERAND (arg1, 1);
        }

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
                case NE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
                case LT_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
                case LE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
                case GE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
                case GT_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to stay within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);

              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (signed_size_type_node, 0);
              else
                offset0 = fold_convert (signed_size_type_node, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (signed_size_type_node, 0);
              else
                offset1 = fold_convert (signed_size_type_node, offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2 (code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands (type, boolean_false_node, arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands (type, boolean_true_node, arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = fold_addr_expr (base0);
          if (indirect_base1)
            base1 = fold_addr_expr (base1);
          return fold_build2 (code, type, base0, base1);
        }
    }
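
  /* E.g. "&s.a == &s.b" has equal bases and constant field offsets,
     so it folds to a constant; "p p+ i < p p+ j" with equal bases
     reduces to a comparison of the variable byte offsets i and j.  */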
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type,
                              variable1,
                              fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
                                           variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type,
                              fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
                                           variable1, cst),
                              variable2);
        }
    }
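
  /* For example, "x + 4 < y + 2" is rewritten as "x < y + -2": the
     combined constant -2 is no larger in absolute value than either
     original constant, so the rewrite cannot introduce new overflow.  */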
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
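
  /* E.g. "x * 4 < 0" becomes "x < 0", and "x * -4 < 0" becomes
     "x > 0", relying on the multiplication not overflowing.  */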
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2 (code, type, fold_convert (newtype, targ0),
                            fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
                            TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 0),
                                build_real (TREE_TYPE (arg1),
                                            REAL_VALUE_NEGATE (cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2 (code, type, arg0,
                                build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand (type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst))
            {
              tem = fold_inf_compare (code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (swap_tree_comparison (code), type,
                            TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
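
  /* Examples of the float folds above: "(double) f < (double) g" with
     float f and g is done as "f < g"; "x != NaN" folds to true and the
     other comparisons against a quiet NaN constant fold to false.  */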
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
          || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
        return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2 (EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
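
  /* E.g. for integers "x == x" and "x <= x" fold to 1 and "x < x" to
     0; for IEEE floats "x <= x" can only be folded to "x == x", which
     is false exactly when x is a NaN.  */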
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, minval),
                           arg1);
          tree equal_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, maxval),
                           arg1);
          tree low_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, minval,
                                       cval2, maxval),
                           arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand (type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand (type, integer_one_node, arg0);
                }

              if (save_p)
                return save_expr (build2 (code, type, cval1, cval2));
              return fold_build2 (code, type, cval1, cval2);
            }
        }
    }
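
  /* E.g. "(a > b) == 0": substituting the three orderings of a and b
     gives low 1, equal 1, high 0, i.e. mask 3, so the whole expression
     folds to "a <= b".  */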
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
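
  /* E.g. "x / 3 == 2" holds exactly for x in [6, 8], so it is turned
     into a range test of that interval rather than a division.  */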
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
                          fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
                          TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
                          TREE_OPERAND (arg0, 0),
                          fold_build1 (BIT_NOT_EXPR, cmp_type,
                                       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
                     fold_build2 (MULT_EXPR, itype, rpart, rpart),
                     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
                      fold_convert (itype, integer_zero_node));
}
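
/* For z = a + bi this builds (a*a + b*b) + 0i, wrapping the parts in
   save_expr so a and b are evaluated only once: conj(a + bi) = a - bi,
   hence z * conj(z) = a*a + b*b with a zero imaginary part.  */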
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
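
/* For example, for "int x" aligned to 4 bytes, &x yields M = 4 with
   N = 0, and "(char *) &x + 3" yields M = 4 with N = 3: the pointer
   is known to be 3 more than a multiple of 4.  */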
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
               || IS_GIMPLE_STMT_CODE_CLASS (kind))
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1, 0);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
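
  /* E.g. "(a < b) & (c < d)" becomes a TRUTH_AND_EXPR of the two
     comparisons, and "(a < b) == (c < d)" becomes the inversion of a
     TRUTH_XOR_EXPR, forms the truth-value folders understand.  */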
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build2 (code, type,
                                    fold_convert (TREE_TYPE (op0),
                                                  TREE_OPERAND (arg0, 1)),
                                    op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold_build2 (code, type, op0,
                                    fold_convert (TREE_TYPE (op1),
                                                  TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
                                                fold_convert (sizetype, arg1),
                                                fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2 (POINTER_PLUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2 (PLUS_EXPR, sizetype,
                               arg01, fold_convert (sizetype, arg1));
          return fold_convert (type,
                               fold_build2 (POINTER_PLUS_EXPR,
                                            TREE_TYPE (arg00), arg00, inner));
        }
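
      /* E.g. "(p p+ 4) p+ i" is reassociated as "p p+ (4 + i)", so a
         chain of pointer increments collapses into a single offset.  */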
      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  The loop optimizer sometimes produces this type
         of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
          if (tem)
            return fold_convert (type, tem);
        }

      return NULL_TREE;
    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
        return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
                                                TREE_TYPE (arg0),
                                                arg0,
                                                fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
                                                TREE_TYPE (arg1),
                                                arg1,
                                                fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand (type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand (type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert (type,
                                     fold_build2 (TRUNC_MOD_EXPR,
                                                  TREE_TYPE (arg0), arg0, cst0));
            }
        }
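
      /* E.g. "x + (x / 4) * -4" is recognized here and folded to
         "x % 4", since the two constants -4 and 4 sum to zero.  */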
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                  : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));
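
          /* The x+x -> x*2.0 fold needs no -ffast-math guard:
             multiplying by 2.0 is exact in binary floating point, so
             both forms round identically.  */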
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
 bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
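
      /* E.g. for unsigned 32-bit x, "(x << 3) + (x >> 29)" becomes an
         LROTATE_EXPR of x by 3, and "(x << b) + (x >> (32 - b))"
         becomes a rotate by b.  */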
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

      return NULL_TREE;
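
      /* E.g. "(x + 1) + (x + 2)" splits into variables {x, x} and
         literals {1, 2} and is reassociated as "(x + x) + 3".  */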
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
              return fold_build2 (PLUS_EXPR, type,
                                  fold_build2 (MINUS_EXPR, type, arg00, arg10),
                                  fold_build2 (MINUS_EXPR, type, arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary (MINUS_EXPR, type, arg00,
                                      fold_convert (type, arg1));
              if (tmp)
                return fold_build2 (PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, op0,
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, negate_expr (arg1)),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1 (BIT_NOT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, op1);
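
      /* E.g. for int x, "-x - 1" and "-1 - x" both fold to "~x" by
         the two's complement identity -x == ~x + 1.  */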
      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return fold_convert (type,
                             fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                          arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
                  return fold_build2 (BIT_AND_EXPR, type,
                                      fold_build1 (BIT_NOT_EXPR, type, arg10),
                                      fold_convert (type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
                  return fold_build2 (BIT_AND_EXPR, type,
                                      fold_build1 (BIT_NOT_EXPR, type, arg11),
                                      fold_convert (type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }
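
      /* E.g. with B = 15 (a power of 2 minus 1), "(a & ~15) - (a & 15)"
         becomes "(a ^ 15) - 15".  */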
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1r ? arg1r
                                         : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                              : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                              : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1i ? arg1i
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]).  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }
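
      /* E.g. "&a[i] - &a[j]" for an array of 4-byte ints folds to
         "(i - j) * 4", the byte distance between the two elements.  */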
      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2 (MULT_EXPR, type,
                                fold_convert (type, negate_expr (arg0)), tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2 (LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));
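
          /* E.g. "a * (1 << b)" becomes "a << b", and "x * -1"
             becomes "-x"; both avoid a multiplication.  */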
          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2 (MULT_EXPR, type,
                                omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                                  TREE_OPERAND (arg0, 1)),
                                fold_build2 (MULT_EXPR, type,
                                             build_int_cst (type, 2), arg1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert (type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
        }
10195 /* Maybe fold x * 0 to 0. The expressions aren't the same
10196 when x is NaN, since x * 0 is also NaN. Nor are they the
10197 same in modes with signed zeros, since multiplying a
10198 negative value by 0 gives -0, not +0. */
10199 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10200 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10201 && real_zerop (arg1))
10202 return omit_one_operand (type, arg1, arg0);
10203 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10204 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10205 && real_onep (arg1))
10206 return non_lvalue (fold_convert (type, arg0));
10208 /* Transform x * -1.0 into -x. */
10209 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10210 && real_minus_onep (arg1))
10211 return fold_convert (type, negate_expr (arg0));
10213 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10214 the result for floating point types due to rounding so it is applied
10215 only if -fassociative-math was specify. */
10216 if (flag_associative_math
10217 && TREE_CODE (arg0) == RDIV_EXPR
10218 && TREE_CODE (arg1) == REAL_CST
10219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10221 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10224 return fold_build2 (RDIV_EXPR, type, tem,
10225 TREE_OPERAND (arg0, 1));
10228 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10229 if (operand_equal_p (arg0, arg1, 0))
10231 tree tem = fold_strip_sign_ops (arg0);
10232 if (tem != NULL_TREE)
10234 tem = fold_convert (type, tem);
10235 return fold_build2 (MULT_EXPR, type, tem, tem);
10239 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10240 This is not the same for NaNs or if signed zeros are
10241 involved. */
10242 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10243 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10244 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10245 && TREE_CODE (arg1) == COMPLEX_CST
10246 && real_zerop (TREE_REALPART (arg1)))
10247 {
10248 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10249 if (real_onep (TREE_IMAGPART (arg1)))
10250 return fold_build2 (COMPLEX_EXPR, type,
10251 negate_expr (fold_build1 (IMAGPART_EXPR,
10252 rtype, arg0)),
10253 fold_build1 (REALPART_EXPR, rtype, arg0));
10254 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10255 return fold_build2 (COMPLEX_EXPR, type,
10256 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10257 negate_expr (fold_build1 (REALPART_EXPR,
10258 rtype, arg0)));
10259 }
10261 /* Optimize z * conj(z) for floating point complex numbers.
10262 Guarded by flag_unsafe_math_optimizations as non-finite
10263 imaginary components don't produce scalar results. */
10264 if (flag_unsafe_math_optimizations
10265 && TREE_CODE (arg0) == CONJ_EXPR
10266 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10267 return fold_mult_zconjz (type, arg1);
10268 if (flag_unsafe_math_optimizations
10269 && TREE_CODE (arg1) == CONJ_EXPR
10270 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10271 return fold_mult_zconjz (type, arg0);
10273 if (flag_unsafe_math_optimizations)
10274 {
10275 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10276 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10278 /* Optimizations of root(...)*root(...). */
10279 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10280 {
10281 tree rootfn, arg;
10282 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10283 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10285 /* Optimize sqrt(x)*sqrt(x) as x. */
10286 if (BUILTIN_SQRT_P (fcode0)
10287 && operand_equal_p (arg00, arg10, 0)
10288 && ! HONOR_SNANS (TYPE_MODE (type)))
10289 return arg00;
10291 /* Optimize root(x)*root(y) as root(x*y). */
10292 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10293 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10294 return build_call_expr (rootfn, 1, arg);
10295 }
10297 /* Optimize expN(x)*expN(y) as expN(x+y). */
10298 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10300 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10301 tree arg = fold_build2 (PLUS_EXPR, type,
10302 CALL_EXPR_ARG (arg0, 0),
10303 CALL_EXPR_ARG (arg1, 0));
10304 return build_call_expr (expfn, 1, arg);
10307 /* Optimizations of pow(...)*pow(...). */
10308 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10309 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10310 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10311 {
10312 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10313 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10314 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10315 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10317 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10318 if (operand_equal_p (arg01, arg11, 0))
10319 {
10320 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10321 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10322 return build_call_expr (powfn, 2, arg, arg01);
10323 }
10325 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10326 if (operand_equal_p (arg00, arg10, 0))
10327 {
10328 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10329 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10330 return build_call_expr (powfn, 2, arg00, arg);
10331 }
10332 }
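/* For instance, pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0), and
   pow (x, 2.0) * pow (y, 2.0) becomes pow (x*y, 2.0).  */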
10334 /* Optimize tan(x)*cos(x) as sin(x). */
10335 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10336 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10337 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10338 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10339 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10340 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10341 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10342 CALL_EXPR_ARG (arg1, 0), 0))
10344 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10346 if (sinfn != NULL_TREE)
10347 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10350 /* Optimize x*pow(x,c) as pow(x,c+1). */
10351 if (fcode1 == BUILT_IN_POW
10352 || fcode1 == BUILT_IN_POWF
10353 || fcode1 == BUILT_IN_POWL)
10354 {
10355 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10356 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10357 if (TREE_CODE (arg11) == REAL_CST
10358 && !TREE_OVERFLOW (arg11)
10359 && operand_equal_p (arg0, arg10, 0))
10360 {
10361 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10362 REAL_VALUE_TYPE c;
10363 tree arg;
10365 c = TREE_REAL_CST (arg11);
10366 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10367 arg = build_real (type, c);
10368 return build_call_expr (powfn, 2, arg0, arg);
10369 }
10370 }
10372 /* Optimize pow(x,c)*x as pow(x,c+1). */
10373 if (fcode0 == BUILT_IN_POW
10374 || fcode0 == BUILT_IN_POWF
10375 || fcode0 == BUILT_IN_POWL)
10376 {
10377 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10378 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10379 if (TREE_CODE (arg01) == REAL_CST
10380 && !TREE_OVERFLOW (arg01)
10381 && operand_equal_p (arg1, arg00, 0))
10382 {
10383 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10384 REAL_VALUE_TYPE c;
10385 tree arg;
10387 c = TREE_REAL_CST (arg01);
10388 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10389 arg = build_real (type, c);
10390 return build_call_expr (powfn, 2, arg1, arg);
10391 }
10392 }
10394 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10395 if (! optimize_size
10396 && operand_equal_p (arg0, arg1, 0))
10397 {
10398 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10400 if (powfn)
10401 {
10402 tree arg = build_real (type, dconst2);
10403 return build_call_expr (powfn, 2, arg0, arg);
10404 }
10405 }
10406 }
10407 }
10408 goto associate;
10410 case BIT_IOR_EXPR:
10411 bit_ior:
10412 if (integer_all_onesp (arg1))
10413 return omit_one_operand (type, arg1, arg0);
10414 if (integer_zerop (arg1))
10415 return non_lvalue (fold_convert (type, arg0));
10416 if (operand_equal_p (arg0, arg1, 0))
10417 return non_lvalue (fold_convert (type, arg0));
10419 /* ~X | X is -1. */
10420 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10421 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10422 {
10423 t1 = fold_convert (type, integer_zero_node);
10424 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10425 return omit_one_operand (type, t1, arg1);
10426 }
10428 /* X | ~X is -1. */
10429 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10431 {
10432 t1 = fold_convert (type, integer_zero_node);
10433 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10434 return omit_one_operand (type, t1, arg0);
10435 }
10437 /* Canonicalize (X & C1) | C2. */
10438 if (TREE_CODE (arg0) == BIT_AND_EXPR
10439 && TREE_CODE (arg1) == INTEGER_CST
10440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10441 {
10442 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10443 int width = TYPE_PRECISION (type), w;
10444 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10445 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10446 hi2 = TREE_INT_CST_HIGH (arg1);
10447 lo2 = TREE_INT_CST_LOW (arg1);
10449 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10450 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10451 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
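/* For instance, (x & 0x0f) | 0xff is just (x, 0xff): every bit C1 can
   pass through is already set in C2.  */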
10453 if (width > HOST_BITS_PER_WIDE_INT)
10454 {
10455 mhi = (unsigned HOST_WIDE_INT) -1
10456 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10457 mlo = -1;
10458 }
10459 else
10460 {
10461 mhi = 0;
10462 mlo = (unsigned HOST_WIDE_INT) -1
10463 >> (HOST_BITS_PER_WIDE_INT - width);
10464 }
10466 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10467 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10468 return fold_build2 (BIT_IOR_EXPR, type,
10469 TREE_OPERAND (arg0, 0), arg1);
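/* For instance, in a 16-bit type, (x & 0xff00) | 0x00ff becomes
   x | 0x00ff, since the two constants together cover all bits.  */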
10471 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10472 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10473 mode which allows further optimizations. */
10474 hi1 &= mhi;
10475 lo1 &= mlo;
10476 hi2 &= mhi;
10477 lo2 &= mlo;
10478 hi3 = hi1 & ~hi2;
10479 lo3 = lo1 & ~lo2;
10480 for (w = BITS_PER_UNIT;
10481 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10482 w <<= 1)
10483 {
10484 unsigned HOST_WIDE_INT mask
10485 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10486 if (((lo1 | lo2) & mask) == mask
10487 && (lo1 & ~mask) == 0 && hi1 == 0)
10488 {
10489 hi3 = 0;
10490 lo3 = mask;
10491 break;
10492 }
10493 }
10494 if (hi3 != hi1 || lo3 != lo1)
10495 return fold_build2 (BIT_IOR_EXPR, type,
10496 fold_build2 (BIT_AND_EXPR, type,
10497 TREE_OPERAND (arg0, 0),
10498 build_int_cst_wide (type,
10499 lo3, hi3)),
10500 arg1);
10501 }
10503 /* (X & Y) | Y is (X, Y). */
10504 if (TREE_CODE (arg0) == BIT_AND_EXPR
10505 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10506 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10507 /* (X & Y) | X is (Y, X). */
10508 if (TREE_CODE (arg0) == BIT_AND_EXPR
10509 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10510 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10511 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10512 /* X | (X & Y) is (Y, X). */
10513 if (TREE_CODE (arg1) == BIT_AND_EXPR
10514 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10515 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10516 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10517 /* X | (Y & X) is (Y, X). */
10518 if (TREE_CODE (arg1) == BIT_AND_EXPR
10519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10520 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10521 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10523 t1 = distribute_bit_expr (code, type, arg0, arg1);
10524 if (t1 != NULL_TREE)
10525 return t1;
10527 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10529 This results in more efficient code for machines without a NAND
10530 instruction. Combine will canonicalize to the first form
10531 which will allow use of NAND instructions provided by the
10532 backend if they exist. */
10533 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10534 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10536 return fold_build1 (BIT_NOT_EXPR, type,
10537 build2 (BIT_AND_EXPR, type,
10538 fold_convert (type,
10539 TREE_OPERAND (arg0, 0)),
10540 fold_convert (type,
10541 TREE_OPERAND (arg1, 0))));
10544 /* See if this can be simplified into a rotate first. If that
10545 is unsuccessful continue in the association code. */
10546 goto bit_rotate;
10548 case BIT_XOR_EXPR:
10549 if (integer_zerop (arg1))
10550 return non_lvalue (fold_convert (type, arg0));
10551 if (integer_all_onesp (arg1))
10552 return fold_build1 (BIT_NOT_EXPR, type, op0);
10553 if (operand_equal_p (arg0, arg1, 0))
10554 return omit_one_operand (type, integer_zero_node, arg0);
10556 /* ~X ^ X is -1. */
10557 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10558 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10559 {
10560 t1 = fold_convert (type, integer_zero_node);
10561 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10562 return omit_one_operand (type, t1, arg1);
10563 }
10565 /* X ^ ~X is -1. */
10566 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10567 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10568 {
10569 t1 = fold_convert (type, integer_zero_node);
10570 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10571 return omit_one_operand (type, t1, arg0);
10572 }
10574 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10575 with a constant, and the two constants have no bits in common,
10576 we should treat this as a BIT_IOR_EXPR since this may produce more
10577 simplifications. */
10578 if (TREE_CODE (arg0) == BIT_AND_EXPR
10579 && TREE_CODE (arg1) == BIT_AND_EXPR
10580 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10581 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10582 && integer_zerop (const_binop (BIT_AND_EXPR,
10583 TREE_OPERAND (arg0, 1),
10584 TREE_OPERAND (arg1, 1), 0)))
10585 {
10586 code = BIT_IOR_EXPR;
10587 goto bit_ior;
10588 }
10590 /* (X | Y) ^ X -> Y & ~X */
10591 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10592 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10593 {
10594 tree t2 = TREE_OPERAND (arg0, 1);
10595 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10596 arg1);
10597 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10598 fold_convert (type, t1));
10599 return t1;
10600 }
10602 /* (Y | X) ^ X -> Y & ~X */
10603 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10604 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10605 {
10606 tree t2 = TREE_OPERAND (arg0, 0);
10607 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10608 arg1);
10609 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10610 fold_convert (type, t1));
10611 return t1;
10612 }
10614 /* X ^ (X | Y) -> Y & ~X */
10615 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10616 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10617 {
10618 tree t2 = TREE_OPERAND (arg1, 1);
10619 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10620 arg0);
10621 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10622 fold_convert (type, t1));
10623 return t1;
10624 }
10626 /* X ^ (Y | X) -> Y & ~X */
10627 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10628 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10629 {
10630 tree t2 = TREE_OPERAND (arg1, 0);
10631 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10632 arg0);
10633 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10634 fold_convert (type, t1));
10635 return t1;
10636 }
10638 /* Convert ~X ^ ~Y to X ^ Y. */
10639 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10640 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10641 return fold_build2 (code, type,
10642 fold_convert (type, TREE_OPERAND (arg0, 0)),
10643 fold_convert (type, TREE_OPERAND (arg1, 0)));
10645 /* Convert ~X ^ C to X ^ ~C. */
10646 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10647 && TREE_CODE (arg1) == INTEGER_CST)
10648 return fold_build2 (code, type,
10649 fold_convert (type, TREE_OPERAND (arg0, 0)),
10650 fold_build1 (BIT_NOT_EXPR, type, arg1));
10652 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10653 if (TREE_CODE (arg0) == BIT_AND_EXPR
10654 && integer_onep (TREE_OPERAND (arg0, 1))
10655 && integer_onep (arg1))
10656 return fold_build2 (EQ_EXPR, type, arg0,
10657 build_int_cst (TREE_TYPE (arg0), 0));
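/* I.e. flipping the result of a low-bit test inverts it:
   (x & 1) ^ 1 asks whether the low bit of x is clear.  */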
10659 /* Fold (X & Y) ^ Y as ~X & Y. */
10660 if (TREE_CODE (arg0) == BIT_AND_EXPR
10661 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10662 {
10663 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10664 return fold_build2 (BIT_AND_EXPR, type,
10665 fold_build1 (BIT_NOT_EXPR, type, tem),
10666 fold_convert (type, arg1));
10667 }
10668 /* Fold (X & Y) ^ X as ~Y & X. */
10669 if (TREE_CODE (arg0) == BIT_AND_EXPR
10670 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10671 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10672 {
10673 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10674 return fold_build2 (BIT_AND_EXPR, type,
10675 fold_build1 (BIT_NOT_EXPR, type, tem),
10676 fold_convert (type, arg1));
10677 }
10678 /* Fold X ^ (X & Y) as X & ~Y. */
10679 if (TREE_CODE (arg1) == BIT_AND_EXPR
10680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10681 {
10682 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10683 return fold_build2 (BIT_AND_EXPR, type,
10684 fold_convert (type, arg0),
10685 fold_build1 (BIT_NOT_EXPR, type, tem));
10686 }
10687 /* Fold X ^ (Y & X) as ~Y & X. */
10688 if (TREE_CODE (arg1) == BIT_AND_EXPR
10689 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10690 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10691 {
10692 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10693 return fold_build2 (BIT_AND_EXPR, type,
10694 fold_build1 (BIT_NOT_EXPR, type, tem),
10695 fold_convert (type, arg0));
10696 }
10698 /* See if this can be simplified into a rotate first. If that
10699 is unsuccessful continue in the association code. */
10700 goto bit_rotate;
10702 case BIT_AND_EXPR:
10703 if (integer_all_onesp (arg1))
10704 return non_lvalue (fold_convert (type, arg0));
10705 if (integer_zerop (arg1))
10706 return omit_one_operand (type, arg1, arg0);
10707 if (operand_equal_p (arg0, arg1, 0))
10708 return non_lvalue (fold_convert (type, arg0));
10710 /* ~X & X is always zero. */
10711 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10712 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10713 return omit_one_operand (type, integer_zero_node, arg1);
10715 /* X & ~X is always zero. */
10716 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10718 return omit_one_operand (type, integer_zero_node, arg0);
10720 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10721 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10722 && TREE_CODE (arg1) == INTEGER_CST
10723 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10724 {
10725 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10726 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10727 TREE_OPERAND (arg0, 0), tmp1);
10728 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10729 TREE_OPERAND (arg0, 1), tmp1);
10730 return fold_convert (type,
10731 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10732 tmp2, tmp3));
10733 }
10735 /* (X | Y) & Y is (X, Y). */
10736 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10737 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10738 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10739 /* (X | Y) & X is (Y, X). */
10740 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10741 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10742 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10743 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10744 /* X & (X | Y) is (Y, X). */
10745 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10747 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10748 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10749 /* X & (Y | X) is (Y, X). */
10750 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10751 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10752 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10753 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10755 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10756 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10757 && integer_onep (TREE_OPERAND (arg0, 1))
10758 && integer_onep (arg1))
10759 {
10760 tem = TREE_OPERAND (arg0, 0);
10761 return fold_build2 (EQ_EXPR, type,
10762 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10763 build_int_cst (TREE_TYPE (tem), 1)),
10764 build_int_cst (TREE_TYPE (tem), 0));
10765 }
10766 /* Fold ~X & 1 as (X & 1) == 0. */
10767 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10768 && integer_onep (arg1))
10769 {
10770 tem = TREE_OPERAND (arg0, 0);
10771 return fold_build2 (EQ_EXPR, type,
10772 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10773 build_int_cst (TREE_TYPE (tem), 1)),
10774 build_int_cst (TREE_TYPE (tem), 0));
10775 }
10777 /* Fold (X ^ Y) & Y as ~X & Y. */
10778 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10780 {
10781 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10782 return fold_build2 (BIT_AND_EXPR, type,
10783 fold_build1 (BIT_NOT_EXPR, type, tem),
10784 fold_convert (type, arg1));
10785 }
10786 /* Fold (X ^ Y) & X as ~Y & X. */
10787 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10788 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10789 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10790 {
10791 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10792 return fold_build2 (BIT_AND_EXPR, type,
10793 fold_build1 (BIT_NOT_EXPR, type, tem),
10794 fold_convert (type, arg1));
10795 }
10796 /* Fold X & (X ^ Y) as X & ~Y. */
10797 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10798 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10799 {
10800 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10801 return fold_build2 (BIT_AND_EXPR, type,
10802 fold_convert (type, arg0),
10803 fold_build1 (BIT_NOT_EXPR, type, tem));
10804 }
10805 /* Fold X & (Y ^ X) as ~Y & X. */
10806 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10807 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10808 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10809 {
10810 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10811 return fold_build2 (BIT_AND_EXPR, type,
10812 fold_build1 (BIT_NOT_EXPR, type, tem),
10813 fold_convert (type, arg0));
10814 }
10816 t1 = distribute_bit_expr (code, type, arg0, arg1);
10817 if (t1 != NULL_TREE)
10818 return t1;
10819 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10820 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10821 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10822 {
10823 unsigned int prec
10824 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10826 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10827 && (~TREE_INT_CST_LOW (arg1)
10828 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10829 return fold_convert (type, TREE_OPERAND (arg0, 0));
10830 }
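/* 0377 is 0xff: for an unsigned char c, ((int) c) & 0377 is (int) c,
   because zero extension already cleared all the higher bits.  */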
10832 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10834 This results in more efficient code for machines without a NOR
10835 instruction. Combine will canonicalize to the first form
10836 which will allow use of NOR instructions provided by the
10837 backend if they exist. */
10838 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10839 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10841 return fold_build1 (BIT_NOT_EXPR, type,
10842 build2 (BIT_IOR_EXPR, type,
10843 fold_convert (type,
10844 TREE_OPERAND (arg0, 0)),
10845 fold_convert (type,
10846 TREE_OPERAND (arg1, 0))));
10849 /* If arg0 is derived from the address of an object or function, we may
10850 be able to fold this expression using the object or function's
10851 alignment. */
10852 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10853 {
10854 unsigned HOST_WIDE_INT modulus, residue;
10855 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10857 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10859 /* This works because modulus is a power of 2. If this weren't the
10860 case, we'd have to replace it by its greatest power-of-2
10861 divisor: modulus & -modulus. */
10862 if (low < modulus)
10863 return build_int_cst (type, residue & low);
10864 }
10866 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10867 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10868 if the new mask might be further optimized. */
10869 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10870 || TREE_CODE (arg0) == RSHIFT_EXPR)
10871 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10872 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10873 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10874 < TYPE_PRECISION (TREE_TYPE (arg0))
10875 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10876 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10877 {
10878 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10879 unsigned HOST_WIDE_INT mask
10880 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10881 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10882 tree shift_type = TREE_TYPE (arg0);
10884 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10885 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10886 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10887 && TYPE_PRECISION (TREE_TYPE (arg0))
10888 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10889 {
10890 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10891 tree arg00 = TREE_OPERAND (arg0, 0);
10892 /* See if more bits can be proven as zero because of
10893 zero extension. */
10894 if (TREE_CODE (arg00) == NOP_EXPR
10895 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10896 {
10897 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10898 if (TYPE_PRECISION (inner_type)
10899 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10900 && TYPE_PRECISION (inner_type) < prec)
10901 {
10902 prec = TYPE_PRECISION (inner_type);
10903 /* See if we can shorten the right shift. */
10904 if (shiftc < prec)
10905 shift_type = inner_type;
10906 }
10907 }
10908 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10909 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10910 zerobits <<= prec - shiftc;
10911 /* For arithmetic shift if sign bit could be set, zerobits
10912 can actually contain sign bits, so no transformation is
10913 possible, unless MASK masks them all away. In that
10914 case the shift needs to be converted into logical shift. */
10915 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10916 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10917 {
10918 if ((mask & zerobits) == 0)
10919 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10920 else
10921 zerobits = 0;
10922 }
10923 }
10925 /* ((X << 16) & 0xff00) is (X, 0). */
10926 if ((mask & zerobits) == mask)
10927 return omit_one_operand (type, build_int_cst (type, 0), arg0);
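/* For instance, for a 32-bit X, (X << 16) & 0xff00 is (X, 0): the
   mask selects only bits the shift is known to have zeroed.  */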
10929 newmask = mask | zerobits;
10930 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10931 {
10932 unsigned int prec;
10934 /* Only do the transformation if NEWMASK is some integer
10935 mode's mask. */
10936 for (prec = BITS_PER_UNIT;
10937 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10938 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10939 break;
10940 if (prec < HOST_BITS_PER_WIDE_INT
10941 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10942 {
10943 if (shift_type != TREE_TYPE (arg0))
10944 {
10945 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10946 fold_convert (shift_type,
10947 TREE_OPERAND (arg0, 0)),
10948 TREE_OPERAND (arg0, 1));
10949 tem = fold_convert (type, tem);
10950 }
10951 else
10952 tem = op0;
10953 return fold_build2 (BIT_AND_EXPR, type, tem,
10954 build_int_cst_type (TREE_TYPE (op1),
10955 newmask));
10956 }
10957 }
10958 }
10959 goto associate;
10961 case RDIV_EXPR:
10963 /* Don't touch a floating-point divide by zero unless the mode
10964 of the constant can represent infinity. */
10965 if (TREE_CODE (arg1) == REAL_CST
10966 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10967 && real_zerop (arg1))
10968 return NULL_TREE;
10970 /* Optimize A / A to 1.0 if we don't care about
10971 NaNs or Infinities. Skip the transformation
10972 for non-real operands. */
10973 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10974 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10975 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10976 && operand_equal_p (arg0, arg1, 0))
10977 {
10978 tree r = build_real (TREE_TYPE (arg0), dconst1);
10980 return omit_two_operands (type, r, arg0, arg1);
10981 }
10983 /* The complex version of the above A / A optimization. */
10984 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10985 && operand_equal_p (arg0, arg1, 0))
10986 {
10987 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10988 if (! HONOR_NANS (TYPE_MODE (elem_type))
10989 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10990 {
10991 tree r = build_real (elem_type, dconst1);
10992 /* omit_two_operands will call fold_convert for us. */
10993 return omit_two_operands (type, r, arg0, arg1);
10994 }
10995 }
10997 /* (-A) / (-B) -> A / B */
10998 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10999 return fold_build2 (RDIV_EXPR, type,
11000 TREE_OPERAND (arg0, 0),
11001 negate_expr (arg1));
11002 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11003 return fold_build2 (RDIV_EXPR, type,
11004 negate_expr (arg0),
11005 TREE_OPERAND (arg1, 0));
11007 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11008 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11009 && real_onep (arg1))
11010 return non_lvalue (fold_convert (type, arg0));
11012 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11013 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11014 && real_minus_onep (arg1))
11015 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11017 /* If ARG1 is a constant, we can convert this to a multiply by the
11018 reciprocal. This does not have the same rounding properties,
11019 so only do this if -freciprocal-math. We can actually
11020 always safely do it if ARG1 is a power of two, but it's hard to
11021 tell if it is or not in a portable manner. */
11022 if (TREE_CODE (arg1) == REAL_CST)
11023 {
11024 if (flag_reciprocal_math
11025 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11026 arg1, 0)))
11027 return fold_build2 (MULT_EXPR, type, arg0, tem);
11028 /* Find the reciprocal if optimizing and the result is exact. */
11029 if (optimize)
11030 {
11031 REAL_VALUE_TYPE r;
11032 r = TREE_REAL_CST (arg1);
11033 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11034 {
11035 tem = build_real (type, r);
11036 return fold_build2 (MULT_EXPR, type,
11037 fold_convert (type, arg0), tem);
11038 }
11039 }
11040 }
11041 /* Convert A/B/C to A/(B*C). */
11042 if (flag_reciprocal_math
11043 && TREE_CODE (arg0) == RDIV_EXPR)
11044 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11045 fold_build2 (MULT_EXPR, type,
11046 TREE_OPERAND (arg0, 1), arg1));
11048 /* Convert A/(B/C) to (A/B)*C. */
11049 if (flag_reciprocal_math
11050 && TREE_CODE (arg1) == RDIV_EXPR)
11051 return fold_build2 (MULT_EXPR, type,
11052 fold_build2 (RDIV_EXPR, type, arg0,
11053 TREE_OPERAND (arg1, 0)),
11054 TREE_OPERAND (arg1, 1));
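/* For instance, x/(y/z) becomes (x/y)*z, trading the inner division
   for a multiplication.  */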
11056 /* Convert C1/(X*C2) into (C1/C2)/X. */
11057 if (flag_reciprocal_math
11058 && TREE_CODE (arg1) == MULT_EXPR
11059 && TREE_CODE (arg0) == REAL_CST
11060 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11061 {
11062 tree tem = const_binop (RDIV_EXPR, arg0,
11063 TREE_OPERAND (arg1, 1), 0);
11064 if (tem)
11065 return fold_build2 (RDIV_EXPR, type, tem,
11066 TREE_OPERAND (arg1, 0));
11067 }
11069 if (flag_unsafe_math_optimizations)
11070 {
11071 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11072 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11074 /* Optimize sin(x)/cos(x) as tan(x). */
11075 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11076 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11077 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11078 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11079 CALL_EXPR_ARG (arg1, 0), 0))
11080 {
11081 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11083 if (tanfn != NULL_TREE)
11084 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11085 }
11087 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11088 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11089 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11090 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11091 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11092 CALL_EXPR_ARG (arg1, 0), 0))
11093 {
11094 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11096 if (tanfn != NULL_TREE)
11097 {
11098 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11099 return fold_build2 (RDIV_EXPR, type,
11100 build_real (type, dconst1), tmp);
11101 }
11102 }
11104 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11105 NaNs or Infinities. */
11106 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11107 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11108 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11109 {
11110 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11111 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11113 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11114 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11115 && operand_equal_p (arg00, arg01, 0))
11116 {
11117 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11119 if (cosfn != NULL_TREE)
11120 return build_call_expr (cosfn, 1, arg00);
11121 }
11122 }
11124 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11125 NaNs or Infinities. */
11126 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11127 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11128 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11129 {
11130 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11131 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11133 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11134 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11135 && operand_equal_p (arg00, arg01, 0))
11136 {
11137 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11139 if (cosfn != NULL_TREE)
11140 {
11141 tree tmp = build_call_expr (cosfn, 1, arg00);
11142 return fold_build2 (RDIV_EXPR, type,
11143 build_real (type, dconst1),
11144 tmp);
11145 }
11146 }
11147 }
11149 /* Optimize pow(x,c)/x as pow(x,c-1). */
11150 if (fcode0 == BUILT_IN_POW
11151 || fcode0 == BUILT_IN_POWF
11152 || fcode0 == BUILT_IN_POWL)
11153 {
11154 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11155 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11156 if (TREE_CODE (arg01) == REAL_CST
11157 && !TREE_OVERFLOW (arg01)
11158 && operand_equal_p (arg1, arg00, 0))
11159 {
11160 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11161 REAL_VALUE_TYPE c;
11162 tree arg;
11164 c = TREE_REAL_CST (arg01);
11165 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11166 arg = build_real (type, c);
11167 return build_call_expr (powfn, 2, arg1, arg);
11168 }
11169 }
11171 /* Optimize a/root(b/c) into a*root(c/b). */
11172 if (BUILTIN_ROOT_P (fcode1))
11173 {
11174 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11176 if (TREE_CODE (rootarg) == RDIV_EXPR)
11177 {
11178 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11179 tree b = TREE_OPERAND (rootarg, 0);
11180 tree c = TREE_OPERAND (rootarg, 1);
11182 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11184 tmp = build_call_expr (rootfn, 1, tmp);
11185 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11186 }
11187 }
11189 /* Optimize x/expN(y) into x*expN(-y). */
11190 if (BUILTIN_EXPONENT_P (fcode1))
11191 {
11192 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11193 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11194 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11195 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11196 }
11198 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11199 if (fcode1 == BUILT_IN_POW
11200 || fcode1 == BUILT_IN_POWF
11201 || fcode1 == BUILT_IN_POWL)
11202 {
11203 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11204 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11205 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11206 tree neg11 = fold_convert (type, negate_expr (arg11));
11207 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11208 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11209 }
11210 }
11211 return NULL_TREE;
11213 case TRUNC_DIV_EXPR:
11214 case FLOOR_DIV_EXPR:
11215 /* Simplify A / (B << N) where A and B are positive and B is
11216 a power of 2, to A >> (N + log2(B)). */
11217 strict_overflow_p = false;
11218 if (TREE_CODE (arg1) == LSHIFT_EXPR
11219 && (TYPE_UNSIGNED (type)
11220 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11221 {
11222 tree sval = TREE_OPERAND (arg1, 0);
11223 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11224 {
11225 tree sh_cnt = TREE_OPERAND (arg1, 1);
11226 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11228 if (strict_overflow_p)
11229 fold_overflow_warning (("assuming signed overflow does not "
11230 "occur when simplifying A / (B << N)"),
11231 WARN_STRICT_OVERFLOW_MISC);
11233 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11234 sh_cnt, build_int_cst (NULL_TREE, pow2));
11235 return fold_build2 (RSHIFT_EXPR, type,
11236 fold_convert (type, arg0), sh_cnt);
11237 }
11238 }
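/* For instance, for unsigned A, A / (4 << N) becomes A >> (N + 2),
   since log2 (4) == 2.  */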
11240 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11241 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11242 if (INTEGRAL_TYPE_P (type)
11243 && TYPE_UNSIGNED (type)
11244 && code == FLOOR_DIV_EXPR)
11245 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11247 /* Fall thru */
11249 case ROUND_DIV_EXPR:
11250 case CEIL_DIV_EXPR:
11251 case EXACT_DIV_EXPR:
11252 if (integer_onep (arg1))
11253 return non_lvalue (fold_convert (type, arg0));
11254 if (integer_zerop (arg1))
11255 return NULL_TREE;
11256 /* X / -1 is -X. */
11257 if (!TYPE_UNSIGNED (type)
11258 && TREE_CODE (arg1) == INTEGER_CST
11259 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11260 && TREE_INT_CST_HIGH (arg1) == -1)
11261 return fold_convert (type, negate_expr (arg0));
11263 /* Convert -A / -B to A / B when the type is signed and overflow is
11264 undefined. */
11265 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11266 && TREE_CODE (arg0) == NEGATE_EXPR
11267 && negate_expr_p (arg1))
11268 {
11269 if (INTEGRAL_TYPE_P (type))
11270 fold_overflow_warning (("assuming signed overflow does not occur "
11271 "when distributing negation across "
11272 "division"),
11273 WARN_STRICT_OVERFLOW_MISC);
11274 return fold_build2 (code, type,
11275 fold_convert (type, TREE_OPERAND (arg0, 0)),
11276 negate_expr (arg1));
11277 }
11278 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11279 && TREE_CODE (arg1) == NEGATE_EXPR
11280 && negate_expr_p (arg0))
11281 {
11282 if (INTEGRAL_TYPE_P (type))
11283 fold_overflow_warning (("assuming signed overflow does not occur "
11284 "when distributing negation across "
11285 "division"),
11286 WARN_STRICT_OVERFLOW_MISC);
11287 return fold_build2 (code, type, negate_expr (arg0),
11288 TREE_OPERAND (arg1, 0));
11289 }
11291 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11292 operation, EXACT_DIV_EXPR.
11294 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11295 At one time others generated faster code; it's not clear if they do
11296 after the last round of changes to the DIV code in expmed.c. */
11297 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11298 && multiple_of_p (type, arg0, arg1))
11299 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11301 strict_overflow_p = false;
11302 if (TREE_CODE (arg1) == INTEGER_CST
11303 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11304 &strict_overflow_p)))
11305 {
11306 if (strict_overflow_p)
11307 fold_overflow_warning (("assuming signed overflow does not occur "
11308 "when simplifying division"),
11309 WARN_STRICT_OVERFLOW_MISC);
11310 return fold_convert (type, tem);
11311 }
11313 return NULL_TREE;
11315 case CEIL_MOD_EXPR:
11316 case FLOOR_MOD_EXPR:
11317 case ROUND_MOD_EXPR:
11318 case TRUNC_MOD_EXPR:
11319 /* X % 1 is always zero, but be sure to preserve any side
11320 effects in X. */
11321 if (integer_onep (arg1))
11322 return omit_one_operand (type, integer_zero_node, arg0);
11324 /* X % 0, return X % 0 unchanged so that we can get the
11325 proper warnings and errors. */
11326 if (integer_zerop (arg1))
11327 return NULL_TREE;
11329 /* 0 % X is always zero, but be sure to preserve any side
11330 effects in X. Place this after checking for X == 0. */
11331 if (integer_zerop (arg0))
11332 return omit_one_operand (type, integer_zero_node, arg1);
11334 /* X % -1 is zero. */
11335 if (!TYPE_UNSIGNED (type)
11336 && TREE_CODE (arg1) == INTEGER_CST
11337 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11338 && TREE_INT_CST_HIGH (arg1) == -1)
11339 return omit_one_operand (type, integer_zero_node, arg0);
11341 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11342 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11343 strict_overflow_p = false;
11344 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11345 && (TYPE_UNSIGNED (type)
11346 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11347 {
11348 tree c = arg1;
11349 /* Also optimize A % (C << N) where C is a power of 2,
11350 to A & ((C << N) - 1). */
11351 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11352 c = TREE_OPERAND (arg1, 0);
11354 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11355 {
11356 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11357 build_int_cst (TREE_TYPE (arg1), 1));
11358 if (strict_overflow_p)
11359 fold_overflow_warning (("assuming signed overflow does not "
11360 "occur when simplifying "
11361 "X % (power of two)"),
11362 WARN_STRICT_OVERFLOW_MISC);
11363 return fold_build2 (BIT_AND_EXPR, type,
11364 fold_convert (type, arg0),
11365 fold_convert (type, mask));
11366 }
11367 }
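/* For instance, for unsigned X, X % 16 becomes X & 15, and
   X % (2 << N) becomes X & ((2 << N) - 1).  */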
11369 /* X % -C is the same as X % C. */
11370 if (code == TRUNC_MOD_EXPR
11371 && !TYPE_UNSIGNED (type)
11372 && TREE_CODE (arg1) == INTEGER_CST
11373 && !TREE_OVERFLOW (arg1)
11374 && TREE_INT_CST_HIGH (arg1) < 0
11375 && !TYPE_OVERFLOW_TRAPS (type)
11376 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11377 && !sign_bit_p (arg1, arg1))
11378 return fold_build2 (code, type, fold_convert (type, arg0),
11379 fold_convert (type, negate_expr (arg1)));
11381 /* X % -Y is the same as X % Y. */
11382 if (code == TRUNC_MOD_EXPR
11383 && !TYPE_UNSIGNED (type)
11384 && TREE_CODE (arg1) == NEGATE_EXPR
11385 && !TYPE_OVERFLOW_TRAPS (type))
11386 return fold_build2 (code, type, fold_convert (type, arg0),
11387 fold_convert (type, TREE_OPERAND (arg1, 0)));
11389 if (TREE_CODE (arg1) == INTEGER_CST
11390 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11391 &strict_overflow_p)))
11392 {
11393 if (strict_overflow_p)
11394 fold_overflow_warning (("assuming signed overflow does not occur "
11395 "when simplifying modulos"),
11396 WARN_STRICT_OVERFLOW_MISC);
11397 return fold_convert (type, tem);
11398 }
11400 return NULL_TREE;
11402 case LROTATE_EXPR:
11403 case RROTATE_EXPR:
11404 if (integer_all_onesp (arg0))
11405 return omit_one_operand (type, arg0, arg1);
11406 goto shift;
11408 case RSHIFT_EXPR:
11409 /* Optimize -1 >> x for arithmetic right shifts. */
11410 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11411 return omit_one_operand (type, arg0, arg1);
11412 /* ... fall through ... */
11414 case LSHIFT_EXPR:
11415 shift:
11416 if (integer_zerop (arg1))
11417 return non_lvalue (fold_convert (type, arg0));
11418 if (integer_zerop (arg0))
11419 return omit_one_operand (type, arg0, arg1);
11421 /* Since negative shift count is not well-defined,
11422 don't try to compute it in the compiler. */
11423 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11424 return NULL_TREE;
11426 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11427 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11428 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11429 && host_integerp (TREE_OPERAND (arg0, 1), false)
11430 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11431 {
11432 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11433 + TREE_INT_CST_LOW (arg1));
11435 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11436 being well defined. */
11437 if (low >= TYPE_PRECISION (type))
11438 {
11439 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11440 low = low % TYPE_PRECISION (type);
11441 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11442 return build_int_cst (type, 0);
11443 else
11444 low = TYPE_PRECISION (type) - 1;
11445 }
11447 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11448 build_int_cst (type, low));
11449 }
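/* For instance, (x << 3) << 5 becomes x << 8; for a 32-bit x,
   (x << 20) << 20 becomes 0, while rotate counts wrap modulo the
   precision.  */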
11451 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11452 into x & ((unsigned)-1 >> c) for unsigned types. */
11453 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11454 || (TYPE_UNSIGNED (type)
11455 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11456 && host_integerp (arg1, false)
11457 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11458 && host_integerp (TREE_OPERAND (arg0, 1), false)
11459 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11460 {
11461 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11462 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11463 tree lshift;
11464 tree arg00;
11466 if (low0 == low1)
11467 {
11468 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11470 lshift = build_int_cst (type, -1);
11471 lshift = int_const_binop (code, lshift, arg1, 0);
11473 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11474 }
11475 }
11477 /* Rewrite an LROTATE_EXPR by a constant into an
11478 RROTATE_EXPR by a new constant. */
11479 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11480 {
11481 tree tem = build_int_cst (TREE_TYPE (arg1),
11482 TYPE_PRECISION (type));
11483 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11484 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11485 }
11487 /* If we have a rotate of a bit operation with the rotate count and
11488 the second operand of the bit operation both constant,
11489 permute the two operations. */
11490 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11491 && (TREE_CODE (arg0) == BIT_AND_EXPR
11492 || TREE_CODE (arg0) == BIT_IOR_EXPR
11493 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11494 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11495 return fold_build2 (TREE_CODE (arg0), type,
11496 fold_build2 (code, type,
11497 TREE_OPERAND (arg0, 0), arg1),
11498 fold_build2 (code, type,
11499 TREE_OPERAND (arg0, 1), arg1));
11501 /* Two consecutive rotates adding up to the precision of the
11502 type can be ignored. */
11503 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11504 && TREE_CODE (arg0) == RROTATE_EXPR
11505 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11506 && TREE_INT_CST_HIGH (arg1) == 0
11507 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11508 && ((TREE_INT_CST_LOW (arg1)
11509 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11510 == (unsigned int) TYPE_PRECISION (type)))
11511 return TREE_OPERAND (arg0, 0);
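/* For instance, in a 32-bit type, ((x r>> 8) r>> 24) is x: the two
   rotates move every bit back to its original position.  */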
11513 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11514 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11515 if the latter can be further optimized. */
11516 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11517 && TREE_CODE (arg0) == BIT_AND_EXPR
11518 && TREE_CODE (arg1) == INTEGER_CST
11519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11520 {
11521 tree mask = fold_build2 (code, type,
11522 fold_convert (type, TREE_OPERAND (arg0, 1)),
11523 arg1);
11524 tree shift = fold_build2 (code, type,
11525 fold_convert (type, TREE_OPERAND (arg0, 0)),
11526 arg1);
11527 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11528 if (tem)
11529 return tem;
11530 }
11532 return NULL_TREE;
11534 case MIN_EXPR:
11535 if (operand_equal_p (arg0, arg1, 0))
11536 return omit_one_operand (type, arg0, arg1);
11537 if (INTEGRAL_TYPE_P (type)
11538 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11539 return omit_one_operand (type, arg1, arg0);
11540 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11541 if (tem)
11542 return tem;
11543 goto associate;
11545 case MAX_EXPR:
11546 if (operand_equal_p (arg0, arg1, 0))
11547 return omit_one_operand (type, arg0, arg1);
11548 if (INTEGRAL_TYPE_P (type)
11549 && TYPE_MAX_VALUE (type)
11550 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11551 return omit_one_operand (type, arg1, arg0);
11552 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11553 if (tem)
11554 return tem;
11555 goto associate;
11557 case TRUTH_ANDIF_EXPR:
11558 /* Note that the operands of this must be ints
11559 and their values must be 0 or 1.
11560 ("true" is a fixed value perhaps depending on the language.) */
11561 /* If first arg is constant zero, return it. */
11562 if (integer_zerop (arg0))
11563 return fold_convert (type, arg0);
11564 case TRUTH_AND_EXPR:
11565 /* If either arg is constant true, drop it. */
11566 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11567 return non_lvalue (fold_convert (type, arg1));
11568 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11569 /* Preserve sequence points. */
11570 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11571 return non_lvalue (fold_convert (type, arg0));
11572 /* If second arg is constant zero, result is zero, but first arg
11573 must be evaluated. */
11574 if (integer_zerop (arg1))
11575 return omit_one_operand (type, arg1, arg0);
11576 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11577 case will be handled here. */
11578 if (integer_zerop (arg0))
11579 return omit_one_operand (type, arg0, arg1);
11581 /* !X && X is always false. */
11582 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11583 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11584 return omit_one_operand (type, integer_zero_node, arg1);
11585 /* X && !X is always false. */
11586 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11587 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11588 return omit_one_operand (type, integer_zero_node, arg0);
11590 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11591 means A >= Y && A != MAX, but in this case we know that
11592 A < X <= MAX. */
11594 if (!TREE_SIDE_EFFECTS (arg0)
11595 && !TREE_SIDE_EFFECTS (arg1))
11596 {
11597 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11598 if (tem && !operand_equal_p (tem, arg0, 0))
11599 return fold_build2 (code, type, tem, arg1);
11601 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11602 if (tem && !operand_equal_p (tem, arg1, 0))
11603 return fold_build2 (code, type, arg0, tem);
11604 }
11607 /* We only do these simplifications if we are optimizing. */
11608 if (!optimize)
11609 return NULL_TREE;
11611 /* Check for things like (A || B) && (A || C). We can convert this
11612 to A || (B && C). Note that either operator can be any of the four
11613 truth and/or operations and the transformation will still be
11614 valid. Also note that we only care about order for the
11615 ANDIF and ORIF operators. If B contains side effects, this
11616 might change the truth-value of A. */
11617 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11618 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11619 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11620 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11621 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11622 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11623 {
11624 tree a00 = TREE_OPERAND (arg0, 0);
11625 tree a01 = TREE_OPERAND (arg0, 1);
11626 tree a10 = TREE_OPERAND (arg1, 0);
11627 tree a11 = TREE_OPERAND (arg1, 1);
11628 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11629 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11630 && (code == TRUTH_AND_EXPR
11631 || code == TRUTH_OR_EXPR));
11633 if (operand_equal_p (a00, a10, 0))
11634 return fold_build2 (TREE_CODE (arg0), type, a00,
11635 fold_build2 (code, type, a01, a11));
11636 else if (commutative && operand_equal_p (a00, a11, 0))
11637 return fold_build2 (TREE_CODE (arg0), type, a00,
11638 fold_build2 (code, type, a01, a10));
11639 else if (commutative && operand_equal_p (a01, a10, 0))
11640 return fold_build2 (TREE_CODE (arg0), type, a01,
11641 fold_build2 (code, type, a00, a11));
11643 /* This case is tricky because we must either have commutative
11644 operators or else A10 must not have side-effects. */
11646 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11647 && operand_equal_p (a01, a11, 0))
11648 return fold_build2 (TREE_CODE (arg0), type,
11649 fold_build2 (code, type, a00, a10),
11650 a01);
11651 }
11653 /* See if we can build a range comparison. */
11654 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11655 return tem;
11657 /* Check for the possibility of merging component references. If our
11658 lhs is another similar operation, try to merge its rhs with our
11659 rhs. Then try to merge our lhs and rhs. */
11660 if (TREE_CODE (arg0) == code
11661 && 0 != (tem = fold_truthop (code, type,
11662 TREE_OPERAND (arg0, 1), arg1)))
11663 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11665 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11666 return tem;
11668 return NULL_TREE;
11670 case TRUTH_ORIF_EXPR:
11671 /* Note that the operands of this must be ints
11672 and their values must be 0 or true.
11673 ("true" is a fixed value perhaps depending on the language.) */
11674 /* If first arg is constant true, return it. */
11675 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11676 return fold_convert (type, arg0);
11677 case TRUTH_OR_EXPR:
11678 /* If either arg is constant zero, drop it. */
11679 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11680 return non_lvalue (fold_convert (type, arg1));
11681 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11682 /* Preserve sequence points. */
11683 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11684 return non_lvalue (fold_convert (type, arg0));
11685 /* If second arg is constant true, result is true, but we must
11686 evaluate first arg. */
11687 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11688 return omit_one_operand (type, arg1, arg0);
11689 /* Likewise for first arg, but note this only occurs here for
11690 TRUTH_OR_EXPR. */
11691 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11692 return omit_one_operand (type, arg0, arg1);
11694 /* !X || X is always true. */
11695 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11697 return omit_one_operand (type, integer_one_node, arg1);
11698 /* X || !X is always true. */
11699 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11700 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11701 return omit_one_operand (type, integer_one_node, arg0);
11705 case TRUTH_XOR_EXPR:
11706 /* If the second arg is constant zero, drop it. */
11707 if (integer_zerop (arg1))
11708 return non_lvalue (fold_convert (type, arg0));
11709 /* If the second arg is constant true, this is a logical inversion. */
11710 if (integer_onep (arg1))
11711 {
11712 /* Only call invert_truthvalue if operand is a truth value. */
11713 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11714 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11715 else
11716 tem = invert_truthvalue (arg0);
11717 return non_lvalue (fold_convert (type, tem));
11718 }
11719 /* Identical arguments cancel to zero. */
11720 if (operand_equal_p (arg0, arg1, 0))
11721 return omit_one_operand (type, integer_zero_node, arg0);
11723 /* !X ^ X is always true. */
11724 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11726 return omit_one_operand (type, integer_one_node, arg1);
11728 /* X ^ !X is always true. */
11729 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11730 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11731 return omit_one_operand (type, integer_one_node, arg0);
11733 return NULL_TREE;
11735 case EQ_EXPR:
11736 case NE_EXPR:
11737 tem = fold_comparison (code, type, op0, op1);
11738 if (tem != NULL_TREE)
11739 return tem;
11741 /* bool_var != 0 becomes bool_var. */
11742 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11743 && code == NE_EXPR)
11744 return non_lvalue (fold_convert (type, arg0));
11746 /* bool_var == 1 becomes bool_var. */
11747 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11748 && code == EQ_EXPR)
11749 return non_lvalue (fold_convert (type, arg0));
11751 /* bool_var != 1 becomes !bool_var. */
11752 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11753 && code == NE_EXPR)
11754 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11756 /* bool_var == 0 becomes !bool_var. */
11757 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11758 && code == EQ_EXPR)
11759 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11761 /* If this is an equality comparison of the address of two non-weak,
11762 unaliased symbols neither of which are extern (since we do not
11763 have access to attributes for externs), then we know the result. */
11764 if (TREE_CODE (arg0) == ADDR_EXPR
11765 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11766 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11767 && ! lookup_attribute ("alias",
11768 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11769 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11770 && TREE_CODE (arg1) == ADDR_EXPR
11771 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11772 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11773 && ! lookup_attribute ("alias",
11774 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11775 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11776 {
11777 /* We know that we're looking at the address of two
11778 non-weak, unaliased, static _DECL nodes.
11780 It is both wasteful and incorrect to call operand_equal_p
11781 to compare the two ADDR_EXPR nodes. It is wasteful in that
11782 all we need to do is test pointer equality for the arguments
11783 to the two ADDR_EXPR nodes. It is incorrect to use
11784 operand_equal_p as that function is NOT equivalent to a
11785 C equality test. It can in fact return false for two
11786 objects which would test as equal using the C equality
11787 operator. */
11788 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11789 return constant_boolean_node (equal
11790 ? code == EQ_EXPR : code != EQ_EXPR,
11791 type);
11792 }
11794 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11795 a MINUS_EXPR of a constant, we can convert it into a comparison with
11796 a revised constant as long as no overflow occurs. */
11797 if (TREE_CODE (arg1) == INTEGER_CST
11798 && (TREE_CODE (arg0) == PLUS_EXPR
11799 || TREE_CODE (arg0) == MINUS_EXPR)
11800 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11801 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11802 ? MINUS_EXPR : PLUS_EXPR,
11803 fold_convert (TREE_TYPE (arg0), arg1),
11804 TREE_OPERAND (arg0, 1), 0))
11805 && !TREE_OVERFLOW (tem))
11806 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11808 /* Similarly for a NEGATE_EXPR. */
11809 if (TREE_CODE (arg0) == NEGATE_EXPR
11810 && TREE_CODE (arg1) == INTEGER_CST
11811 && 0 != (tem = negate_expr (arg1))
11812 && TREE_CODE (tem) == INTEGER_CST
11813 && !TREE_OVERFLOW (tem))
11814 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11816 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11817 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11818 && TREE_CODE (arg1) == INTEGER_CST
11819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11820 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11821 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11822 fold_convert (TREE_TYPE (arg0), arg1),
11823 TREE_OPERAND (arg0, 1)));
11825 /* Transform comparisons of the form X +- C CMP X. */
11826 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11827 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11828 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11829 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11830 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11831 {
11832 tree cst = TREE_OPERAND (arg0, 1);
11834 if (code == EQ_EXPR
11835 && !integer_zerop (cst))
11836 return omit_two_operands (type, boolean_false_node,
11837 TREE_OPERAND (arg0, 0), arg1);
11838 else
11839 return omit_two_operands (type, boolean_true_node,
11840 TREE_OPERAND (arg0, 0), arg1);
11841 }
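/* For instance, x + 1 == x folds directly to false (x is still
   evaluated for side effects): adding a nonzero constant can never
   yield the same value here.  */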
11843 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11844 for !=. Don't do this for ordered comparisons due to overflow. */
11845 if (TREE_CODE (arg0) == MINUS_EXPR
11846 && integer_zerop (arg1))
11847 return fold_build2 (code, type,
11848 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11850 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11851 if (TREE_CODE (arg0) == ABS_EXPR
11852 && (integer_zerop (arg1) || real_zerop (arg1)))
11853 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11855 /* If this is an EQ or NE comparison with zero and ARG0 is
11856 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11857 two operations, but the latter can be done in one less insn
11858 on machines that have only two-operand insns or on which a
11859 constant cannot be the first operand. */
11860 if (TREE_CODE (arg0) == BIT_AND_EXPR
11861 && integer_zerop (arg1))
11862 {
11863 tree arg00 = TREE_OPERAND (arg0, 0);
11864 tree arg01 = TREE_OPERAND (arg0, 1);
11865 if (TREE_CODE (arg00) == LSHIFT_EXPR
11866 && integer_onep (TREE_OPERAND (arg00, 0)))
11867 {
11868 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11869 arg01, TREE_OPERAND (arg00, 1));
11870 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11871 build_int_cst (TREE_TYPE (arg0), 1));
11872 return fold_build2 (code, type,
11873 fold_convert (TREE_TYPE (arg1), tem), arg1);
11874 }
11875 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11876 && integer_onep (TREE_OPERAND (arg01, 0)))
11877 {
11878 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11879 arg00, TREE_OPERAND (arg01, 1));
11880 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11881 build_int_cst (TREE_TYPE (arg0), 1));
11882 return fold_build2 (code, type,
11883 fold_convert (TREE_TYPE (arg1), tem), arg1);
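/* Editorial example (not from the original source): "((1 << n) & flags) != 0"
   is rewritten as "((flags >> n) & 1) != 0", which needs one less
   instruction on two-operand machines.  */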
11887 /* If this is an NE or EQ comparison of zero against the result of a
11888 signed MOD operation whose second operand is a power of 2, make
11889 the MOD operation unsigned since it is simpler and equivalent. */
11890 if (integer_zerop (arg1)
11891 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11892 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11893 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11894 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11895 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11896 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11898 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11899 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11900 fold_convert (newtype,
11901 TREE_OPERAND (arg0, 0)),
11902 fold_convert (newtype,
11903 TREE_OPERAND (arg0, 1)));
11905 return fold_build2 (code, type, newmod,
11906 fold_convert (newtype, arg1));
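/* Editorial example (not from the original source): for signed int x,
   "x % 4 == 0" is rewritten as "(unsigned int) x % 4u == 0"; for a
   power-of-two divisor compared against zero the two forms agree.  */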
11909 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11910 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
11912 if (TREE_CODE (arg0) == BIT_AND_EXPR
11913 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11914 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11915 == INTEGER_CST
11916 && integer_pow2p (TREE_OPERAND (arg0, 1))
11917 && integer_zerop (arg1))
11919 tree itype = TREE_TYPE (arg0);
11920 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11921 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11923 /* Check for a valid shift count. */
11924 if (TREE_INT_CST_HIGH (arg001) == 0
11925 && TREE_INT_CST_LOW (arg001) < prec)
11927 tree arg01 = TREE_OPERAND (arg0, 1);
11928 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11929 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11930 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11931 can be rewritten as (X & (C2 << C1)) != 0. */
11932 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11934 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11935 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11936 return fold_build2 (code, type, tem, arg1);
11938 /* Otherwise, for signed (arithmetic) shifts,
11939 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11940 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11941 else if (!TYPE_UNSIGNED (itype))
11942 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11943 arg000, build_int_cst (itype, 0));
11944 /* Otherwise, for unsigned (logical) shifts,
11945 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11946 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
11947 else
11948 return omit_one_operand (type,
11949 code == EQ_EXPR ? integer_one_node
11950 : integer_zero_node,
11951 arg000);
11955 /* If this is an NE comparison of zero with an AND of one, remove the
11956 comparison since the AND will give the correct value. */
11957 if (code == NE_EXPR
11958 && integer_zerop (arg1)
11959 && TREE_CODE (arg0) == BIT_AND_EXPR
11960 && integer_onep (TREE_OPERAND (arg0, 1)))
11961 return fold_convert (type, arg0);
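/* Editorial example (not from the original source): "(x & 1) != 0"
   used as a truth value simply becomes "(x & 1)", converted to TYPE.  */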
11963 /* If we have (A & C) == C where C is a power of 2, convert this into
11964 (A & C) != 0. Similarly for NE_EXPR. */
11965 if (TREE_CODE (arg0) == BIT_AND_EXPR
11966 && integer_pow2p (TREE_OPERAND (arg0, 1))
11967 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11968 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11969 arg0, fold_convert (TREE_TYPE (arg0),
11970 integer_zero_node));
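/* Editorial example (not from the original source): "(flags & 8) == 8"
   becomes "(flags & 8) != 0", since the AND with the single bit 8 can
   only produce 0 or 8.  */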
11972 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11973 bit, then fold the expression into A < 0 or A >= 0. */
11974 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11975 if (tem)
11976 return tem;
11978 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11979 Similarly for NE_EXPR. */
11980 if (TREE_CODE (arg0) == BIT_AND_EXPR
11981 && TREE_CODE (arg1) == INTEGER_CST
11982 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11984 tree notc = fold_build1 (BIT_NOT_EXPR,
11985 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11986 TREE_OPERAND (arg0, 1));
11987 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11988 arg1, notc);
11989 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11990 if (integer_nonzerop (dandnotc))
11991 return omit_one_operand (type, rslt, arg0);
11994 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11995 Similarly for NE_EXPR. */
11996 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11997 && TREE_CODE (arg1) == INTEGER_CST
11998 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12000 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12001 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12002 TREE_OPERAND (arg0, 1), notd);
12003 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12004 if (integer_nonzerop (candnotd))
12005 return omit_one_operand (type, rslt, arg0);
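/* Editorial example (not from the original source): "(x | 4) == 3" is
   always false, because bit 2 is set on the left-hand side but clear
   in 3; the fold above then yields constant 0 for EQ_EXPR.  */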
12008 /* Optimize comparisons of strlen vs zero to a compare of the
12009 first character of the string vs zero. To wit,
12010 strlen(ptr) == 0 => *ptr == 0
12011 strlen(ptr) != 0 => *ptr != 0
12012 Other cases should reduce to one of these two (or a constant)
12013 due to the return value of strlen being unsigned. */
12014 if (TREE_CODE (arg0) == CALL_EXPR
12015 && integer_zerop (arg1))
12017 tree fndecl = get_callee_fndecl (arg0);
12019 if (fndecl
12020 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12021 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12022 && call_expr_nargs (arg0) == 1
12023 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12025 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12026 return fold_build2 (code, type, iref,
12027 build_int_cst (TREE_TYPE (iref), 0));
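/* Editorial example (not from the original source): "strlen (p) == 0"
   becomes "*p == 0", avoiding the library call when only emptiness is
   being tested.  */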
12031 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12032 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12033 if (TREE_CODE (arg0) == RSHIFT_EXPR
12034 && integer_zerop (arg1)
12035 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12037 tree arg00 = TREE_OPERAND (arg0, 0);
12038 tree arg01 = TREE_OPERAND (arg0, 1);
12039 tree itype = TREE_TYPE (arg00);
12040 if (TREE_INT_CST_HIGH (arg01) == 0
12041 && TREE_INT_CST_LOW (arg01)
12042 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12044 if (TYPE_UNSIGNED (itype))
12046 itype = signed_type_for (itype);
12047 arg00 = fold_convert (itype, arg00);
12049 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12050 type, arg00, build_int_cst (itype, 0));
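/* Editorial example (not from the original source): with 32-bit int x,
   "(x >> 31) != 0" becomes "x < 0", since only the sign bit survives
   the shift.  */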
12054 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12055 if (integer_zerop (arg1)
12056 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12057 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12058 TREE_OPERAND (arg0, 1));
12060 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12061 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12062 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12063 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12064 build_int_cst (TREE_TYPE (arg1), 0));
12065 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12066 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12067 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12068 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12069 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12070 build_int_cst (TREE_TYPE (arg1), 0));
12072 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12073 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12074 && TREE_CODE (arg1) == INTEGER_CST
12075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12076 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12077 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12078 TREE_OPERAND (arg0, 1), arg1));
12080 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12081 (X & C) == 0 when C is a single bit. */
12082 if (TREE_CODE (arg0) == BIT_AND_EXPR
12083 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12084 && integer_zerop (arg1)
12085 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12087 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12088 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12089 TREE_OPERAND (arg0, 1));
12090 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12091 type, tem, arg1);
12092 }
12094 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12095 constant C is a power of two, i.e. a single bit. */
12096 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12097 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12098 && integer_zerop (arg1)
12099 && integer_pow2p (TREE_OPERAND (arg0, 1))
12100 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12101 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12103 tree arg00 = TREE_OPERAND (arg0, 0);
12104 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12105 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12108 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12109 when C is a power of two, i.e. a single bit.  */
12110 if (TREE_CODE (arg0) == BIT_AND_EXPR
12111 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12112 && integer_zerop (arg1)
12113 && integer_pow2p (TREE_OPERAND (arg0, 1))
12114 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12115 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12117 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12118 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12119 arg000, TREE_OPERAND (arg0, 1));
12120 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12121 tem, build_int_cst (TREE_TYPE (tem), 0));
12124 if (integer_zerop (arg1)
12125 && tree_expr_nonzero_p (arg0))
12126 {
12127 tree res = constant_boolean_node (code == NE_EXPR, type);
12128 return omit_one_operand (type, res, arg0);
12129 }
12131 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12132 if (TREE_CODE (arg0) == NEGATE_EXPR
12133 && TREE_CODE (arg1) == NEGATE_EXPR)
12134 return fold_build2 (code, type,
12135 TREE_OPERAND (arg0, 0),
12136 TREE_OPERAND (arg1, 0));
12138 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
12139 if (TREE_CODE (arg0) == BIT_AND_EXPR
12140 && TREE_CODE (arg1) == BIT_AND_EXPR)
12141 {
12142 tree arg00 = TREE_OPERAND (arg0, 0);
12143 tree arg01 = TREE_OPERAND (arg0, 1);
12144 tree arg10 = TREE_OPERAND (arg1, 0);
12145 tree arg11 = TREE_OPERAND (arg1, 1);
12146 tree itype = TREE_TYPE (arg0);
12148 if (operand_equal_p (arg01, arg11, 0))
12149 return fold_build2 (code, type,
12150 fold_build2 (BIT_AND_EXPR, itype,
12151 fold_build2 (BIT_XOR_EXPR, itype,
12152 arg00, arg10),
12153 arg01),
12154 build_int_cst (itype, 0));
12156 if (operand_equal_p (arg01, arg10, 0))
12157 return fold_build2 (code, type,
12158 fold_build2 (BIT_AND_EXPR, itype,
12159 fold_build2 (BIT_XOR_EXPR, itype,
12160 arg00, arg11),
12161 arg01),
12162 build_int_cst (itype, 0));
12164 if (operand_equal_p (arg00, arg11, 0))
12165 return fold_build2 (code, type,
12166 fold_build2 (BIT_AND_EXPR, itype,
12167 fold_build2 (BIT_XOR_EXPR, itype,
12168 arg01, arg10),
12169 arg00),
12170 build_int_cst (itype, 0));
12172 if (operand_equal_p (arg00, arg10, 0))
12173 return fold_build2 (code, type,
12174 fold_build2 (BIT_AND_EXPR, itype,
12175 fold_build2 (BIT_XOR_EXPR, itype,
12176 arg01, arg11),
12177 arg00),
12178 build_int_cst (itype, 0));
12179 }
12181 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12182 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12183 {
12184 tree arg00 = TREE_OPERAND (arg0, 0);
12185 tree arg01 = TREE_OPERAND (arg0, 1);
12186 tree arg10 = TREE_OPERAND (arg1, 0);
12187 tree arg11 = TREE_OPERAND (arg1, 1);
12188 tree itype = TREE_TYPE (arg0);
12190 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12191 operand_equal_p guarantees no side-effects so we don't need
12192 to use omit_one_operand on Z. */
12193 if (operand_equal_p (arg01, arg11, 0))
12194 return fold_build2 (code, type, arg00, arg10);
12195 if (operand_equal_p (arg01, arg10, 0))
12196 return fold_build2 (code, type, arg00, arg11);
12197 if (operand_equal_p (arg00, arg11, 0))
12198 return fold_build2 (code, type, arg01, arg10);
12199 if (operand_equal_p (arg00, arg10, 0))
12200 return fold_build2 (code, type, arg01, arg11);
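/* Editorial example (not from the original source): "(x ^ z) == (y ^ z)"
   becomes "x == y"; the shared operand Z cancels on both sides, and
   operand_equal_p has already shown Z to be side-effect free.  */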
12202 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12203 if (TREE_CODE (arg01) == INTEGER_CST
12204 && TREE_CODE (arg11) == INTEGER_CST)
12205 return fold_build2 (code, type,
12206 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12207 fold_build2 (BIT_XOR_EXPR, itype,
12208 arg01, arg11)),
12209 arg10);
12210 }
12212 /* Attempt to simplify equality/inequality comparisons of complex
12213 values. Only lower the comparison if the result is known or
12214 can be simplified to a single scalar comparison. */
12215 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12216 || TREE_CODE (arg0) == COMPLEX_CST)
12217 && (TREE_CODE (arg1) == COMPLEX_EXPR
12218 || TREE_CODE (arg1) == COMPLEX_CST))
12219 {
12220 tree real0, imag0, real1, imag1;
12221 tree rcond, icond;
12223 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12224 {
12225 real0 = TREE_OPERAND (arg0, 0);
12226 imag0 = TREE_OPERAND (arg0, 1);
12227 }
12228 else
12229 {
12230 real0 = TREE_REALPART (arg0);
12231 imag0 = TREE_IMAGPART (arg0);
12232 }
12234 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12235 {
12236 real1 = TREE_OPERAND (arg1, 0);
12237 imag1 = TREE_OPERAND (arg1, 1);
12238 }
12239 else
12240 {
12241 real1 = TREE_REALPART (arg1);
12242 imag1 = TREE_IMAGPART (arg1);
12243 }
12245 rcond = fold_binary (code, type, real0, real1);
12246 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12247 {
12248 if (integer_zerop (rcond))
12249 {
12250 if (code == EQ_EXPR)
12251 return omit_two_operands (type, boolean_false_node,
12252 imag0, imag1);
12253 return fold_build2 (NE_EXPR, type, imag0, imag1);
12254 }
12255 else
12256 {
12257 if (code == NE_EXPR)
12258 return omit_two_operands (type, boolean_true_node,
12259 imag0, imag1);
12260 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12261 }
12262 }
12264 icond = fold_binary (code, type, imag0, imag1);
12265 if (icond && TREE_CODE (icond) == INTEGER_CST)
12266 {
12267 if (integer_zerop (icond))
12268 {
12269 if (code == EQ_EXPR)
12270 return omit_two_operands (type, boolean_false_node,
12271 real0, real1);
12272 return fold_build2 (NE_EXPR, type, real0, real1);
12273 }
12274 else
12275 {
12276 if (code == NE_EXPR)
12277 return omit_two_operands (type, boolean_true_node,
12278 real0, real1);
12279 return fold_build2 (EQ_EXPR, type, real0, real1);
12280 }
12281 }
12282 }
12283 return NULL_TREE;
12285 case LT_EXPR:
12286 case GT_EXPR:
12287 case LE_EXPR:
12288 case GE_EXPR:
12290 tem = fold_comparison (code, type, op0, op1);
12291 if (tem != NULL_TREE)
12292 return tem;
12294 /* Transform comparisons of the form X +- C CMP X. */
12295 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12296 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12297 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12298 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12299 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12300 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12301 {
12302 tree arg01 = TREE_OPERAND (arg0, 1);
12303 enum tree_code code0 = TREE_CODE (arg0);
12304 int is_positive;
12306 if (TREE_CODE (arg01) == REAL_CST)
12307 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12309 is_positive = tree_int_cst_sgn (arg01);
12311 /* (X - c) > X becomes false. */
12312 if (code == GT_EXPR
12313 && ((code0 == MINUS_EXPR && is_positive >= 0)
12314 || (code0 == PLUS_EXPR && is_positive <= 0)))
12316 if (TREE_CODE (arg01) == INTEGER_CST
12317 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12318 fold_overflow_warning (("assuming signed overflow does not "
12319 "occur when assuming that (X - c) > X "
12320 "is always false"),
12321 WARN_STRICT_OVERFLOW_ALL);
12322 return constant_boolean_node (0, type);
12325 /* Likewise (X + c) < X becomes false. */
12326 if (code == LT_EXPR
12327 && ((code0 == PLUS_EXPR && is_positive >= 0)
12328 || (code0 == MINUS_EXPR && is_positive <= 0)))
12330 if (TREE_CODE (arg01) == INTEGER_CST
12331 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12332 fold_overflow_warning (("assuming signed overflow does not "
12333 "occur when assuming that "
12334 "(X + c) < X is always false"),
12335 WARN_STRICT_OVERFLOW_ALL);
12336 return constant_boolean_node (0, type);
12339 /* Convert (X - c) <= X to true. */
12340 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12341 && code == LE_EXPR
12342 && ((code0 == MINUS_EXPR && is_positive >= 0)
12343 || (code0 == PLUS_EXPR && is_positive <= 0)))
12345 if (TREE_CODE (arg01) == INTEGER_CST
12346 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12347 fold_overflow_warning (("assuming signed overflow does not "
12348 "occur when assuming that "
12349 "(X - c) <= X is always true"),
12350 WARN_STRICT_OVERFLOW_ALL);
12351 return constant_boolean_node (1, type);
12354 /* Convert (X + c) >= X to true. */
12355 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12356 && code == GE_EXPR
12357 && ((code0 == PLUS_EXPR && is_positive >= 0)
12358 || (code0 == MINUS_EXPR && is_positive <= 0)))
12360 if (TREE_CODE (arg01) == INTEGER_CST
12361 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12362 fold_overflow_warning (("assuming signed overflow does not "
12363 "occur when assuming that "
12364 "(X + c) >= X is always true"),
12365 WARN_STRICT_OVERFLOW_ALL);
12366 return constant_boolean_node (1, type);
12369 if (TREE_CODE (arg01) == INTEGER_CST)
12371 /* Convert X + c > X and X - c < X to true for integers. */
12372 if (code == GT_EXPR
12373 && ((code0 == PLUS_EXPR && is_positive > 0)
12374 || (code0 == MINUS_EXPR && is_positive < 0)))
12376 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12377 fold_overflow_warning (("assuming signed overflow does "
12378 "not occur when assuming that "
12379 "(X + c) > X is always true"),
12380 WARN_STRICT_OVERFLOW_ALL);
12381 return constant_boolean_node (1, type);
12384 if (code == LT_EXPR
12385 && ((code0 == MINUS_EXPR && is_positive > 0)
12386 || (code0 == PLUS_EXPR && is_positive < 0)))
12388 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12389 fold_overflow_warning (("assuming signed overflow does "
12390 "not occur when assuming that "
12391 "(X - c) < X is always true"),
12392 WARN_STRICT_OVERFLOW_ALL);
12393 return constant_boolean_node (1, type);
12396 /* Convert X + c <= X and X - c >= X to false for integers. */
12397 if (code == LE_EXPR
12398 && ((code0 == PLUS_EXPR && is_positive > 0)
12399 || (code0 == MINUS_EXPR && is_positive < 0)))
12401 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12402 fold_overflow_warning (("assuming signed overflow does "
12403 "not occur when assuming that "
12404 "(X + c) <= X is always false"),
12405 WARN_STRICT_OVERFLOW_ALL);
12406 return constant_boolean_node (0, type);
12409 if (code == GE_EXPR
12410 && ((code0 == MINUS_EXPR && is_positive > 0)
12411 || (code0 == PLUS_EXPR && is_positive < 0)))
12413 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12414 fold_overflow_warning (("assuming signed overflow does "
12415 "not occur when assuming that "
12416 "(X - c) >= X is always false"),
12417 WARN_STRICT_OVERFLOW_ALL);
12418 return constant_boolean_node (0, type);
12423 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12424 This transformation affects the cases which are handled in later
12425 optimizations involving comparisons with non-negative constants. */
12426 if (TREE_CODE (arg1) == INTEGER_CST
12427 && TREE_CODE (arg0) != INTEGER_CST
12428 && tree_int_cst_sgn (arg1) > 0)
12430 if (code == GE_EXPR)
12432 arg1 = const_binop (MINUS_EXPR, arg1,
12433 build_int_cst (TREE_TYPE (arg1), 1), 0);
12434 return fold_build2 (GT_EXPR, type, arg0,
12435 fold_convert (TREE_TYPE (arg0), arg1));
12437 if (code == LT_EXPR)
12439 arg1 = const_binop (MINUS_EXPR, arg1,
12440 build_int_cst (TREE_TYPE (arg1), 1), 0);
12441 return fold_build2 (LE_EXPR, type, arg0,
12442 fold_convert (TREE_TYPE (arg0), arg1));
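/* Editorial example (not from the original source): "x >= 5" becomes
   "x > 4" and "x < 5" becomes "x <= 4", so the later folds only have
   to recognize the GT/LE forms for positive constants.  */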
12446 /* Comparisons with the highest or lowest possible integer of
12447 the specified precision will have known values. */
12449 tree arg1_type = TREE_TYPE (arg1);
12450 unsigned int width = TYPE_PRECISION (arg1_type);
12452 if (TREE_CODE (arg1) == INTEGER_CST
12453 && !TREE_OVERFLOW (arg1)
12454 && width <= 2 * HOST_BITS_PER_WIDE_INT
12455 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12457 HOST_WIDE_INT signed_max_hi;
12458 unsigned HOST_WIDE_INT signed_max_lo;
12459 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12461 if (width <= HOST_BITS_PER_WIDE_INT)
12462 {
12463 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12464 - 1;
12465 signed_max_hi = 0;
12466 max_hi = 0;
12468 if (TYPE_UNSIGNED (arg1_type))
12469 {
12470 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12471 min_lo = 0;
12472 min_hi = 0;
12473 }
12474 else
12475 {
12476 max_lo = signed_max_lo;
12477 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12478 min_hi = -1;
12479 }
12480 }
12481 else
12482 {
12483 width -= HOST_BITS_PER_WIDE_INT;
12484 signed_max_lo = -1;
12485 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12486 - 1;
12487 max_lo = -1;
12488 min_lo = 0;
12490 if (TYPE_UNSIGNED (arg1_type))
12491 {
12492 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12493 min_hi = 0;
12494 }
12495 else
12496 {
12497 max_hi = signed_max_hi;
12498 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12499 }
12500 }
12502 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12503 && TREE_INT_CST_LOW (arg1) == max_lo)
12504 switch (code)
12505 {
12506 case GT_EXPR:
12507 return omit_one_operand (type, integer_zero_node, arg0);
12509 case GE_EXPR:
12510 return fold_build2 (EQ_EXPR, type, op0, op1);
12512 case LE_EXPR:
12513 return omit_one_operand (type, integer_one_node, arg0);
12515 case LT_EXPR:
12516 return fold_build2 (NE_EXPR, type, op0, op1);
12518 /* The GE_EXPR and LT_EXPR cases above are not normally
12519 reached because of previous transformations.  */
12521 default:
12522 break;
12523 }
12524 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12525 == max_hi
12526 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12527 switch (code)
12528 {
12529 case GT_EXPR:
12530 arg1 = const_binop (PLUS_EXPR, arg1,
12531 build_int_cst (TREE_TYPE (arg1), 1), 0);
12532 return fold_build2 (EQ_EXPR, type,
12533 fold_convert (TREE_TYPE (arg1), arg0),
12534 arg1);
12535 case LE_EXPR:
12536 arg1 = const_binop (PLUS_EXPR, arg1,
12537 build_int_cst (TREE_TYPE (arg1), 1), 0);
12538 return fold_build2 (NE_EXPR, type,
12539 fold_convert (TREE_TYPE (arg1), arg0),
12540 arg1);
12541 default:
12542 break;
12543 }
12544 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12545 == min_hi
12546 && TREE_INT_CST_LOW (arg1) == min_lo)
12547 switch (code)
12548 {
12549 case LT_EXPR:
12550 return omit_one_operand (type, integer_zero_node, arg0);
12552 case LE_EXPR:
12553 return fold_build2 (EQ_EXPR, type, op0, op1);
12555 case GE_EXPR:
12556 return omit_one_operand (type, integer_one_node, arg0);
12558 case GT_EXPR:
12559 return fold_build2 (NE_EXPR, type, op0, op1);
12561 default:
12562 break;
12563 }
12564 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12565 == min_hi
12566 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12567 switch (code)
12568 {
12569 case GE_EXPR:
12570 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12571 return fold_build2 (NE_EXPR, type,
12572 fold_convert (TREE_TYPE (arg1), arg0),
12573 arg1);
12574 case LT_EXPR:
12575 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12576 return fold_build2 (EQ_EXPR, type,
12577 fold_convert (TREE_TYPE (arg1), arg0),
12578 arg1);
12579 default:
12580 break;
12581 }
12583 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12584 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12585 && TYPE_UNSIGNED (arg1_type)
12586 /* We will flip the signedness of the comparison operator
12587 associated with the mode of arg1, so the sign bit is
12588 specified by this mode. Check that arg1 is the signed
12589 max associated with this sign bit. */
12590 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12591 /* signed_type does not work on pointer types. */
12592 && INTEGRAL_TYPE_P (arg1_type))
12594 /* The following case also applies to X < signed_max+1
12595 and X >= signed_max+1 because of previous transformations.  */
12596 if (code == LE_EXPR || code == GT_EXPR)
12597 {
12598 tree st;
12599 st = signed_type_for (TREE_TYPE (arg1));
12600 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12601 type, fold_convert (st, arg0),
12602 build_int_cst (st, 0));
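/* Editorial example (not from the original source): for unsigned int x,
   "x <= 0x7fffffff" becomes "(int) x >= 0", turning a comparison
   against the signed maximum into a plain sign test.  */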
12608 /* If we are comparing an ABS_EXPR with a constant, we can
12609 convert all the cases into explicit comparisons, but they may
12610 well not be faster than doing the ABS and one comparison.
12611 But ABS (X) <= C is a range comparison, which becomes a subtraction
12612 and a comparison, and is probably faster. */
12613 if (code == LE_EXPR
12614 && TREE_CODE (arg1) == INTEGER_CST
12615 && TREE_CODE (arg0) == ABS_EXPR
12616 && ! TREE_SIDE_EFFECTS (arg0)
12617 && (0 != (tem = negate_expr (arg1)))
12618 && TREE_CODE (tem) == INTEGER_CST
12619 && !TREE_OVERFLOW (tem))
12620 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12621 build2 (GE_EXPR, type,
12622 TREE_OPERAND (arg0, 0), tem),
12623 build2 (LE_EXPR, type,
12624 TREE_OPERAND (arg0, 0), arg1));
12626 /* Convert ABS_EXPR<x> >= 0 to true. */
12627 strict_overflow_p = false;
12628 if (code == GE_EXPR
12629 && (integer_zerop (arg1)
12630 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12631 && real_zerop (arg1)))
12632 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12634 if (strict_overflow_p)
12635 fold_overflow_warning (("assuming signed overflow does not occur "
12636 "when simplifying comparison of "
12637 "absolute value and zero"),
12638 WARN_STRICT_OVERFLOW_CONDITIONAL);
12639 return omit_one_operand (type, integer_one_node, arg0);
12642 /* Convert ABS_EXPR<x> < 0 to false. */
12643 strict_overflow_p = false;
12644 if (code == LT_EXPR
12645 && (integer_zerop (arg1) || real_zerop (arg1))
12646 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12648 if (strict_overflow_p)
12649 fold_overflow_warning (("assuming signed overflow does not occur "
12650 "when simplifying comparison of "
12651 "absolute value and zero"),
12652 WARN_STRICT_OVERFLOW_CONDITIONAL);
12653 return omit_one_operand (type, integer_zero_node, arg0);
12656 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12657 and similarly for >= into !=. */
12658 if ((code == LT_EXPR || code == GE_EXPR)
12659 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12660 && TREE_CODE (arg1) == LSHIFT_EXPR
12661 && integer_onep (TREE_OPERAND (arg1, 0)))
12662 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12663 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12664 TREE_OPERAND (arg1, 1)),
12665 build_int_cst (TREE_TYPE (arg0), 0));
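/* Editorial example (not from the original source): for unsigned x,
   "x < (1 << y)" becomes "(x >> y) == 0", trading the left shift for
   a right shift whose result is tested against zero.  */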
12667 if ((code == LT_EXPR || code == GE_EXPR)
12668 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12669 && (TREE_CODE (arg1) == NOP_EXPR
12670 || TREE_CODE (arg1) == CONVERT_EXPR)
12671 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12672 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12673 return
12674 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12675 fold_convert (TREE_TYPE (arg0),
12676 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12677 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12678 1))),
12679 build_int_cst (TREE_TYPE (arg0), 0));
12683 case UNORDERED_EXPR:
12684 case ORDERED_EXPR:
12685 case UNLT_EXPR:
12686 case UNLE_EXPR:
12687 case UNGT_EXPR:
12688 case UNGE_EXPR:
12689 case UNEQ_EXPR:
12690 case LTGT_EXPR:
12691 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12692 {
12693 t1 = fold_relational_const (code, type, arg0, arg1);
12694 if (t1 != NULL_TREE)
12695 return t1;
12696 }
12698 /* If the first operand is NaN, the result is constant. */
12699 if (TREE_CODE (arg0) == REAL_CST
12700 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12701 && (code != LTGT_EXPR || ! flag_trapping_math))
12703 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12704 ? integer_zero_node
12705 : integer_one_node;
12706 return omit_one_operand (type, t1, arg1);
12709 /* If the second operand is NaN, the result is constant. */
12710 if (TREE_CODE (arg1) == REAL_CST
12711 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12712 && (code != LTGT_EXPR || ! flag_trapping_math))
12714 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12715 ? integer_zero_node
12716 : integer_one_node;
12717 return omit_one_operand (type, t1, arg0);
12720 /* Simplify unordered comparison of something with itself. */
12721 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12722 && operand_equal_p (arg0, arg1, 0))
12723 return constant_boolean_node (1, type);
12725 if (code == LTGT_EXPR
12726 && !flag_trapping_math
12727 && operand_equal_p (arg0, arg1, 0))
12728 return constant_boolean_node (0, type);
12730 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12731 {
12732 tree targ0 = strip_float_extensions (arg0);
12733 tree targ1 = strip_float_extensions (arg1);
12734 tree newtype = TREE_TYPE (targ0);
12736 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12737 newtype = TREE_TYPE (targ1);
12739 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12740 return fold_build2 (code, type, fold_convert (newtype, targ0),
12741 fold_convert (newtype, targ1));
12742 }
12744 return NULL_TREE;
12746 case COMPOUND_EXPR:
12747 /* When pedantic, a compound expression can be neither an lvalue
12748 nor an integer constant expression. */
12749 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12750 return NULL_TREE;
12751 /* Don't let (0, 0) be null pointer constant. */
12752 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12753 : fold_convert (type, arg1);
12754 return pedantic_non_lvalue (tem);
12756 case COMPLEX_EXPR:
12757 if ((TREE_CODE (arg0) == REAL_CST
12758 && TREE_CODE (arg1) == REAL_CST)
12759 || (TREE_CODE (arg0) == INTEGER_CST
12760 && TREE_CODE (arg1) == INTEGER_CST))
12761 return build_complex (type, arg0, arg1);
12762 return NULL_TREE;
12764 case ASSERT_EXPR:
12765 /* An ASSERT_EXPR should never be passed to fold_binary.  */
12766 gcc_unreachable ();
12768 default:
12769 return NULL_TREE;
12770 } /* switch (code) */
12771 }
12773 /* Callback for walk_tree, looking for LABEL_EXPR.
12774 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12775 Do not check the sub-tree of GOTO_EXPR. */
12777 static tree
12778 contains_label_1 (tree *tp,
12779 int *walk_subtrees,
12780 void *data ATTRIBUTE_UNUSED)
12781 {
12782 switch (TREE_CODE (*tp))
12783 {
12784 case LABEL_EXPR:
12785 return *tp;
12786 case GOTO_EXPR:
12787 *walk_subtrees = 0;
12789 /* ... fall through ...  */
12790 default:
12791 return NULL_TREE;
12792 }
12793 }
12794 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12795 accessible from outside the sub-tree. Returns NULL_TREE if no
12796 addressable label is found. */
12798 static bool
12799 contains_label_p (tree st)
12800 {
12801 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
12802 }
12804 /* Fold a ternary expression of code CODE and type TYPE with operands
12805 OP0, OP1, and OP2. Return the folded expression if folding is
12806 successful. Otherwise, return NULL_TREE. */
12808 tree
12809 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12810 {
12811 tree tem;
12812 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12813 enum tree_code_class kind = TREE_CODE_CLASS (code);
12815 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12816 && TREE_CODE_LENGTH (code) == 3);
12818 /* Strip any conversions that don't change the mode. This is safe
12819 for every expression, except for a comparison expression because
12820 its signedness is derived from its operands. So, in the latter
12821 case, only strip conversions that don't change the signedness.
12823 Note that this is done as an internal manipulation within the
12824 constant folder, in order to find the simplest representation of
12825 the arguments so that their form can be studied.  In any case,
12826 the appropriate type conversions should be put back in the tree
12827 that will get out of the constant folder. */
12829 if (op0)
12830 {
12831 arg0 = op0;
12832 STRIP_NOPS (arg0);
12833 }
12835 if (op1)
12836 {
12837 arg1 = op1;
12838 STRIP_NOPS (arg1);
12839 }
12840 switch (code)
12841 {
12842 case COMPONENT_REF:
12843 if (TREE_CODE (arg0) == CONSTRUCTOR
12844 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12845 {
12846 unsigned HOST_WIDE_INT idx;
12847 tree field, value;
12848 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12849 if (field == arg1)
12850 return value;
12851 }
12852 return NULL_TREE;
12854 case COND_EXPR:
12855 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12856 so all simple results must be passed through pedantic_non_lvalue. */
12857 if (TREE_CODE (arg0) == INTEGER_CST)
12858 {
12859 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12860 tem = integer_zerop (arg0) ? op2 : op1;
12861 /* Only optimize constant conditions when the selected branch
12862 has the same type as the COND_EXPR. This avoids optimizing
12863 away "c ? x : throw", where the throw has a void type.
12864 Avoid throwing away that operand which contains label. */
12865 if ((!TREE_SIDE_EFFECTS (unused_op)
12866 || !contains_label_p (unused_op))
12867 && (! VOID_TYPE_P (TREE_TYPE (tem))
12868 || VOID_TYPE_P (type)))
12869 return pedantic_non_lvalue (tem);
12870 }
12872 if (operand_equal_p (arg1, op2, 0))
12873 return pedantic_omit_one_operand (type, arg1, arg0);
12875 /* If we have A op B ? A : C, we may be able to convert this to a
12876 simpler expression, depending on the operation and the values
12877 of B and C. Signed zeros prevent all of these transformations,
12878 for reasons given above each one.
12880 Also try swapping the arguments and inverting the conditional. */
12881 if (COMPARISON_CLASS_P (arg0)
12882 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12883 arg1, TREE_OPERAND (arg0, 1))
12884 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12885 {
12886 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12887 if (tem)
12888 return tem;
12889 }
12891 if (COMPARISON_CLASS_P (arg0)
12892 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12894 TREE_OPERAND (arg0, 1))
12895 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12896 {
12897 tem = fold_truth_not_expr (arg0);
12898 if (tem && COMPARISON_CLASS_P (tem))
12899 {
12900 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12901 if (tem)
12902 return tem;
12903 }
12904 }
12906 /* If the second operand is simpler than the third, swap them
12907 since that produces better jump optimization results. */
12908 if (truth_value_p (TREE_CODE (arg0))
12909 && tree_swap_operands_p (op1, op2, false))
12910 {
12911 /* See if this can be inverted.  If it can't, possibly because
12912 it was a floating-point inequality comparison, don't do
12913 anything.  */
12914 tem = fold_truth_not_expr (arg0);
12915 if (tem)
12916 return fold_build3 (code, type, tem, op2, op1);
12917 }
12919 /* Convert A ? 1 : 0 to simply A. */
12920 if (integer_onep (op1)
12921 && integer_zerop (op2)
12922 /* If we try to convert OP0 to our type, the
12923 call to fold will try to move the conversion inside
12924 a COND, which will recurse. In that case, the COND_EXPR
12925 is probably the best choice, so leave it alone. */
12926 && type == TREE_TYPE (arg0))
12927 return pedantic_non_lvalue (arg0);
12929 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12930 over COND_EXPR in cases such as floating point comparisons. */
12931 if (integer_zerop (op1)
12932 && integer_onep (op2)
12933 && truth_value_p (TREE_CODE (arg0)))
12934 return pedantic_non_lvalue (fold_convert (type,
12935 invert_truthvalue (arg0)));
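/* Editorial example (not from the original source): "cmp ? 0 : 1"
   becomes "!cmp"; with integer operands, "(a < b) ? 0 : 1" folds to
   "a >= b" once the comparison is inverted.  */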
12937 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12938 if (TREE_CODE (arg0) == LT_EXPR
12939 && integer_zerop (TREE_OPERAND (arg0, 1))
12940 && integer_zerop (op2)
12941 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12942 {
12943 /* sign_bit_p only checks ARG1 bits within A's precision.
12944 If <sign bit of A> has wider type than A, bits outside
12945 of A's precision in <sign bit of A> need to be checked.
12946 If they are all 0, this optimization needs to be done
12947 in unsigned A's type, if they are all 1 in signed A's type,
12948 otherwise this can't be done. */
12949 if (TYPE_PRECISION (TREE_TYPE (tem))
12950 < TYPE_PRECISION (TREE_TYPE (arg1))
12951 && TYPE_PRECISION (TREE_TYPE (tem))
12952 < TYPE_PRECISION (type))
12953 {
12954 unsigned HOST_WIDE_INT mask_lo;
12955 HOST_WIDE_INT mask_hi;
12956 int inner_width, outer_width;
12957 tree tem_type;
12959 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12960 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12961 if (outer_width > TYPE_PRECISION (type))
12962 outer_width = TYPE_PRECISION (type);
12964 if (outer_width > HOST_BITS_PER_WIDE_INT)
12965 {
12966 mask_hi = ((unsigned HOST_WIDE_INT) -1
12967 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12968 mask_lo = -1;
12969 }
12970 else
12971 {
12972 mask_hi = 0;
12973 mask_lo = ((unsigned HOST_WIDE_INT) -1
12974 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12975 }
12976 if (inner_width > HOST_BITS_PER_WIDE_INT)
12977 {
12978 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12979 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12980 mask_lo = 0;
12981 }
12982 else
12983 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12984 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12986 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12987 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12988 {
12989 tem_type = signed_type_for (TREE_TYPE (tem));
12990 tem = fold_convert (tem_type, tem);
12991 }
12992 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12993 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12994 {
12995 tem_type = unsigned_type_for (TREE_TYPE (tem));
12996 tem = fold_convert (tem_type, tem);
12997 }
12998 else
12999 tem = NULL_TREE;
13000 }
13002 if (tem)
13003 return fold_convert (type,
13004 fold_build2 (BIT_AND_EXPR,
13005 TREE_TYPE (tem), tem,
13006 fold_convert (TREE_TYPE (tem),
13007 arg1)));
13008 }
13010 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13011 already handled above. */
13012 if (TREE_CODE (arg0) == BIT_AND_EXPR
13013 && integer_onep (TREE_OPERAND (arg0, 1))
13014 && integer_zerop (op2)
13015 && integer_pow2p (arg1))
13017 tree tem = TREE_OPERAND (arg0, 0);
13019 if (TREE_CODE (tem) == RSHIFT_EXPR
13020 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13021 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13022 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13023 return fold_build2 (BIT_AND_EXPR, type,
13024 TREE_OPERAND (tem, 0), arg1);
13027 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13028 is probably obsolete because the first operand should be a
13029 truth value (that's why we have the two cases above), but let's
13030 leave it in until we can confirm this for all front-ends. */
13031 if (integer_zerop (op2)
13032 && TREE_CODE (arg0) == NE_EXPR
13033 && integer_zerop (TREE_OPERAND (arg0, 1))
13034 && integer_pow2p (arg1)
13035 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13036 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13037 arg1, OEP_ONLY_CONST))
13038 return pedantic_non_lvalue (fold_convert (type,
13039 TREE_OPERAND (arg0, 0)));
13041 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13042 if (integer_zerop (op2)
13043 && truth_value_p (TREE_CODE (arg0))
13044 && truth_value_p (TREE_CODE (arg1)))
13045 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13046 fold_convert (type, arg0),
13047 arg1);
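/* Editorial example (not from the original source): with truth values
   a and b, "a ? b : 0" becomes "a && b", matching the short-circuit
   evaluation of the conditional.  */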
13049 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13050 if (integer_onep (op2)
13051 && truth_value_p (TREE_CODE (arg0))
13052 && truth_value_p (TREE_CODE (arg1)))
13053 {
13054 /* Only perform transformation if ARG0 is easily inverted.  */
13055 tem = fold_truth_not_expr (arg0);
13056 if (tem)
13057 return fold_build2 (TRUTH_ORIF_EXPR, type,
13058 fold_convert (type, tem),
13059 arg1);
13060 }
13062 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13063 if (integer_zerop (arg1)
13064 && truth_value_p (TREE_CODE (arg0))
13065 && truth_value_p (TREE_CODE (op2)))
13066 {
13067 /* Only perform transformation if ARG0 is easily inverted.  */
13068 tem = fold_truth_not_expr (arg0);
13069 if (tem)
13070 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13071 fold_convert (type, tem),
13072 op2);
13073 }
13075 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13076 if (integer_onep (arg1)
13077 && truth_value_p (TREE_CODE (arg0))
13078 && truth_value_p (TREE_CODE (op2)))
13079 return fold_build2 (TRUTH_ORIF_EXPR, type,
13080 fold_convert (type, arg0),
13081 op2);
13083 return NULL_TREE;
13085 case CALL_EXPR:
13086 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13087 of fold_ternary on them. */
13088 gcc_unreachable ();
13090 case BIT_FIELD_REF:
13091 if ((TREE_CODE (arg0) == VECTOR_CST
13092 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13093 && type == TREE_TYPE (TREE_TYPE (arg0)))
13094 {
13095 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13096 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13098 if (width != 0
13099 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13100 && (idx % width) == 0
13101 && (idx = idx / width)
13102 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13103 {
13104 tree elements = NULL_TREE;
13106 if (TREE_CODE (arg0) == VECTOR_CST)
13107 elements = TREE_VECTOR_CST_ELTS (arg0);
13108 else
13109 {
13110 unsigned HOST_WIDE_INT idx;
13111 tree value;
13113 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13114 elements = tree_cons (NULL_TREE, value, elements);
13115 }
13116 while (idx-- > 0 && elements)
13117 elements = TREE_CHAIN (elements);
13118 if (elements)
13119 return TREE_VALUE (elements);
13120 else
13121 return fold_convert (type, integer_zero_node);
13122 }
13123 }
13124 return NULL_TREE;
13126 default:
13127 return NULL_TREE;
13128 } /* switch (code) */
13129 }
13131 /* Perform constant folding and related simplification of EXPR.
13132 The related simplifications include x*1 => x, x*0 => 0, etc.,
13133 and application of the associative law.
13134 NOP_EXPR conversions may be removed freely (as long as we
13135 are careful not to change the type of the overall expression).
13136 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13137 but we can constant-fold them if they have constant operands. */
13139 #ifdef ENABLE_FOLD_CHECKING
13140 # define fold(x) fold_1 (x)
13141 static tree fold_1 (tree);
13142 #endif
13144 tree
13145 fold (tree expr)
13146 {
13147 const tree t = expr;
13148 enum tree_code code = TREE_CODE (t);
13149 enum tree_code_class kind = TREE_CODE_CLASS (code);
13150 tree tem;
13152 /* Return right away if a constant. */
13153 if (kind == tcc_constant)
13154 return t;
13156 /* CALL_EXPR-like objects with variable numbers of operands are
13157 treated specially. */
13158 if (kind == tcc_vl_exp)
13159 {
13160 if (code == CALL_EXPR)
13161 {
13162 tem = fold_call_expr (expr, false);
13163 return tem ? tem : expr;
13164 }
13165 return expr;
13166 }
13168 if (IS_EXPR_CODE_CLASS (kind)
13169 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13170 {
13171 tree type = TREE_TYPE (t);
13172 tree op0, op1, op2;
13174 switch (TREE_CODE_LENGTH (code))
13175 {
13176 case 1:
13177 op0 = TREE_OPERAND (t, 0);
13178 tem = fold_unary (code, type, op0);
13179 return tem ? tem : expr;
13180 case 2:
13181 op0 = TREE_OPERAND (t, 0);
13182 op1 = TREE_OPERAND (t, 1);
13183 tem = fold_binary (code, type, op0, op1);
13184 return tem ? tem : expr;
13185 case 3:
13186 op0 = TREE_OPERAND (t, 0);
13187 op1 = TREE_OPERAND (t, 1);
13188 op2 = TREE_OPERAND (t, 2);
13189 tem = fold_ternary (code, type, op0, op1, op2);
13190 return tem ? tem : expr;
13191 default:
13192 break;
13193 }
13194 }
13196 switch (code)
13197 {
13198 case ARRAY_REF:
13199 {
13200 tree op0 = TREE_OPERAND (t, 0);
13201 tree op1 = TREE_OPERAND (t, 1);
13203 if (TREE_CODE (op1) == INTEGER_CST
13204 && TREE_CODE (op0) == CONSTRUCTOR
13205 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13206 {
13207 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13208 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13209 unsigned HOST_WIDE_INT begin = 0;
13211 /* Find a matching index by means of a binary search. */
13212 while (begin != end)
13213 {
13214 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13215 tree index = VEC_index (constructor_elt, elts, middle)->index;
13217 if (TREE_CODE (index) == INTEGER_CST
13218 && tree_int_cst_lt (index, op1))
13219 begin = middle + 1;
13220 else if (TREE_CODE (index) == INTEGER_CST
13221 && tree_int_cst_lt (op1, index))
13222 end = middle;
13223 else if (TREE_CODE (index) == RANGE_EXPR
13224 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13225 begin = middle + 1;
13226 else if (TREE_CODE (index) == RANGE_EXPR
13227 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13228 end = middle;
13229 else
13230 return VEC_index (constructor_elt, elts, middle)->value;
13231 }
13232 }
13234 return t;
13235 }
13237 case CONST_DECL:
13238 return fold (DECL_INITIAL (t));
13240 default:
13241 return t;
13242 } /* switch (code) */
13243 }
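/* Editorial example (not from the original source): a call such as
   fold (build2 (PLUS_EXPR, integer_type_node, integer_one_node,
   integer_one_node)) dispatches the PLUS_EXPR to fold_binary above
   and yields an INTEGER_CST of value 2.  */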
13245 #ifdef ENABLE_FOLD_CHECKING
13246 #undef fold
13248 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13249 static void fold_check_failed (const_tree, const_tree);
13250 void print_fold_checksum (const_tree);
13252 /* When --enable-checking=fold, compute a digest of expr before
13253 and after actual fold call to see if fold did not accidentally
13254 change original expr. */
13256 tree
13257 fold (tree expr)
13258 {
13259 tree ret;
13260 struct md5_ctx ctx;
13261 unsigned char checksum_before[16], checksum_after[16];
13262 htab_t ht;
13264 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13265 md5_init_ctx (&ctx);
13266 fold_checksum_tree (expr, &ctx, ht);
13267 md5_finish_ctx (&ctx, checksum_before);
13268 htab_empty (ht);
13270 ret = fold_1 (expr);
13272 md5_init_ctx (&ctx);
13273 fold_checksum_tree (expr, &ctx, ht);
13274 md5_finish_ctx (&ctx, checksum_after);
13275 htab_delete (ht);
13277 if (memcmp (checksum_before, checksum_after, 16))
13278 fold_check_failed (expr, ret);
13280 return ret;
13281 }
13283 void
13284 print_fold_checksum (const_tree expr)
13285 {
13286 struct md5_ctx ctx;
13287 unsigned char checksum[16], cnt;
13288 htab_t ht;
13290 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13291 md5_init_ctx (&ctx);
13292 fold_checksum_tree (expr, &ctx, ht);
13293 md5_finish_ctx (&ctx, checksum);
13295 for (cnt = 0; cnt < 16; ++cnt)
13296 fprintf (stderr, "%02x", checksum[cnt]);
13297 putc ('\n', stderr);
13298 }
13300 static void
13301 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13302 {
13303 internal_error ("fold check: original tree changed by fold");
13304 }
13306 static void
13307 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13308 {
13309 const void **slot;
13310 enum tree_code code;
13311 struct tree_function_decl buf;
13312 int i, len;
13314 recursive_label:
13316 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13317 <= sizeof (struct tree_function_decl))
13318 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13319 if (expr == NULL)
13320 return;
13321 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13322 if (*slot != NULL)
13323 return;
13324 *slot = expr;
13325 code = TREE_CODE (expr);
13326 if (TREE_CODE_CLASS (code) == tcc_declaration
13327 && DECL_ASSEMBLER_NAME_SET_P (expr))
13329 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13330 memcpy ((char *) &buf, expr, tree_size (expr));
13331 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13332 expr = (tree) &buf;
13334 else if (TREE_CODE_CLASS (code) == tcc_type
13335 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13336 || TYPE_CACHED_VALUES_P (expr)
13337 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13339 /* Allow these fields to be modified. */
13340 tree tmp;
13341 memcpy ((char *) &buf, expr, tree_size (expr));
13342 expr = tmp = (tree) &buf;
13343 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13344 TYPE_POINTER_TO (tmp) = NULL;
13345 TYPE_REFERENCE_TO (tmp) = NULL;
13346 if (TYPE_CACHED_VALUES_P (tmp))
13348 TYPE_CACHED_VALUES_P (tmp) = 0;
13349 TYPE_CACHED_VALUES (tmp) = NULL;
13352 md5_process_bytes (expr, tree_size (expr), ctx);
13353 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13354 if (TREE_CODE_CLASS (code) != tcc_type
13355 && TREE_CODE_CLASS (code) != tcc_declaration
13356 && code != TREE_LIST
13357 && code != SSA_NAME)
13358 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13359 switch (TREE_CODE_CLASS (code))
13360 {
13361 case tcc_constant:
13362 switch (code)
13363 {
13364 case STRING_CST:
13365 md5_process_bytes (TREE_STRING_POINTER (expr),
13366 TREE_STRING_LENGTH (expr), ctx);
13367 break;
13368 case COMPLEX_CST:
13369 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13370 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13371 break;
13372 case VECTOR_CST:
13373 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13374 break;
13375 default:
13376 break;
13377 }
13378 break;
13379 case tcc_exceptional:
13380 switch (code)
13381 {
13382 case TREE_LIST:
13383 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13384 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13385 expr = TREE_CHAIN (expr);
13386 goto recursive_label;
13387 break;
13388 case TREE_VEC:
13389 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13390 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13391 break;
13392 default:
13393 break;
13394 }
13395 break;
13396 case tcc_expression:
13397 case tcc_reference:
13398 case tcc_comparison:
13399 case tcc_unary:
13400 case tcc_binary:
13401 case tcc_statement:
13402 case tcc_vl_exp:
13403 len = TREE_OPERAND_LENGTH (expr);
13404 for (i = 0; i < len; ++i)
13405 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13406 break;
13407 case tcc_declaration:
13408 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13409 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13410 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13412 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13413 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13414 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13415 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13416 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13418 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13419 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13421 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13422 {
13423 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13424 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13425 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13426 }
13427 break;
13428 case tcc_type:
13429 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13430 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13431 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13432 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13433 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13434 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13435 if (INTEGRAL_TYPE_P (expr)
13436 || SCALAR_FLOAT_TYPE_P (expr))
13438 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13439 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13441 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13442 if (TREE_CODE (expr) == RECORD_TYPE
13443 || TREE_CODE (expr) == UNION_TYPE
13444 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13445 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13446 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13447 break;
13448 default:
13449 break;
13450 }
13451 }
13453 /* Helper function for outputting the checksum of a tree T. When
13454 debugging with gdb, you can "define mynext" to be "next" followed
13455 by "call debug_fold_checksum (op0)", then just trace down till the
13456 outputs differ.  */
13458 void
13459 debug_fold_checksum (const_tree t)
13460 {
13461 int i;
13462 unsigned char checksum[16];
13463 struct md5_ctx ctx;
13464 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13466 md5_init_ctx (&ctx);
13467 fold_checksum_tree (t, &ctx, ht);
13468 md5_finish_ctx (&ctx, checksum);
13471 for (i = 0; i < 16; i++)
13472 fprintf (stderr, "%d ", checksum[i]);
13474 fprintf (stderr, "\n");
13475 }
13479 /* Fold a unary tree expression with code CODE of type TYPE with an
13480 operand OP0. Return a folded expression if successful. Otherwise,
13481 return a tree expression with code CODE of type TYPE with an
13482 operand OP0.  */
13484 tree
13485 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13486 {
13487 tree tem;
13488 #ifdef ENABLE_FOLD_CHECKING
13489 unsigned char checksum_before[16], checksum_after[16];
13490 struct md5_ctx ctx;
13491 htab_t ht;
13493 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13494 md5_init_ctx (&ctx);
13495 fold_checksum_tree (op0, &ctx, ht);
13496 md5_finish_ctx (&ctx, checksum_before);
13497 htab_empty (ht);
13498 #endif
13500 tem = fold_unary (code, type, op0);
13501 if (!tem)
13502 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13504 #ifdef ENABLE_FOLD_CHECKING
13505 md5_init_ctx (&ctx);
13506 fold_checksum_tree (op0, &ctx, ht);
13507 md5_finish_ctx (&ctx, checksum_after);
13508 htab_delete (ht);
13510 if (memcmp (checksum_before, checksum_after, 16))
13511 fold_check_failed (op0, tem);
13512 #endif
13513 return tem;
13514 }
13516 /* Fold a binary tree expression with code CODE of type TYPE with
13517 operands OP0 and OP1. Return a folded expression if successful.
13518 Otherwise, return a tree expression with code CODE of type TYPE
13519 with operands OP0 and OP1. */
13521 tree
13522 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13523 MEM_STAT_DECL)
13524 {
13525 tree tem;
13526 #ifdef ENABLE_FOLD_CHECKING
13527 unsigned char checksum_before_op0[16],
13528 checksum_before_op1[16],
13529 checksum_after_op0[16],
13530 checksum_after_op1[16];
13531 struct md5_ctx ctx;
13532 htab_t ht;
13534 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13535 md5_init_ctx (&ctx);
13536 fold_checksum_tree (op0, &ctx, ht);
13537 md5_finish_ctx (&ctx, checksum_before_op0);
13538 htab_empty (ht);
13540 md5_init_ctx (&ctx);
13541 fold_checksum_tree (op1, &ctx, ht);
13542 md5_finish_ctx (&ctx, checksum_before_op1);
13543 htab_empty (ht);
13544 #endif
13546 tem = fold_binary (code, type, op0, op1);
13547 if (!tem)
13548 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13550 #ifdef ENABLE_FOLD_CHECKING
13551 md5_init_ctx (&ctx);
13552 fold_checksum_tree (op0, &ctx, ht);
13553 md5_finish_ctx (&ctx, checksum_after_op0);
13554 htab_empty (ht);
13556 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13557 fold_check_failed (op0, tem);
13559 md5_init_ctx (&ctx);
13560 fold_checksum_tree (op1, &ctx, ht);
13561 md5_finish_ctx (&ctx, checksum_after_op1);
13562 htab_delete (ht);
13564 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13565 fold_check_failed (op1, tem);
13566 #endif
13567 return tem;
13568 }
13570 /* Fold a ternary tree expression with code CODE of type TYPE with
13571 operands OP0, OP1, and OP2. Return a folded expression if
13572 successful. Otherwise, return a tree expression with code CODE of
13573 type TYPE with operands OP0, OP1, and OP2. */
13575 tree
13576 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13577 MEM_STAT_DECL)
13578 {
13579 tree tem;
13580 #ifdef ENABLE_FOLD_CHECKING
13581 unsigned char checksum_before_op0[16],
13582 checksum_before_op1[16],
13583 checksum_before_op2[16],
13584 checksum_after_op0[16],
13585 checksum_after_op1[16],
13586 checksum_after_op2[16];
13587 struct md5_ctx ctx;
13588 htab_t ht;
13590 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13591 md5_init_ctx (&ctx);
13592 fold_checksum_tree (op0, &ctx, ht);
13593 md5_finish_ctx (&ctx, checksum_before_op0);
13594 htab_empty (ht);
13596 md5_init_ctx (&ctx);
13597 fold_checksum_tree (op1, &ctx, ht);
13598 md5_finish_ctx (&ctx, checksum_before_op1);
13599 htab_empty (ht);
13601 md5_init_ctx (&ctx);
13602 fold_checksum_tree (op2, &ctx, ht);
13603 md5_finish_ctx (&ctx, checksum_before_op2);
13604 htab_empty (ht);
13605 #endif
13607 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13608 tem = fold_ternary (code, type, op0, op1, op2);
13609 if (!tem)
13610 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13612 #ifdef ENABLE_FOLD_CHECKING
13613 md5_init_ctx (&ctx);
13614 fold_checksum_tree (op0, &ctx, ht);
13615 md5_finish_ctx (&ctx, checksum_after_op0);
13616 htab_empty (ht);
13618 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13619 fold_check_failed (op0, tem);
13621 md5_init_ctx (&ctx);
13622 fold_checksum_tree (op1, &ctx, ht);
13623 md5_finish_ctx (&ctx, checksum_after_op1);
13624 htab_empty (ht);
13626 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13627 fold_check_failed (op1, tem);
13629 md5_init_ctx (&ctx);
13630 fold_checksum_tree (op2, &ctx, ht);
13631 md5_finish_ctx (&ctx, checksum_after_op2);
13632 htab_delete (ht);
13634 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13635 fold_check_failed (op2, tem);
13636 #endif
13637 return tem;
13638 }
13640 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13641 arguments in ARGARRAY, and a null static chain.
13642 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13643 of type TYPE from the given operands as constructed by build_call_array. */
13645 tree
13646 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13647 {
13648 tree tem;
13649 #ifdef ENABLE_FOLD_CHECKING
13650 unsigned char checksum_before_fn[16],
13651 checksum_before_arglist[16],
13652 checksum_after_fn[16],
13653 checksum_after_arglist[16];
13654 struct md5_ctx ctx;
13655 htab_t ht;
13656 int i;
13658 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13659 md5_init_ctx (&ctx);
13660 fold_checksum_tree (fn, &ctx, ht);
13661 md5_finish_ctx (&ctx, checksum_before_fn);
13662 htab_empty (ht);
13664 md5_init_ctx (&ctx);
13665 for (i = 0; i < nargs; i++)
13666 fold_checksum_tree (argarray[i], &ctx, ht);
13667 md5_finish_ctx (&ctx, checksum_before_arglist);
13668 htab_empty (ht);
13669 #endif
13671 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13673 #ifdef ENABLE_FOLD_CHECKING
13674 md5_init_ctx (&ctx);
13675 fold_checksum_tree (fn, &ctx, ht);
13676 md5_finish_ctx (&ctx, checksum_after_fn);
13677 htab_empty (ht);
13679 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13680 fold_check_failed (fn, tem);
13682 md5_init_ctx (&ctx);
13683 for (i = 0; i < nargs; i++)
13684 fold_checksum_tree (argarray[i], &ctx, ht);
13685 md5_finish_ctx (&ctx, checksum_after_arglist);
13686 htab_delete (ht);
13688 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13689 fold_check_failed (NULL_TREE, tem);
13690 #endif
13691 return tem;
13692 }
13694 /* Perform constant folding and related simplification of initializer
13695 expression EXPR. These behave identically to "fold_buildN" but ignore
13696 potential run-time traps and exceptions that fold must preserve. */
13698 #define START_FOLD_INIT \
13699 int saved_signaling_nans = flag_signaling_nans;\
13700 int saved_trapping_math = flag_trapping_math;\
13701 int saved_rounding_math = flag_rounding_math;\
13702 int saved_trapv = flag_trapv;\
13703 int saved_folding_initializer = folding_initializer;\
13704 flag_signaling_nans = 0;\
13705 flag_trapping_math = 0;\
13706 flag_rounding_math = 0;\
13708 folding_initializer = 1;
13710 #define END_FOLD_INIT \
13711 flag_signaling_nans = saved_signaling_nans;\
13712 flag_trapping_math = saved_trapping_math;\
13713 flag_rounding_math = saved_rounding_math;\
13714 flag_trapv = saved_trapv;\
13715 folding_initializer = saved_folding_initializer;
13717 tree
13718 fold_build1_initializer (enum tree_code code, tree type, tree op)
13719 {
13720 tree result;
13721 START_FOLD_INIT;
13723 result = fold_build1 (code, type, op);
13725 END_FOLD_INIT;
13726 return result;
13727 }
13729 tree
13730 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13731 {
13732 tree result;
13733 START_FOLD_INIT;
13735 result = fold_build2 (code, type, op0, op1);
13737 END_FOLD_INIT;
13738 return result;
13739 }
13741 tree
13742 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13743 tree op2)
13744 {
13745 tree result;
13746 START_FOLD_INIT;
13748 result = fold_build3 (code, type, op0, op1, op2);
13750 END_FOLD_INIT;
13751 return result;
13752 }
13754 tree
13755 fold_build_call_array_initializer (tree type, tree fn,
13756 int nargs, tree *argarray)
13757 {
13758 tree result;
13759 START_FOLD_INIT;
13761 result = fold_build_call_array (type, fn, nargs, argarray);
13763 END_FOLD_INIT;
13764 return result;
13765 }
13767 #undef START_FOLD_INIT
13768 #undef END_FOLD_INIT
13770 /* Determine if first argument is a multiple of second argument. Return 0 if
13771 it is not, or we cannot easily determine that it is.
13773 An example of the sort of thing we care about (at this point; this routine
13774 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13775 fold cases do now) is discovering that
13777 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13779 is a multiple of
13781 SAVE_EXPR (J * 8)
13783 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13785 This code also handles discovering that
13787 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13789 is a multiple of 8 so we don't have to worry about dealing with a
13790 possible remainder.
13792 Note that we *look* inside a SAVE_EXPR only to determine how it was
13793 calculated; it is not safe for fold to do much of anything else with the
13794 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13795 at run time. For example, the latter example above *cannot* be implemented
13796 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13797 evaluation time of the original SAVE_EXPR is not necessarily the same at
13798 the time the new expression is evaluated. The only optimization of this
13799 sort that would be valid is changing
13801 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13803 divided by 8 to
13805 SAVE_EXPR (I) * SAVE_EXPR (J)
13807 (where the same SAVE_EXPR (J) is used in the original and the
13808 transformed version). */
13810 static int
13811 multiple_of_p (tree type, const_tree top, const_tree bottom)
13812 {
13813 if (operand_equal_p (top, bottom, 0))
13814 return 1;
13816 if (TREE_CODE (type) != INTEGER_TYPE)
13817 return 0;
13819 switch (TREE_CODE (top))
13820 {
13821 case BIT_AND_EXPR:
13822 /* Bitwise and provides a power of two multiple. If the mask is
13823 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13824 if (!integer_pow2p (bottom))
13825 return 0;
13826 /* FALLTHRU */
13828 case MULT_EXPR:
13829 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13830 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13834 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13835 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13838 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13842 op1 = TREE_OPERAND (top, 1);
13843 /* const_binop may not detect overflow correctly,
13844 so check for it explicitly here. */
13845 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13846 > TREE_INT_CST_LOW (op1)
13847 && TREE_INT_CST_HIGH (op1) == 0
13848 && 0 != (t1 = fold_convert (type,
13849 const_binop (LSHIFT_EXPR,
13852 && !TREE_OVERFLOW (t1))
13853 return multiple_of_p (type, t1, bottom);
13858 /* Can't handle conversions from non-integral or wider integral type. */
13859 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13860 || (TYPE_PRECISION (type)
13861 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13864 /* .. fall through ... */
13867 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13870 if (TREE_CODE (bottom) != INTEGER_CST
13871 || integer_zerop (bottom)
13872 || (TYPE_UNSIGNED (type)
13873 && (tree_int_cst_sgn (top) < 0
13874 || tree_int_cst_sgn (bottom) < 0)))
13876 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
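
/* Standalone sketch (illustration only, not GCC code) of the reasoning
   multiple_of_p applies to trees, restated for plain integers: a
   product is a multiple of BOTTOM when either factor is, while a sum
   (or difference) requires both operands to be multiples.  */

static int
demo_multiple_of (long a, long b, long bottom, int is_sum)
{
  if (bottom == 0)
    return 0;
  if (is_sum)
    /* a + b (or a - b): both operands must be multiples.  */
    return a % bottom == 0 && b % bottom == 0;
  /* a * b: one multiple factor suffices.  */
  return a % bottom == 0 || b % bottom == 0;
}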
/* Return true if CODE or TYPE is known to be non-negative.  */

bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
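
/* Plain-C illustration (not GCC code; assumes the usual 16-bit short
   and 32-bit int) of the conversion rule above: widening an unsigned
   value into a strictly wider signed type can never go negative.  */

static int
demo_widen_nonnegative (unsigned short u)
{
  int widened = u;      /* 16 unsigned bits into 32 signed bits.  */
  return widened >= 0;  /* Always true.  */
}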
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is smaller than
         the result's.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (type);
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
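
/* Plain-C illustration (not GCC code; assumes 8-bit char, 16-bit short
   and 32-bit int) of the precision arguments above: two zero-extended
   8-bit values need at most 9 bits for their sum and 16 bits for their
   product, so in a strictly wider signed type the sign bit can never
   be set.  */

static int
demo_zext_nonnegative (unsigned char x, unsigned char y)
{
  short sum = (short) ((short) x + (short) y);  /* <= 510, needs 9 bits.  */
  int prod = (int) x * (int) y;                 /* <= 65025, needs 16 bits.  */
  return sum >= 0 && prod >= 0;                 /* Always true.  */
}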
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (code,
                                          type);
}
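
/* Standalone sketch (illustration only, not GCC code) of the pow test
   above: x**n is known nonnegative when N is an even integer, so the
   exponent is round-tripped through an integer, much as the code above
   does with real_to_integer/real_from_integer.  Range issues with huge
   exponents are ignored here.  */

static int
demo_even_integer_exponent (double n)
{
  long i = (long) n;            /* Truncate toward zero.  */
  return (double) i == n        /* Integer valued...  */
         && (i & 1) == 0;       /* ...and even?  */
}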
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if ((TREE_CODE (t) == MODIFY_EXPR
             || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
            && GENERIC_TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
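
/* Sketch (illustration only; the `demo_*' names are hypothetical) of
   the wrapper convention above: the plain _p entry point owns the
   overflow flag and issues the diagnostic, while the _warnv_p worker
   only ever sets the flag.  */

static int demo_overflow_warnings;

static int
demo_nonnegative_warnv_p (int x, int *strict_overflow_p)
{
  if (x >= 0)
    return 1;
  /* Suppose proving this had required assuming signed arithmetic does
     not wrap; record that assumption for the caller.  */
  *strict_overflow_p = 1;
  return 0;
}

static int
demo_nonnegative_p (int x)
{
  int strict_overflow_p = 0;
  int ret = demo_nonnegative_warnv_p (x, &strict_overflow_p);

  if (strict_overflow_p)
    demo_overflow_warnings++;   /* Stands in for fold_overflow_warning.  */
  return ret;
}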
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
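
/* Plain-integer illustration (not GCC code) of the PLUS_EXPR reasoning
   above: the sum of two nonnegative values, at least one of them
   nonzero, lies in [1, 2*INT_MAX], and that range contains no multiple
   of 2^32, so even a wrapping two's-complement add cannot yield 0.  */

static int
demo_nonneg_sum_nonzero (unsigned int a, unsigned int b)
{
  const unsigned int int_max = -1u / 2; /* INT_MAX, two's complement.  */

  /* Model two nonnegative signed operands, at least one nonzero.  */
  if (a > int_max || b > int_max || (a == 0 && b == 0))
    return 0;
  return (a + b) != 0;  /* Always 1 under the constraints above.  */
}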
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
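
/* What the transformation above computes, shown directly in source
   form (illustration only): indexing a string literal with a constant
   index folds to a character constant, so the two operands below are
   compile-time equal.  */

static int
demo_read_constant_string (void)
{
  return "hello"[1] == 'e';     /* fold turns "hello"[1] into 'e'.  */
}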
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          {
            TREE_OVERFLOW (t) = 1;
            TREE_CONSTANT_OVERFLOW (t) = 1;
          }
        else if (TREE_CONSTANT_OVERFLOW (arg0))
          TREE_CONSTANT_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
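
/* Plain-C illustration (not GCC code) of why fold_negate_const must
   track overflow: in two's complement the most negative value has no
   positive counterpart, so its negation cannot be represented and the
   folded constant must carry TREE_OVERFLOW.  */

#include <limits.h>

static int
demo_negation_overflows (int v)
{
  /* Exactly one input has an unrepresentable negation.  */
  return v == INT_MIN;
}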
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
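
/* Standalone sketch (illustration only, not GCC code) of the reduction
   described above, for integer operands: every ordering test is
   expressed with only `<' and `==', by swapping operands and/or
   inverting the result.  */

static int
demo_compare (int code, long a, long b)
{
  switch (code)
    {
    case 0: return a < b;               /* LT */
    case 1: return b < a;               /* GT: swap, do LT.  */
    case 2: return !(b < a);            /* LE: swap, do LT, invert.  */
    case 3: return !(a < b);            /* GE: do LT, invert.  */
    case 4: return a == b;              /* EQ */
    case 5: return !(a == b);           /* NE: do EQ, invert.  */
    default: return 0;
    }
}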
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has no side effects.  If either has none, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
                                part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
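
/* The power-of-two fast path above, restated for plain unsigned
   arithmetic (illustration only): adding DIVISOR-1 and masking with
   -DIVISOR (all ones above the low log2(DIVISOR) bits) rounds up,
   ignoring the overflow tracking the tree version performs.  */

static unsigned long
demo_round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* DIVISOR must be a power of two: divisor == (divisor & -divisor).  */
  return (value + divisor - 1) & -divisor;
}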
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
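
/* Likewise for round_down (illustration only): masking alone clears
   the low bits, no addition needed.  */

static unsigned long
demo_round_down_pow2 (unsigned long value, unsigned long divisor)
{
  return value & -divisor;      /* DIVISOR must be a power of two.  */
}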
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
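
/* Illustration (not GCC code) of the decomposition above: two
   addresses differ by a compile-time constant exactly when they share
   a core address and their byte offsets differ by a constant, as with
   two fields of the same struct object.  */

struct demo_pair { int first; int second; };

static long
demo_ptr_difference (struct demo_pair *p)
{
  /* &p->second and &p->first share the core P; the difference is the
     constant gap between the field offsets.  */
  return (char *) &p->second - (char *) &p->first;
}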
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   can be made.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}