1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
69 /* Nonzero if we are folding constants inside an initializer; zero
71 int folding_initializer = 0;
73 /* The following constants represent a bit based encoding of GCC's
74 comparison operators. This encoding simplifies transformations
75 on relational comparison operators, such as AND and OR. */
76 enum comparison_code {
95 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
96 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
97 static bool negate_mathfn_p (enum built_in_function);
98 static bool negate_expr_p (tree);
99 static tree negate_expr (tree);
100 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
101 static tree associate_trees (tree, tree, enum tree_code, tree);
102 static tree const_binop (enum tree_code, tree, tree, int);
103 static enum comparison_code comparison_to_compcode (enum tree_code);
104 static enum tree_code compcode_to_comparison (enum comparison_code);
105 static tree combine_comparisons (enum tree_code, enum tree_code,
106 enum tree_code, tree, tree, tree);
107 static int truth_value_p (enum tree_code);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand (tree, tree, tree);
112 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static tree sign_bit_p (tree, const_tree);
117 static int simple_operand_p (const_tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *, bool *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (const_tree, const_tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
191 /* Force the double-word integer L1, H1 to be within the range of the
192 integer type TYPE. Stores the properly truncated and sign-extended
193 double-word integer in *LV, *HV. Returns true if the operation
194 overflows, that is, argument and result are different. */
197 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
198 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
200 unsigned HOST_WIDE_INT low0 = l1;
201 HOST_WIDE_INT high0 = h1;
203 int sign_extended_type;
205 if (POINTER_TYPE_P (type)
206 || TREE_CODE (type) == OFFSET_TYPE)
209 prec = TYPE_PRECISION (type);
211 /* Size types *are* sign extended. */
212 sign_extended_type = (!TYPE_UNSIGNED (type)
213 || (TREE_CODE (type) == INTEGER_TYPE
214 && TYPE_IS_SIZETYPE (type)));
216 /* First clear all bits that are beyond the type's precision. */
217 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
219 else if (prec > HOST_BITS_PER_WIDE_INT)
220 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 if (prec < HOST_BITS_PER_WIDE_INT)
225 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
228 /* Then do sign extension if necessary. */
229 if (!sign_extended_type)
230 /* No sign extension */;
231 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
232 /* Correct width already. */;
233 else if (prec > HOST_BITS_PER_WIDE_INT)
235 /* Sign extend top half? */
236 if (h1 & ((unsigned HOST_WIDE_INT)1
237 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
238 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
240 else if (prec == HOST_BITS_PER_WIDE_INT)
242 if ((HOST_WIDE_INT)l1 < 0)
247 /* Sign extend bottom half? */
248 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
251 l1 |= (HOST_WIDE_INT)(-1) << prec;
258 /* If the value didn't fit, signal overflow. */
259 return l1 != low0 || h1 != high0;
262 /* We force the double-int HIGH:LOW to the range of the type TYPE by
263 sign or zero extending it.
264 OVERFLOWABLE indicates if we are interested
265 in overflow of the value, when >0 we are only interested in signed
266 overflow, for <0 we are interested in any overflow. OVERFLOWED
267 indicates whether overflow has already occurred. CONST_OVERFLOWED
268 indicates whether constant overflow has already occurred. We force
269 T's value to be within range of T's type (by setting to 0 or 1 all
270 the bits outside the type's range). We set TREE_OVERFLOWED if,
271 OVERFLOWED is nonzero,
272 or OVERFLOWABLE is >0 and signed overflow occurs
273 or OVERFLOWABLE is <0 and any overflow occurs
274 We return a new tree node for the extended double-int. The node
275 is shared if no overflow flags are set. */
278 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
279 HOST_WIDE_INT high, int overflowable,
282 int sign_extended_type;
285 /* Size types *are* sign extended. */
286 sign_extended_type = (!TYPE_UNSIGNED (type)
287 || (TREE_CODE (type) == INTEGER_TYPE
288 && TYPE_IS_SIZETYPE (type)));
290 overflow = fit_double_type (low, high, &low, &high, type);
292 /* If we need to set overflow flags, return a new unshared node. */
293 if (overflowed || overflow)
297 || (overflowable > 0 && sign_extended_type))
299 tree t = make_node (INTEGER_CST);
300 TREE_INT_CST_LOW (t) = low;
301 TREE_INT_CST_HIGH (t) = high;
302 TREE_TYPE (t) = type;
303 TREE_OVERFLOW (t) = 1;
308 /* Else build a shared node. */
309 return build_int_cst_wide (type, low, high);
312 /* Add two doubleword integers with doubleword result.
313 Return nonzero if the operation overflows according to UNSIGNED_P.
314 Each argument is given as two `HOST_WIDE_INT' pieces.
315 One argument is L1 and H1; the other, L2 and H2.
316 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
319 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
320 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
324 unsigned HOST_WIDE_INT l;
328 h = h1 + h2 + (l < l1);
334 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
336 return OVERFLOW_SUM_SIGN (h1, h2, h);
339 /* Negate a doubleword integer with doubleword result.
340 Return nonzero if the operation overflows, assuming it's signed.
341 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
342 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
345 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
352 return (*hv & h1) < 0;
362 /* Multiply two doubleword integers with doubleword result.
363 Return nonzero if the operation overflows according to UNSIGNED_P.
364 Each argument is given as two `HOST_WIDE_INT' pieces.
365 One argument is L1 and H1; the other, L2 and H2.
366 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
369 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
370 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
371 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
374 HOST_WIDE_INT arg1[4];
375 HOST_WIDE_INT arg2[4];
376 HOST_WIDE_INT prod[4 * 2];
377 unsigned HOST_WIDE_INT carry;
379 unsigned HOST_WIDE_INT toplow, neglow;
380 HOST_WIDE_INT tophigh, neghigh;
382 encode (arg1, l1, h1);
383 encode (arg2, l2, h2);
385 memset (prod, 0, sizeof prod);
387 for (i = 0; i < 4; i++)
390 for (j = 0; j < 4; j++)
393 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
394 carry += arg1[i] * arg2[j];
395 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
397 prod[k] = LOWPART (carry);
398 carry = HIGHPART (carry);
403 decode (prod, lv, hv);
404 decode (prod + 4, &toplow, &tophigh);
406 /* Unsigned overflow is immediate. */
408 return (toplow | tophigh) != 0;
410 /* Check for signed overflow by calculating the signed representation of the
411 top half of the result; it should agree with the low half's sign bit. */
414 neg_double (l2, h2, &neglow, &neghigh);
415 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
419 neg_double (l1, h1, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
425 /* Shift the doubleword integer in L1, H1 left by COUNT places
426 keeping only PREC bits of result.
427 Shift right if COUNT is negative.
428 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
429 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
432 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
433 HOST_WIDE_INT count, unsigned int prec,
434 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
436 unsigned HOST_WIDE_INT signmask;
440 rshift_double (l1, h1, -count, prec, lv, hv, arith);
444 if (SHIFT_COUNT_TRUNCATED)
447 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
449 /* Shifting by the host word size is undefined according to the
450 ANSI standard, so we must handle this as a special case. */
454 else if (count >= HOST_BITS_PER_WIDE_INT)
456 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
461 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
462 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
466 /* Sign extend all bits that are beyond the precision. */
468 signmask = -((prec > HOST_BITS_PER_WIDE_INT
469 ? ((unsigned HOST_WIDE_INT) *hv
470 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
471 : (*lv >> (prec - 1))) & 1);
473 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
475 else if (prec >= HOST_BITS_PER_WIDE_INT)
477 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
478 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
483 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
484 *lv |= signmask << prec;
488 /* Shift the doubleword integer in L1, H1 right by COUNT places
489 keeping only PREC bits of result. COUNT must be positive.
490 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
491 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
494 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
495 HOST_WIDE_INT count, unsigned int prec,
496 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
499 unsigned HOST_WIDE_INT signmask;
502 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
505 if (SHIFT_COUNT_TRUNCATED)
508 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
510 /* Shifting by the host word size is undefined according to the
511 ANSI standard, so we must handle this as a special case. */
515 else if (count >= HOST_BITS_PER_WIDE_INT)
518 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
522 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
524 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
527 /* Zero / sign extend all bits that are beyond the precision. */
529 if (count >= (HOST_WIDE_INT)prec)
534 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
536 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
538 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
539 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
544 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
545 *lv |= signmask << (prec - count);
549 /* Rotate the doubleword integer in L1, H1 left by COUNT places
550 keeping only PREC bits of result.
551 Rotate right if COUNT is negative.
552 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
555 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
556 HOST_WIDE_INT count, unsigned int prec,
557 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
559 unsigned HOST_WIDE_INT s1l, s2l;
560 HOST_WIDE_INT s1h, s2h;
566 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
567 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
572 /* Rotate the doubleword integer in L1, H1 left by COUNT places
573 keeping only PREC bits of result. COUNT must be positive.
574 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
577 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
578 HOST_WIDE_INT count, unsigned int prec,
579 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
581 unsigned HOST_WIDE_INT s1l, s2l;
582 HOST_WIDE_INT s1h, s2h;
588 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
589 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
594 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
595 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
596 CODE is a tree code for a kind of division, one of
597 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
599 It controls how the quotient is rounded to an integer.
600 Return nonzero if the operation overflows.
601 UNS nonzero says do unsigned division. */
604 div_and_round_double (enum tree_code code, int uns,
605 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
606 HOST_WIDE_INT hnum_orig,
607 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
608 HOST_WIDE_INT hden_orig,
609 unsigned HOST_WIDE_INT *lquo,
610 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
614 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
615 HOST_WIDE_INT den[4], quo[4];
617 unsigned HOST_WIDE_INT work;
618 unsigned HOST_WIDE_INT carry = 0;
619 unsigned HOST_WIDE_INT lnum = lnum_orig;
620 HOST_WIDE_INT hnum = hnum_orig;
621 unsigned HOST_WIDE_INT lden = lden_orig;
622 HOST_WIDE_INT hden = hden_orig;
625 if (hden == 0 && lden == 0)
626 overflow = 1, lden = 1;
628 /* Calculate quotient sign and convert operands to unsigned. */
634 /* (minimum integer) / (-1) is the only overflow case. */
635 if (neg_double (lnum, hnum, &lnum, &hnum)
636 && ((HOST_WIDE_INT) lden & hden) == -1)
642 neg_double (lden, hden, &lden, &hden);
646 if (hnum == 0 && hden == 0)
647 { /* single precision */
649 /* This unsigned division rounds toward zero. */
655 { /* trivial case: dividend < divisor */
656 /* hden != 0 already checked. */
663 memset (quo, 0, sizeof quo);
665 memset (num, 0, sizeof num); /* to zero 9th element */
666 memset (den, 0, sizeof den);
668 encode (num, lnum, hnum);
669 encode (den, lden, hden);
671 /* Special code for when the divisor < BASE. */
672 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
674 /* hnum != 0 already checked. */
675 for (i = 4 - 1; i >= 0; i--)
677 work = num[i] + carry * BASE;
678 quo[i] = work / lden;
684 /* Full double precision division,
685 with thanks to Don Knuth's "Seminumerical Algorithms". */
686 int num_hi_sig, den_hi_sig;
687 unsigned HOST_WIDE_INT quo_est, scale;
689 /* Find the highest nonzero divisor digit. */
690 for (i = 4 - 1;; i--)
697 /* Insure that the first digit of the divisor is at least BASE/2.
698 This is required by the quotient digit estimation algorithm. */
700 scale = BASE / (den[den_hi_sig] + 1);
702 { /* scale divisor and dividend */
704 for (i = 0; i <= 4 - 1; i++)
706 work = (num[i] * scale) + carry;
707 num[i] = LOWPART (work);
708 carry = HIGHPART (work);
713 for (i = 0; i <= 4 - 1; i++)
715 work = (den[i] * scale) + carry;
716 den[i] = LOWPART (work);
717 carry = HIGHPART (work);
718 if (den[i] != 0) den_hi_sig = i;
725 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
727 /* Guess the next quotient digit, quo_est, by dividing the first
728 two remaining dividend digits by the high order quotient digit.
729 quo_est is never low and is at most 2 high. */
730 unsigned HOST_WIDE_INT tmp;
732 num_hi_sig = i + den_hi_sig + 1;
733 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
734 if (num[num_hi_sig] != den[den_hi_sig])
735 quo_est = work / den[den_hi_sig];
739 /* Refine quo_est so it's usually correct, and at most one high. */
740 tmp = work - quo_est * den[den_hi_sig];
742 && (den[den_hi_sig - 1] * quo_est
743 > (tmp * BASE + num[num_hi_sig - 2])))
746 /* Try QUO_EST as the quotient digit, by multiplying the
747 divisor by QUO_EST and subtracting from the remaining dividend.
748 Keep in mind that QUO_EST is the I - 1st digit. */
751 for (j = 0; j <= den_hi_sig; j++)
753 work = quo_est * den[j] + carry;
754 carry = HIGHPART (work);
755 work = num[i + j] - LOWPART (work);
756 num[i + j] = LOWPART (work);
757 carry += HIGHPART (work) != 0;
760 /* If quo_est was high by one, then num[i] went negative and
761 we need to correct things. */
762 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
765 carry = 0; /* add divisor back in */
766 for (j = 0; j <= den_hi_sig; j++)
768 work = num[i + j] + den[j] + carry;
769 carry = HIGHPART (work);
770 num[i + j] = LOWPART (work);
773 num [num_hi_sig] += carry;
776 /* Store the quotient digit. */
781 decode (quo, lquo, hquo);
784 /* If result is negative, make it so. */
786 neg_double (*lquo, *hquo, lquo, hquo);
788 /* Compute trial remainder: rem = num - (quo * den) */
789 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
790 neg_double (*lrem, *hrem, lrem, hrem);
791 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
796 case TRUNC_MOD_EXPR: /* round toward zero */
797 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
801 case FLOOR_MOD_EXPR: /* round toward negative infinity */
802 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
805 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
813 case CEIL_MOD_EXPR: /* round toward positive infinity */
814 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
816 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
824 case ROUND_MOD_EXPR: /* round to closest integer */
826 unsigned HOST_WIDE_INT labs_rem = *lrem;
827 HOST_WIDE_INT habs_rem = *hrem;
828 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
829 HOST_WIDE_INT habs_den = hden, htwice;
831 /* Get absolute values. */
833 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
835 neg_double (lden, hden, &labs_den, &habs_den);
837 /* If (2 * abs (lrem) >= abs (lden)) */
838 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
839 labs_rem, habs_rem, <wice, &htwice);
841 if (((unsigned HOST_WIDE_INT) habs_den
842 < (unsigned HOST_WIDE_INT) htwice)
843 || (((unsigned HOST_WIDE_INT) habs_den
844 == (unsigned HOST_WIDE_INT) htwice)
845 && (labs_den < ltwice)))
849 add_double (*lquo, *hquo,
850 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
853 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
865 /* Compute true remainder: rem = num - (quo * den) */
866 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
867 neg_double (*lrem, *hrem, lrem, hrem);
868 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
872 /* If ARG2 divides ARG1 with zero remainder, carries out the division
873 of type CODE and returns the quotient.
874 Otherwise returns NULL_TREE. */
877 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
879 unsigned HOST_WIDE_INT int1l, int2l;
880 HOST_WIDE_INT int1h, int2h;
881 unsigned HOST_WIDE_INT quol, reml;
882 HOST_WIDE_INT quoh, remh;
883 tree type = TREE_TYPE (arg1);
884 int uns = TYPE_UNSIGNED (type);
886 int1l = TREE_INT_CST_LOW (arg1);
887 int1h = TREE_INT_CST_HIGH (arg1);
888 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
889 &obj[some_exotic_number]. */
890 if (POINTER_TYPE_P (type))
893 type = signed_type_for (type);
894 fit_double_type (int1l, int1h, &int1l, &int1h,
898 fit_double_type (int1l, int1h, &int1l, &int1h, type);
899 int2l = TREE_INT_CST_LOW (arg2);
900 int2h = TREE_INT_CST_HIGH (arg2);
902 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
903 &quol, &quoh, &reml, &remh);
904 if (remh != 0 || reml != 0)
907 return build_int_cst_wide (type, quol, quoh);
910 /* This is nonzero if we should defer warnings about undefined
911 overflow. This facility exists because these warnings are a
912 special case. The code to estimate loop iterations does not want
913 to issue any warnings, since it works with expressions which do not
914 occur in user code. Various bits of cleanup code call fold(), but
915 only use the result if it has certain characteristics (e.g., is a
916 constant); that code only wants to issue a warning if the result is
919 static int fold_deferring_overflow_warnings;
921 /* If a warning about undefined overflow is deferred, this is the
922 warning. Note that this may cause us to turn two warnings into
923 one, but that is fine since it is sufficient to only give one
924 warning per expression. */
926 static const char* fold_deferred_overflow_warning;
928 /* If a warning about undefined overflow is deferred, this is the
929 level at which the warning should be emitted. */
931 static enum warn_strict_overflow_code fold_deferred_overflow_code;
933 /* Start deferring overflow warnings. We could use a stack here to
934 permit nested calls, but at present it is not necessary. */
937 fold_defer_overflow_warnings (void)
939 ++fold_deferring_overflow_warnings;
942 /* Stop deferring overflow warnings. If there is a pending warning,
943 and ISSUE is true, then issue the warning if appropriate. STMT is
944 the statement with which the warning should be associated (used for
945 location information); STMT may be NULL. CODE is the level of the
946 warning--a warn_strict_overflow_code value. This function will use
947 the smaller of CODE and the deferred code when deciding whether to
948 issue the warning. CODE may be zero to mean to always use the
952 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
957 gcc_assert (fold_deferring_overflow_warnings > 0);
958 --fold_deferring_overflow_warnings;
959 if (fold_deferring_overflow_warnings > 0)
961 if (fold_deferred_overflow_warning != NULL
963 && code < (int) fold_deferred_overflow_code)
964 fold_deferred_overflow_code = code;
968 warnmsg = fold_deferred_overflow_warning;
969 fold_deferred_overflow_warning = NULL;
971 if (!issue || warnmsg == NULL)
974 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
977 /* Use the smallest code level when deciding to issue the
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
982 if (!issue_strict_overflow_warning (code))
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
992 /* Stop deferring overflow warnings, ignoring any deferred
996 fold_undefer_and_ignore_overflow_warnings (void)
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1001 /* Whether we are deferring overflow warnings. */
1004 fold_deferring_overflow_warnings_p (void)
1006 return fold_deferring_overflow_warnings > 0;
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1015 if (fold_deferring_overflow_warnings > 0)
1017 if (fold_deferred_overflow_warning == NULL
1018 || wc < fold_deferred_overflow_code)
1020 fold_deferred_overflow_warning = gmsgid;
1021 fold_deferred_overflow_code = wc;
1024 else if (issue_strict_overflow_warning (wc))
1025 warning (OPT_Wstrict_overflow, gmsgid);
1028 /* Return true if the built-in mathematical function specified by CODE
1029 is odd, i.e. -f(x) == f(-x). */
1032 negate_mathfn_p (enum built_in_function code)
1036 CASE_FLT_FN (BUILT_IN_ASIN):
1037 CASE_FLT_FN (BUILT_IN_ASINH):
1038 CASE_FLT_FN (BUILT_IN_ATAN):
1039 CASE_FLT_FN (BUILT_IN_ATANH):
1040 CASE_FLT_FN (BUILT_IN_CASIN):
1041 CASE_FLT_FN (BUILT_IN_CASINH):
1042 CASE_FLT_FN (BUILT_IN_CATAN):
1043 CASE_FLT_FN (BUILT_IN_CATANH):
1044 CASE_FLT_FN (BUILT_IN_CBRT):
1045 CASE_FLT_FN (BUILT_IN_CPROJ):
1046 CASE_FLT_FN (BUILT_IN_CSIN):
1047 CASE_FLT_FN (BUILT_IN_CSINH):
1048 CASE_FLT_FN (BUILT_IN_CTAN):
1049 CASE_FLT_FN (BUILT_IN_CTANH):
1050 CASE_FLT_FN (BUILT_IN_ERF):
1051 CASE_FLT_FN (BUILT_IN_LLROUND):
1052 CASE_FLT_FN (BUILT_IN_LROUND):
1053 CASE_FLT_FN (BUILT_IN_ROUND):
1054 CASE_FLT_FN (BUILT_IN_SIN):
1055 CASE_FLT_FN (BUILT_IN_SINH):
1056 CASE_FLT_FN (BUILT_IN_TAN):
1057 CASE_FLT_FN (BUILT_IN_TANH):
1058 CASE_FLT_FN (BUILT_IN_TRUNC):
1061 CASE_FLT_FN (BUILT_IN_LLRINT):
1062 CASE_FLT_FN (BUILT_IN_LRINT):
1063 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1064 CASE_FLT_FN (BUILT_IN_RINT):
1065 return !flag_rounding_math;
1073 /* Check whether we may negate an integer constant T without causing
1077 may_negate_without_overflow_p (const_tree t)
1079 unsigned HOST_WIDE_INT val;
1083 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085 type = TREE_TYPE (t);
1086 if (TYPE_UNSIGNED (type))
1089 prec = TYPE_PRECISION (type);
1090 if (prec > HOST_BITS_PER_WIDE_INT)
1092 if (TREE_INT_CST_LOW (t) != 0)
1094 prec -= HOST_BITS_PER_WIDE_INT;
1095 val = TREE_INT_CST_HIGH (t);
1098 val = TREE_INT_CST_LOW (t);
1099 if (prec < HOST_BITS_PER_WIDE_INT)
1100 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1101 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1104 /* Determine whether an expression T can be cheaply negated using
1105 the function negate_expr without introducing undefined overflow. */
1108 negate_expr_p (tree t)
1115 type = TREE_TYPE (t);
1117 STRIP_SIGN_NOPS (t);
1118 switch (TREE_CODE (t))
1121 if (TYPE_OVERFLOW_WRAPS (type))
1124 /* Check that -CST will not overflow type. */
1125 return may_negate_without_overflow_p (t);
1127 return (INTEGRAL_TYPE_P (type)
1128 && TYPE_OVERFLOW_WRAPS (type));
1136 return negate_expr_p (TREE_REALPART (t))
1137 && negate_expr_p (TREE_IMAGPART (t));
1140 return negate_expr_p (TREE_OPERAND (t, 0))
1141 && negate_expr_p (TREE_OPERAND (t, 1));
1144 return negate_expr_p (TREE_OPERAND (t, 0));
1147 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1148 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 /* -(A + B) -> (-B) - A. */
1151 if (negate_expr_p (TREE_OPERAND (t, 1))
1152 && reorder_operands_p (TREE_OPERAND (t, 0),
1153 TREE_OPERAND (t, 1)))
1155 /* -(A + B) -> (-A) - B. */
1156 return negate_expr_p (TREE_OPERAND (t, 0));
1159 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1160 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1162 && reorder_operands_p (TREE_OPERAND (t, 0),
1163 TREE_OPERAND (t, 1));
1166 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1172 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1173 return negate_expr_p (TREE_OPERAND (t, 1))
1174 || negate_expr_p (TREE_OPERAND (t, 0));
1177 case TRUNC_DIV_EXPR:
1178 case ROUND_DIV_EXPR:
1179 case FLOOR_DIV_EXPR:
1181 case EXACT_DIV_EXPR:
1182 /* In general we can't negate A / B, because if A is INT_MIN and
1183 B is 1, we may turn this into INT_MIN / -1 which is undefined
1184 and actually traps on some architectures. But if overflow is
1185 undefined, we can negate, because - (INT_MIN / 1) is an
1187 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1188 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 return negate_expr_p (TREE_OPERAND (t, 1))
1191 || negate_expr_p (TREE_OPERAND (t, 0));
1194 /* Negate -((double)float) as (double)(-float). */
1195 if (TREE_CODE (type) == REAL_TYPE)
1197 tree tem = strip_float_extensions (t);
1199 return negate_expr_p (tem);
1204 /* Negate -f(x) as f(-x). */
1205 if (negate_mathfn_p (builtin_mathfn_code (t)))
1206 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1210 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1211 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 tree op1 = TREE_OPERAND (t, 1);
1214 if (TREE_INT_CST_HIGH (op1) == 0
1215 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1216 == TREE_INT_CST_LOW (op1))
1227 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1228 simplification is possible.
1229 If negate_expr_p would return true for T, NULL_TREE will never be
1233 fold_negate_expr (tree t)
1235 tree type = TREE_TYPE (t);
1238 switch (TREE_CODE (t))
1240 /* Convert - (~A) to A + 1. */
1242 if (INTEGRAL_TYPE_P (type))
1243 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1244 build_int_cst (type, 1));
1248 tem = fold_negate_const (t, type);
1249 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1250 || !TYPE_OVERFLOW_TRAPS (type))
1255 tem = fold_negate_const (t, type);
1256 /* Two's complement FP formats, such as c4x, may overflow. */
1257 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1262 tem = fold_negate_const (t, type);
1267 tree rpart = negate_expr (TREE_REALPART (t));
1268 tree ipart = negate_expr (TREE_IMAGPART (t));
1270 if ((TREE_CODE (rpart) == REAL_CST
1271 && TREE_CODE (ipart) == REAL_CST)
1272 || (TREE_CODE (rpart) == INTEGER_CST
1273 && TREE_CODE (ipart) == INTEGER_CST))
1274 return build_complex (type, rpart, ipart);
1279 if (negate_expr_p (t))
1280 return fold_build2 (COMPLEX_EXPR, type,
1281 fold_negate_expr (TREE_OPERAND (t, 0)),
1282 fold_negate_expr (TREE_OPERAND (t, 1)));
1286 if (negate_expr_p (t))
1287 return fold_build1 (CONJ_EXPR, type,
1288 fold_negate_expr (TREE_OPERAND (t, 0)));
1292 return TREE_OPERAND (t, 0);
1295 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1296 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1298 /* -(A + B) -> (-B) - A. */
1299 if (negate_expr_p (TREE_OPERAND (t, 1))
1300 && reorder_operands_p (TREE_OPERAND (t, 0),
1301 TREE_OPERAND (t, 1)))
1303 tem = negate_expr (TREE_OPERAND (t, 1));
1304 return fold_build2 (MINUS_EXPR, type,
1305 tem, TREE_OPERAND (t, 0));
1308 /* -(A + B) -> (-A) - B. */
1309 if (negate_expr_p (TREE_OPERAND (t, 0)))
1311 tem = negate_expr (TREE_OPERAND (t, 0));
1312 return fold_build2 (MINUS_EXPR, type,
1313 tem, TREE_OPERAND (t, 1));
1319 /* - (A - B) -> B - A */
1320 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1321 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1322 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1323 return fold_build2 (MINUS_EXPR, type,
1324 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1328 if (TYPE_UNSIGNED (type))
1334 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1336 tem = TREE_OPERAND (t, 1);
1337 if (negate_expr_p (tem))
1338 return fold_build2 (TREE_CODE (t), type,
1339 TREE_OPERAND (t, 0), negate_expr (tem));
1340 tem = TREE_OPERAND (t, 0);
1341 if (negate_expr_p (tem))
1342 return fold_build2 (TREE_CODE (t), type,
1343 negate_expr (tem), TREE_OPERAND (t, 1));
1347 case TRUNC_DIV_EXPR:
1348 case ROUND_DIV_EXPR:
1349 case FLOOR_DIV_EXPR:
1351 case EXACT_DIV_EXPR:
1352 /* In general we can't negate A / B, because if A is INT_MIN and
1353 B is 1, we may turn this into INT_MIN / -1 which is undefined
1354 and actually traps on some architectures. But if overflow is
1355 undefined, we can negate, because - (INT_MIN / 1) is an
1357 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1359 const char * const warnmsg = G_("assuming signed overflow does not "
1360 "occur when negating a division");
1361 tem = TREE_OPERAND (t, 1);
1362 if (negate_expr_p (tem))
1364 if (INTEGRAL_TYPE_P (type)
1365 && (TREE_CODE (tem) != INTEGER_CST
1366 || integer_onep (tem)))
1367 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1368 return fold_build2 (TREE_CODE (t), type,
1369 TREE_OPERAND (t, 0), negate_expr (tem));
1371 tem = TREE_OPERAND (t, 0);
1372 if (negate_expr_p (tem))
1374 if (INTEGRAL_TYPE_P (type)
1375 && (TREE_CODE (tem) != INTEGER_CST
1376 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1377 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1378 return fold_build2 (TREE_CODE (t), type,
1379 negate_expr (tem), TREE_OPERAND (t, 1));
1385 /* Convert -((double)float) into (double)(-float). */
1386 if (TREE_CODE (type) == REAL_TYPE)
1388 tem = strip_float_extensions (t);
1389 if (tem != t && negate_expr_p (tem))
1390 return fold_convert (type, negate_expr (tem));
1395 /* Negate -f(x) as f(-x). */
1396 if (negate_mathfn_p (builtin_mathfn_code (t))
1397 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1401 fndecl = get_callee_fndecl (t);
1402 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1403 return build_call_expr (fndecl, 1, arg);
1408 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1409 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1411 tree op1 = TREE_OPERAND (t, 1);
1412 if (TREE_INT_CST_HIGH (op1) == 0
1413 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1414 == TREE_INT_CST_LOW (op1))
1416 tree ntype = TYPE_UNSIGNED (type)
1417 ? signed_type_for (type)
1418 : unsigned_type_for (type);
1419 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1420 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1421 return fold_convert (type, temp);
1433 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1434 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1435 return NULL_TREE. */
1438 negate_expr (tree t)
1445 type = TREE_TYPE (t);
1446 STRIP_SIGN_NOPS (t);
1448 tem = fold_negate_expr (t);
1450 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1451 return fold_convert (type, tem);
1454 /* Split a tree IN into a constant, literal and variable parts that could be
1455 combined with CODE to make IN. "constant" means an expression with
1456 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1457 commutative arithmetic operation. Store the constant part into *CONP,
1458 the literal in *LITP and return the variable part. If a part isn't
1459 present, set it to null. If the tree does not decompose in this way,
1460 return the entire tree as the variable part and the other parts as null.
1462 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1463 case, we negate an operand that was subtracted. Except if it is a
1464 literal for which we use *MINUS_LITP instead.
1466 If NEGATE_P is true, we are negating all of IN, again except a literal
1467 for which we use *MINUS_LITP instead.
1469 If IN is itself a literal or constant, return it as appropriate.
1471 Note that we do not guarantee that any of the three values will be the
1472 same type as IN, but they will have the same signedness and mode. */
1475 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1476 tree *minus_litp, int negate_p)
1484 /* Strip any conversions that don't change the machine mode or signedness. */
1485 STRIP_SIGN_NOPS (in);
1487 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1488 || TREE_CODE (in) == FIXED_CST)
1490 else if (TREE_CODE (in) == code
1491 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1492 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1493 /* We can associate addition and subtraction together (even
1494 though the C standard doesn't say so) for integers because
1495 the value is not affected. For reals, the value might be
1496 affected, so we can't. */
1497 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1498 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1500 tree op0 = TREE_OPERAND (in, 0);
1501 tree op1 = TREE_OPERAND (in, 1);
1502 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1503 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1505 /* First see if either of the operands is a literal, then a constant. */
1506 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1507 || TREE_CODE (op0) == FIXED_CST)
1508 *litp = op0, op0 = 0;
1509 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1510 || TREE_CODE (op1) == FIXED_CST)
1511 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1513 if (op0 != 0 && TREE_CONSTANT (op0))
1514 *conp = op0, op0 = 0;
1515 else if (op1 != 0 && TREE_CONSTANT (op1))
1516 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1518 /* If we haven't dealt with either operand, this is not a case we can
1519 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1520 if (op0 != 0 && op1 != 0)
1525 var = op1, neg_var_p = neg1_p;
1527 /* Now do any needed negations. */
1529 *minus_litp = *litp, *litp = 0;
1531 *conp = negate_expr (*conp);
1533 var = negate_expr (var);
1535 else if (TREE_CONSTANT (in))
1543 *minus_litp = *litp, *litp = 0;
1544 else if (*minus_litp)
1545 *litp = *minus_litp, *minus_litp = 0;
1546 *conp = negate_expr (*conp);
1547 var = negate_expr (var);
1553 /* Re-associate trees split by the above function. T1 and T2 are either
1554 expressions to associate or null. Return the new expression, if any. If
1555 we build an operation, do it in TYPE and with CODE. */
1558 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1565 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1566 try to fold this since we will have infinite recursion. But do
1567 deal with any NEGATE_EXPRs. */
1568 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1569 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1571 if (code == PLUS_EXPR)
1573 if (TREE_CODE (t1) == NEGATE_EXPR)
1574 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1575 fold_convert (type, TREE_OPERAND (t1, 0)));
1576 else if (TREE_CODE (t2) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1578 fold_convert (type, TREE_OPERAND (t2, 0)));
1579 else if (integer_zerop (t2))
1580 return fold_convert (type, t1);
1582 else if (code == MINUS_EXPR)
1584 if (integer_zerop (t2))
1585 return fold_convert (type, t1);
1588 return build2 (code, type, fold_convert (type, t1),
1589 fold_convert (type, t2));
1592 return fold_build2 (code, type, fold_convert (type, t1),
1593 fold_convert (type, t2));
1596 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1597 for use in int_const_binop, size_binop and size_diffop. */
1600 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1602 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1604 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1619 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1620 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1621 && TYPE_MODE (type1) == TYPE_MODE (type2);
1625 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1626 to produce a new constant. Return NULL_TREE if we don't know how
1627 to evaluate CODE at compile-time.
1629 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1632 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1634 unsigned HOST_WIDE_INT int1l, int2l;
1635 HOST_WIDE_INT int1h, int2h;
1636 unsigned HOST_WIDE_INT low;
1638 unsigned HOST_WIDE_INT garbagel;
1639 HOST_WIDE_INT garbageh;
1641 tree type = TREE_TYPE (arg1);
1642 int uns = TYPE_UNSIGNED (type);
1644 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1647 int1l = TREE_INT_CST_LOW (arg1);
1648 int1h = TREE_INT_CST_HIGH (arg1);
1649 int2l = TREE_INT_CST_LOW (arg2);
1650 int2h = TREE_INT_CST_HIGH (arg2);
1655 low = int1l | int2l, hi = int1h | int2h;
1659 low = int1l ^ int2l, hi = int1h ^ int2h;
1663 low = int1l & int2l, hi = int1h & int2h;
1669 /* It's unclear from the C standard whether shifts can overflow.
1670 The following code ignores overflow; perhaps a C standard
1671 interpretation ruling is needed. */
1672 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1679 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1684 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1688 neg_double (int2l, int2h, &low, &hi);
1689 add_double (int1l, int1h, low, hi, &low, &hi);
1690 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1694 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1697 case TRUNC_DIV_EXPR:
1698 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1699 case EXACT_DIV_EXPR:
1700 /* This is a shortcut for a common special case. */
1701 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1702 && !TREE_OVERFLOW (arg1)
1703 && !TREE_OVERFLOW (arg2)
1704 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1706 if (code == CEIL_DIV_EXPR)
1709 low = int1l / int2l, hi = 0;
1713 /* ... fall through ... */
1715 case ROUND_DIV_EXPR:
1716 if (int2h == 0 && int2l == 0)
1718 if (int2h == 0 && int2l == 1)
1720 low = int1l, hi = int1h;
1723 if (int1l == int2l && int1h == int2h
1724 && ! (int1l == 0 && int1h == 0))
1729 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1730 &low, &hi, &garbagel, &garbageh);
1733 case TRUNC_MOD_EXPR:
1734 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1735 /* This is a shortcut for a common special case. */
1736 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1737 && !TREE_OVERFLOW (arg1)
1738 && !TREE_OVERFLOW (arg2)
1739 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1741 if (code == CEIL_MOD_EXPR)
1743 low = int1l % int2l, hi = 0;
1747 /* ... fall through ... */
1749 case ROUND_MOD_EXPR:
1750 if (int2h == 0 && int2l == 0)
1752 overflow = div_and_round_double (code, uns,
1753 int1l, int1h, int2l, int2h,
1754 &garbagel, &garbageh, &low, &hi);
1760 low = (((unsigned HOST_WIDE_INT) int1h
1761 < (unsigned HOST_WIDE_INT) int2h)
1762 || (((unsigned HOST_WIDE_INT) int1h
1763 == (unsigned HOST_WIDE_INT) int2h)
1766 low = (int1h < int2h
1767 || (int1h == int2h && int1l < int2l));
1769 if (low == (code == MIN_EXPR))
1770 low = int1l, hi = int1h;
1772 low = int2l, hi = int2h;
1781 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1783 /* Propagate overflow flags ourselves. */
1784 if (((!uns || is_sizetype) && overflow)
1785 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1788 TREE_OVERFLOW (t) = 1;
1792 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1793 ((!uns || is_sizetype) && overflow)
1794 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1799 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1800 constant. We assume ARG1 and ARG2 have the same data type, or at least
1801 are the same kind of constant and the same machine mode. Return zero if
1802 combining the constants is not allowed in the current operating mode.
1804 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1807 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 /* Sanity check for the recursive cases. */
1816 if (TREE_CODE (arg1) == INTEGER_CST)
1817 return int_const_binop (code, arg1, arg2, notrunc);
1819 if (TREE_CODE (arg1) == REAL_CST)
1821 enum machine_mode mode;
1824 REAL_VALUE_TYPE value;
1825 REAL_VALUE_TYPE result;
1829 /* The following codes are handled by real_arithmetic. */
1844 d1 = TREE_REAL_CST (arg1);
1845 d2 = TREE_REAL_CST (arg2);
1847 type = TREE_TYPE (arg1);
1848 mode = TYPE_MODE (type);
1850 /* Don't perform operation if we honor signaling NaNs and
1851 either operand is a NaN. */
1852 if (HONOR_SNANS (mode)
1853 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1856 /* Don't perform operation if it would raise a division
1857 by zero exception. */
1858 if (code == RDIV_EXPR
1859 && REAL_VALUES_EQUAL (d2, dconst0)
1860 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1863 /* If either operand is a NaN, just return it. Otherwise, set up
1864 for floating-point trap; we return an overflow. */
1865 if (REAL_VALUE_ISNAN (d1))
1867 else if (REAL_VALUE_ISNAN (d2))
1870 inexact = real_arithmetic (&value, code, &d1, &d2);
1871 real_convert (&result, mode, &value);
1873 /* Don't constant fold this floating point operation if
1874 the result has overflowed and flag_trapping_math. */
1875 if (flag_trapping_math
1876 && MODE_HAS_INFINITIES (mode)
1877 && REAL_VALUE_ISINF (result)
1878 && !REAL_VALUE_ISINF (d1)
1879 && !REAL_VALUE_ISINF (d2))
1882 /* Don't constant fold this floating point operation if the
1883 result may dependent upon the run-time rounding mode and
1884 flag_rounding_math is set, or if GCC's software emulation
1885 is unable to accurately represent the result. */
1886 if ((flag_rounding_math
1887 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1888 && !flag_unsafe_math_optimizations))
1889 && (inexact || !real_identical (&result, &value)))
1892 t = build_real (type, result);
1894 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1898 if (TREE_CODE (arg1) == FIXED_CST)
1900 FIXED_VALUE_TYPE f1;
1901 FIXED_VALUE_TYPE f2;
1902 FIXED_VALUE_TYPE result;
1907 /* The following codes are handled by fixed_arithmetic. */
1913 case TRUNC_DIV_EXPR:
1914 f2 = TREE_FIXED_CST (arg2);
1919 f2.data.high = TREE_INT_CST_HIGH (arg2);
1920 f2.data.low = TREE_INT_CST_LOW (arg2);
1928 f1 = TREE_FIXED_CST (arg1);
1929 type = TREE_TYPE (arg1);
1930 sat_p = TYPE_SATURATING (type);
1931 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1932 t = build_fixed (type, result);
1933 /* Propagate overflow flags. */
1934 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1937 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1944 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 tree type = TREE_TYPE (arg1);
1947 tree r1 = TREE_REALPART (arg1);
1948 tree i1 = TREE_IMAGPART (arg1);
1949 tree r2 = TREE_REALPART (arg2);
1950 tree i2 = TREE_IMAGPART (arg2);
1957 real = const_binop (code, r1, r2, notrunc);
1958 imag = const_binop (code, i1, i2, notrunc);
1962 real = const_binop (MINUS_EXPR,
1963 const_binop (MULT_EXPR, r1, r2, notrunc),
1964 const_binop (MULT_EXPR, i1, i2, notrunc),
1966 imag = const_binop (PLUS_EXPR,
1967 const_binop (MULT_EXPR, r1, i2, notrunc),
1968 const_binop (MULT_EXPR, i1, r2, notrunc),
1975 = const_binop (PLUS_EXPR,
1976 const_binop (MULT_EXPR, r2, r2, notrunc),
1977 const_binop (MULT_EXPR, i2, i2, notrunc),
1980 = const_binop (PLUS_EXPR,
1981 const_binop (MULT_EXPR, r1, r2, notrunc),
1982 const_binop (MULT_EXPR, i1, i2, notrunc),
1985 = const_binop (MINUS_EXPR,
1986 const_binop (MULT_EXPR, i1, r2, notrunc),
1987 const_binop (MULT_EXPR, r1, i2, notrunc),
1990 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1991 code = TRUNC_DIV_EXPR;
1993 real = const_binop (code, t1, magsquared, notrunc);
1994 imag = const_binop (code, t2, magsquared, notrunc);
2003 return build_complex (type, real, imag);
2009 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2010 indicates which particular sizetype to create. */
2013 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 return build_int_cst (sizetype_tab[(int) kind], number);
2018 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2019 is a tree code. The type of the result is taken from the operands.
2020 Both must be equivalent integer types, ala int_binop_types_match_p.
2021 If the operands are constant, so is the result. */
2024 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 tree type = TREE_TYPE (arg0);
2028 if (arg0 == error_mark_node || arg1 == error_mark_node)
2029 return error_mark_node;
2031 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2034 /* Handle the special case of two integer constants faster. */
2035 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 /* And some specific cases even faster than that. */
2038 if (code == PLUS_EXPR)
2040 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2042 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2045 else if (code == MINUS_EXPR)
2047 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2050 else if (code == MULT_EXPR)
2052 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2056 /* Handle general case of two integer constants. */
2057 return int_const_binop (code, arg0, arg1, 0);
2060 return fold_build2 (code, type, arg0, arg1);
2063 /* Given two values, either both of sizetype or both of bitsizetype,
2064 compute the difference between the two values. Return the value
2065 in signed type corresponding to the type of the operands. */
2068 size_diffop (tree arg0, tree arg1)
2070 tree type = TREE_TYPE (arg0);
2073 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2076 /* If the type is already signed, just do the simple thing. */
2077 if (!TYPE_UNSIGNED (type))
2078 return size_binop (MINUS_EXPR, arg0, arg1);
2080 if (type == sizetype)
2082 else if (type == bitsizetype)
2083 ctype = sbitsizetype;
2085 ctype = signed_type_for (type);
2087 /* If either operand is not a constant, do the conversions to the signed
2088 type and subtract. The hardware will do the right thing with any
2089 overflow in the subtraction. */
2090 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2091 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2092 fold_convert (ctype, arg1));
2094 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2095 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2096 overflow) and negate (which can't either). Special-case a result
2097 of zero while we're here. */
2098 if (tree_int_cst_equal (arg0, arg1))
2099 return build_int_cst (ctype, 0);
2100 else if (tree_int_cst_lt (arg1, arg0))
2101 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2103 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2104 fold_convert (ctype, size_binop (MINUS_EXPR,
2108 /* A subroutine of fold_convert_const handling conversions of an
2109 INTEGER_CST to another integer type. */
2112 fold_convert_const_int_from_int (tree type, const_tree arg1)
2116 /* Given an integer constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2119 TREE_INT_CST_HIGH (arg1),
2120 /* Don't set the overflow when
2121 converting from a pointer, */
2122 !POINTER_TYPE_P (TREE_TYPE (arg1))
2123 /* or to a sizetype with same signedness
2124 and the precision is unchanged.
2125 ??? sizetype is always sign-extended,
2126 but its signedness depends on the
2127 frontend. Thus we see spurious overflows
2128 here if we do not check this. */
2129 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2130 == TYPE_PRECISION (type))
2131 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2132 == TYPE_UNSIGNED (type))
2133 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2134 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2135 || (TREE_CODE (type) == INTEGER_TYPE
2136 && TYPE_IS_SIZETYPE (type)))),
2137 (TREE_INT_CST_HIGH (arg1) < 0
2138 && (TYPE_UNSIGNED (type)
2139 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2140 | TREE_OVERFLOW (arg1));
2145 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2146 to an integer type. */
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification,
2156 that IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 HOST_WIDE_INT high, low;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2185 /* See if R is less than the lower bound or greater than the
2190 tree lt = TYPE_MIN_VALUE (type);
2191 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2192 if (REAL_VALUES_LESS (r, l))
2195 high = TREE_INT_CST_HIGH (lt);
2196 low = TREE_INT_CST_LOW (lt);
2202 tree ut = TYPE_MAX_VALUE (type);
2205 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2206 if (REAL_VALUES_LESS (u, r))
2209 high = TREE_INT_CST_HIGH (ut);
2210 low = TREE_INT_CST_LOW (ut);
2216 REAL_VALUE_TO_INT (&low, &high, r);
2218 t = force_fit_type_double (type, low, high, -1,
2219 overflow | TREE_OVERFLOW (arg1));
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2230 double_int temp, temp_trunc;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0.
2257 By checking if the fractional bits are not zero to add 1 to temp. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
2278 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2279 to another floating point type. */
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2295 to a floating point type. */
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2312 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2313 to another fixed-point type. */
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2337 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2338 to a fixed-point type. */
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2364 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2365 to a fixed-point type. */
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2401 if (TREE_CODE (arg1) == INTEGER_CST)
2402 return fold_convert_const_int_from_int (type, arg1);
2403 else if (TREE_CODE (arg1) == REAL_CST)
2404 return fold_convert_const_int_from_real (code, type, arg1);
2405 else if (TREE_CODE (arg1) == FIXED_CST)
2406 return fold_convert_const_int_from_fixed (type, arg1);
2408 else if (TREE_CODE (type) == REAL_TYPE)
2410 if (TREE_CODE (arg1) == INTEGER_CST)
2411 return build_real_from_int_cst (type, arg1);
2412 else if (TREE_CODE (arg1) == REAL_CST)
2413 return fold_convert_const_real_from_real (type, arg1);
2414 else if (TREE_CODE (arg1) == FIXED_CST)
2415 return fold_convert_const_real_from_fixed (type, arg1);
2417 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2419 if (TREE_CODE (arg1) == FIXED_CST)
2420 return fold_convert_const_fixed_from_fixed (type, arg1);
2421 else if (TREE_CODE (arg1) == INTEGER_CST)
2422 return fold_convert_const_fixed_from_int (type, arg1);
2423 else if (TREE_CODE (arg1) == REAL_CST)
2424 return fold_convert_const_fixed_from_real (type, arg1);
2429 /* Construct a vector of zero elements of vector type TYPE. */
2432 build_zero_vector (tree type)
2437 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2438 units = TYPE_VECTOR_SUBPARTS (type);
2441 for (i = 0; i < units; i++)
2442 list = tree_cons (NULL_TREE, elem, list);
2443 return build_vector (type, list);
2446 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2449 fold_convertible_p (const_tree type, const_tree arg)
2451 tree orig = TREE_TYPE (arg);
2456 if (TREE_CODE (arg) == ERROR_MARK
2457 || TREE_CODE (type) == ERROR_MARK
2458 || TREE_CODE (orig) == ERROR_MARK)
2461 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2464 switch (TREE_CODE (type))
2466 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2467 case POINTER_TYPE: case REFERENCE_TYPE:
2469 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2470 || TREE_CODE (orig) == OFFSET_TYPE)
2472 return (TREE_CODE (orig) == VECTOR_TYPE
2473 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2476 case FIXED_POINT_TYPE:
2480 return TREE_CODE (type) == TREE_CODE (orig);
2487 /* Convert expression ARG to type TYPE. Used by the middle-end for
2488 simple conversions in preference to calling the front-end's convert. */
2491 fold_convert (tree type, tree arg)
2493 tree orig = TREE_TYPE (arg);
2499 if (TREE_CODE (arg) == ERROR_MARK
2500 || TREE_CODE (type) == ERROR_MARK
2501 || TREE_CODE (orig) == ERROR_MARK)
2502 return error_mark_node;
2504 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2505 return fold_build1 (NOP_EXPR, type, arg);
2507 switch (TREE_CODE (type))
2509 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2510 case POINTER_TYPE: case REFERENCE_TYPE:
2512 if (TREE_CODE (arg) == INTEGER_CST)
2514 tem = fold_convert_const (NOP_EXPR, type, arg);
2515 if (tem != NULL_TREE)
2518 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2519 || TREE_CODE (orig) == OFFSET_TYPE)
2520 return fold_build1 (NOP_EXPR, type, arg);
2521 if (TREE_CODE (orig) == COMPLEX_TYPE)
2523 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2524 return fold_convert (type, tem);
2526 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2527 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2528 return fold_build1 (NOP_EXPR, type, arg);
2531 if (TREE_CODE (arg) == INTEGER_CST)
2533 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2534 if (tem != NULL_TREE)
2537 else if (TREE_CODE (arg) == REAL_CST)
2539 tem = fold_convert_const (NOP_EXPR, type, arg);
2540 if (tem != NULL_TREE)
2543 else if (TREE_CODE (arg) == FIXED_CST)
2545 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2546 if (tem != NULL_TREE)
2550 switch (TREE_CODE (orig))
2553 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2554 case POINTER_TYPE: case REFERENCE_TYPE:
2555 return fold_build1 (FLOAT_EXPR, type, arg);
2558 return fold_build1 (NOP_EXPR, type, arg);
2560 case FIXED_POINT_TYPE:
2561 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2564 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2565 return fold_convert (type, tem);
2571 case FIXED_POINT_TYPE:
2572 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2573 || TREE_CODE (arg) == REAL_CST)
2575 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2576 if (tem != NULL_TREE)
2580 switch (TREE_CODE (orig))
2582 case FIXED_POINT_TYPE:
2587 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2590 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2591 return fold_convert (type, tem);
2598 switch (TREE_CODE (orig))
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2604 case FIXED_POINT_TYPE:
2605 return build2 (COMPLEX_EXPR, type,
2606 fold_convert (TREE_TYPE (type), arg),
2607 fold_convert (TREE_TYPE (type), integer_zero_node));
2612 if (TREE_CODE (arg) == COMPLEX_EXPR)
2614 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2615 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2616 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2619 arg = save_expr (arg);
2620 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2621 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2622 rpart = fold_convert (TREE_TYPE (type), rpart);
2623 ipart = fold_convert (TREE_TYPE (type), ipart);
2624 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2632 if (integer_zerop (arg))
2633 return build_zero_vector (type);
2634 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2636 || TREE_CODE (orig) == VECTOR_TYPE);
2637 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2640 tem = fold_ignored_result (arg);
2641 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2643 return fold_build1 (NOP_EXPR, type, tem);
2650 /* Return false if expr can be assumed not to be an lvalue, true
2654 maybe_lvalue_p (const_tree x)
2656 /* We only need to wrap lvalue tree codes. */
2657 switch (TREE_CODE (x))
2668 case ALIGN_INDIRECT_REF:
2669 case MISALIGNED_INDIRECT_REF:
2671 case ARRAY_RANGE_REF:
2677 case PREINCREMENT_EXPR:
2678 case PREDECREMENT_EXPR:
2680 case TRY_CATCH_EXPR:
2681 case WITH_CLEANUP_EXPR:
2684 case GIMPLE_MODIFY_STMT:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2712 if (! maybe_lvalue_p (x))
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2737 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2784 swap_tree_comparison (enum tree_code code)
2791 case UNORDERED_EXPR:
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2843 return COMPCODE_UNLT;
2845 return COMPCODE_UNEQ;
2847 return COMPCODE_UNLE;
2849 return COMPCODE_UNGT;
2851 return COMPCODE_LTGT;
2853 return COMPCODE_UNGE;
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. */
3037 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3040 /* If both types don't have the same precision, then it is not safe
3042 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3048 /* In case both args are comparisons but with different comparison
3049 code, try to swap the comparison operands of one arg to produce
3050 a match and compare that variant. */
3051 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3052 && COMPARISON_CLASS_P (arg0)
3053 && COMPARISON_CLASS_P (arg1))
3055 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3057 if (TREE_CODE (arg0) == swap_code)
3058 return operand_equal_p (TREE_OPERAND (arg0, 0),
3059 TREE_OPERAND (arg1, 1), flags)
3060 && operand_equal_p (TREE_OPERAND (arg0, 1),
3061 TREE_OPERAND (arg1, 0), flags);
3064 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3065 /* This is needed for conversions and for COMPONENT_REF.
3066 Might as well play it safe and always test this. */
3067 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3068 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3069 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3072 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3073 We don't care about side effects in that case because the SAVE_EXPR
3074 takes care of that for us. In all other cases, two expressions are
3075 equal if they have no side effects. If we have two identical
3076 expressions with side effects that should be treated the same due
3077 to the only side effects being identical SAVE_EXPR's, that will
3078 be detected in the recursive calls below. */
3079 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3080 && (TREE_CODE (arg0) == SAVE_EXPR
3081 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3084 /* Next handle constant cases, those for which we can return 1 even
3085 if ONLY_CONST is set. */
3086 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3087 switch (TREE_CODE (arg0))
3090 return tree_int_cst_equal (arg0, arg1);
3093 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3094 TREE_FIXED_CST (arg1));
3097 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3098 TREE_REAL_CST (arg1)))
3102 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3104 /* If we do not distinguish between signed and unsigned zero,
3105 consider them equal. */
3106 if (real_zerop (arg0) && real_zerop (arg1))
3115 v1 = TREE_VECTOR_CST_ELTS (arg0);
3116 v2 = TREE_VECTOR_CST_ELTS (arg1);
3119 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3122 v1 = TREE_CHAIN (v1);
3123 v2 = TREE_CHAIN (v2);
3130 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3132 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3136 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3137 && ! memcmp (TREE_STRING_POINTER (arg0),
3138 TREE_STRING_POINTER (arg1),
3139 TREE_STRING_LENGTH (arg0)));
3142 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3148 if (flags & OEP_ONLY_CONST)
3151 /* Define macros to test an operand from arg0 and arg1 for equality and a
3152 variant that allows null and views null as being different from any
3153 non-null value. In the latter case, if either is null, the both
3154 must be; otherwise, do the normal comparison. */
3155 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3156 TREE_OPERAND (arg1, N), flags)
3158 #define OP_SAME_WITH_NULL(N) \
3159 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3160 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3162 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3165 /* Two conversions are equal only if signedness and modes match. */
3166 switch (TREE_CODE (arg0))
3170 case FIX_TRUNC_EXPR:
3171 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3172 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3182 case tcc_comparison:
3184 if (OP_SAME (0) && OP_SAME (1))
3187 /* For commutative ops, allow the other order. */
3188 return (commutative_tree_code (TREE_CODE (arg0))
3189 && operand_equal_p (TREE_OPERAND (arg0, 0),
3190 TREE_OPERAND (arg1, 1), flags)
3191 && operand_equal_p (TREE_OPERAND (arg0, 1),
3192 TREE_OPERAND (arg1, 0), flags));
3195 /* If either of the pointer (or reference) expressions we are
3196 dereferencing contain a side effect, these cannot be equal. */
3197 if (TREE_SIDE_EFFECTS (arg0)
3198 || TREE_SIDE_EFFECTS (arg1))
3201 switch (TREE_CODE (arg0))
3204 case ALIGN_INDIRECT_REF:
3205 case MISALIGNED_INDIRECT_REF:
3211 case ARRAY_RANGE_REF:
3212 /* Operands 2 and 3 may be null.
3213 Compare the array index by value if it is constant first as we
3214 may have different types but same value here. */
3216 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3217 TREE_OPERAND (arg1, 1))
3219 && OP_SAME_WITH_NULL (2)
3220 && OP_SAME_WITH_NULL (3));
3223 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3224 may be NULL when we're called to compare MEM_EXPRs. */
3225 return OP_SAME_WITH_NULL (0)
3227 && OP_SAME_WITH_NULL (2);
3230 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3236 case tcc_expression:
3237 switch (TREE_CODE (arg0))
3240 case TRUTH_NOT_EXPR:
3243 case TRUTH_ANDIF_EXPR:
3244 case TRUTH_ORIF_EXPR:
3245 return OP_SAME (0) && OP_SAME (1);
3247 case TRUTH_AND_EXPR:
3249 case TRUTH_XOR_EXPR:
3250 if (OP_SAME (0) && OP_SAME (1))
3253 /* Otherwise take into account this is a commutative operation. */
3254 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3255 TREE_OPERAND (arg1, 1), flags)
3256 && operand_equal_p (TREE_OPERAND (arg0, 1),
3257 TREE_OPERAND (arg1, 0), flags));
3264 switch (TREE_CODE (arg0))
3267 /* If the CALL_EXPRs call different functions, then they
3268 clearly can not be equal. */
3269 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3274 unsigned int cef = call_expr_flags (arg0);
3275 if (flags & OEP_PURE_SAME)
3276 cef &= ECF_CONST | ECF_PURE;
3283 /* Now see if all the arguments are the same. */
3285 const_call_expr_arg_iterator iter0, iter1;
3287 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3288 a1 = first_const_call_expr_arg (arg1, &iter1);
3290 a0 = next_const_call_expr_arg (&iter0),
3291 a1 = next_const_call_expr_arg (&iter1))
3292 if (! operand_equal_p (a0, a1, flags))
3295 /* If we get here and both argument lists are exhausted
3296 then the CALL_EXPRs are equal. */
3297 return ! (a0 || a1);
3303 case tcc_declaration:
3304 /* Consider __builtin_sqrt equal to sqrt. */
3305 return (TREE_CODE (arg0) == FUNCTION_DECL
3306 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3307 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3308 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3315 #undef OP_SAME_WITH_NULL
3318 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3319 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3321 When in doubt, return 0. */
3324 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3326 int unsignedp1, unsignedpo;
3327 tree primarg0, primarg1, primother;
3328 unsigned int correct_width;
3330 if (operand_equal_p (arg0, arg1, 0))
3333 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3334 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3337 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3338 and see if the inner values are the same. This removes any
3339 signedness comparison, which doesn't matter here. */
3340 primarg0 = arg0, primarg1 = arg1;
3341 STRIP_NOPS (primarg0);
3342 STRIP_NOPS (primarg1);
3343 if (operand_equal_p (primarg0, primarg1, 0))
3346 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3347 actual comparison operand, ARG0.
3349 First throw away any conversions to wider types
3350 already present in the operands. */
3352 primarg1 = get_narrower (arg1, &unsignedp1);
3353 primother = get_narrower (other, &unsignedpo);
3355 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3356 if (unsignedp1 == unsignedpo
3357 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3358 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3360 tree type = TREE_TYPE (arg0);
3362 /* Make sure shorter operand is extended the right way
3363 to match the longer operand. */
3364 primarg1 = fold_convert (signed_or_unsigned_type_for
3365 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3367 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3374 /* See if ARG is an expression that is either a comparison or is performing
3375 arithmetic on comparisons. The comparisons must only be comparing
3376 two different values, which will be stored in *CVAL1 and *CVAL2; if
3377 they are nonzero it means that some operands have already been found.
3378 No variables may be used anywhere else in the expression except in the
3379 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3380 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3382 If this is true, return 1. Otherwise, return zero. */
3385 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3387 enum tree_code code = TREE_CODE (arg);
3388 enum tree_code_class class = TREE_CODE_CLASS (code);
3390 /* We can handle some of the tcc_expression cases here. */
3391 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3393 else if (class == tcc_expression
3394 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3395 || code == COMPOUND_EXPR))
3398 else if (class == tcc_expression && code == SAVE_EXPR
3399 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3401 /* If we've already found a CVAL1 or CVAL2, this expression is
3402 two complex to handle. */
3403 if (*cval1 || *cval2)
3413 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3416 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3417 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3418 cval1, cval2, save_p));
3423 case tcc_expression:
3424 if (code == COND_EXPR)
3425 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3426 cval1, cval2, save_p)
3427 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3428 cval1, cval2, save_p)
3429 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3430 cval1, cval2, save_p));
3433 case tcc_comparison:
3434 /* First see if we can handle the first operand, then the second. For
3435 the second operand, we know *CVAL1 can't be zero. It must be that
3436 one side of the comparison is each of the values; test for the
3437 case where this isn't true by failing if the two operands
3440 if (operand_equal_p (TREE_OPERAND (arg, 0),
3441 TREE_OPERAND (arg, 1), 0))
3445 *cval1 = TREE_OPERAND (arg, 0);
3446 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3448 else if (*cval2 == 0)
3449 *cval2 = TREE_OPERAND (arg, 0);
3450 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3455 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3457 else if (*cval2 == 0)
3458 *cval2 = TREE_OPERAND (arg, 1);
3459 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3471 /* ARG is a tree that is known to contain just arithmetic operations and
3472 comparisons. Evaluate the operations in the tree substituting NEW0 for
3473 any occurrence of OLD0 as an operand of a comparison and likewise for
3477 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3479 tree type = TREE_TYPE (arg);
3480 enum tree_code code = TREE_CODE (arg);
3481 enum tree_code_class class = TREE_CODE_CLASS (code);
3483 /* We can handle some of the tcc_expression cases here. */
3484 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3486 else if (class == tcc_expression
3487 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3493 return fold_build1 (code, type,
3494 eval_subst (TREE_OPERAND (arg, 0),
3495 old0, new0, old1, new1));
3498 return fold_build2 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1),
3501 eval_subst (TREE_OPERAND (arg, 1),
3502 old0, new0, old1, new1));
3504 case tcc_expression:
3508 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3511 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3514 return fold_build3 (code, type,
3515 eval_subst (TREE_OPERAND (arg, 0),
3516 old0, new0, old1, new1),
3517 eval_subst (TREE_OPERAND (arg, 1),
3518 old0, new0, old1, new1),
3519 eval_subst (TREE_OPERAND (arg, 2),
3520 old0, new0, old1, new1));
3524 /* Fall through - ??? */
3526 case tcc_comparison:
3528 tree arg0 = TREE_OPERAND (arg, 0);
3529 tree arg1 = TREE_OPERAND (arg, 1);
3531 /* We need to check both for exact equality and tree equality. The
3532 former will be true if the operand has a side-effect. In that
3533 case, we know the operand occurred exactly once. */
3535 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3537 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3540 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3542 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3545 return fold_build2 (code, type, arg0, arg1);
3553 /* Return a tree for the case when the result of an expression is RESULT
3554 converted to TYPE and OMITTED was previously an operand of the expression
3555 but is now not needed (e.g., we folded OMITTED * 0).
3557 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3558 the conversion of RESULT to TYPE. */
3561 omit_one_operand (tree type, tree result, tree omitted)
3563 tree t = fold_convert (type, result);
3565 /* If the resulting operand is an empty statement, just return the omitted
3566 statement casted to void. */
3567 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3568 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3570 if (TREE_SIDE_EFFECTS (omitted))
3571 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3573 return non_lvalue (t);
3576 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3579 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3581 tree t = fold_convert (type, result);
3583 /* If the resulting operand is an empty statement, just return the omitted
3584 statement casted to void. */
3585 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3586 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3588 if (TREE_SIDE_EFFECTS (omitted))
3589 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3591 return pedantic_non_lvalue (t);
3594 /* Return a tree for the case when the result of an expression is RESULT
3595 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3596 of the expression but are now not needed.
3598 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3599 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3600 evaluated before OMITTED2. Otherwise, if neither has side effects,
3601 just do the conversion of RESULT to TYPE. */
3604 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3606 tree t = fold_convert (type, result);
3608 if (TREE_SIDE_EFFECTS (omitted2))
3609 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3610 if (TREE_SIDE_EFFECTS (omitted1))
3611 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3613 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3617 /* Return a simplified tree node for the truth-negation of ARG. This
3618 never alters ARG itself. We assume that ARG is an operation that
3619 returns a truth value (0 or 1).
3621 FIXME: one would think we would fold the result, but it causes
3622 problems with the dominator optimizer. */
3625 fold_truth_not_expr (tree arg)
3627 tree type = TREE_TYPE (arg);
3628 enum tree_code code = TREE_CODE (arg);
3630 /* If this is a comparison, we can simply invert it, except for
3631 floating-point non-equality comparisons, in which case we just
3632 enclose a TRUTH_NOT_EXPR around what we have. */
3634 if (TREE_CODE_CLASS (code) == tcc_comparison)
3636 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3637 if (FLOAT_TYPE_P (op_type)
3638 && flag_trapping_math
3639 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3640 && code != NE_EXPR && code != EQ_EXPR)
3644 code = invert_tree_comparison (code,
3645 HONOR_NANS (TYPE_MODE (op_type)));
3646 if (code == ERROR_MARK)
3649 return build2 (code, type,
3650 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3657 return constant_boolean_node (integer_zerop (arg), type);
3659 case TRUTH_AND_EXPR:
3660 return build2 (TRUTH_OR_EXPR, type,
3661 invert_truthvalue (TREE_OPERAND (arg, 0)),
3662 invert_truthvalue (TREE_OPERAND (arg, 1)));
3665 return build2 (TRUTH_AND_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3669 case TRUTH_XOR_EXPR:
3670 /* Here we can invert either operand. We invert the first operand
3671 unless the second operand is a TRUTH_NOT_EXPR in which case our
3672 result is the XOR of the first operand with the inside of the
3673 negation of the second operand. */
3675 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3676 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3677 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3679 return build2 (TRUTH_XOR_EXPR, type,
3680 invert_truthvalue (TREE_OPERAND (arg, 0)),
3681 TREE_OPERAND (arg, 1));
3683 case TRUTH_ANDIF_EXPR:
3684 return build2 (TRUTH_ORIF_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 invert_truthvalue (TREE_OPERAND (arg, 1)));
3688 case TRUTH_ORIF_EXPR:
3689 return build2 (TRUTH_ANDIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_NOT_EXPR:
3694 return TREE_OPERAND (arg, 0);
3698 tree arg1 = TREE_OPERAND (arg, 1);
3699 tree arg2 = TREE_OPERAND (arg, 2);
3700 /* A COND_EXPR may have a throw as one operand, which
3701 then has void type. Just leave void operands
3703 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3704 VOID_TYPE_P (TREE_TYPE (arg1))
3705 ? arg1 : invert_truthvalue (arg1),
3706 VOID_TYPE_P (TREE_TYPE (arg2))
3707 ? arg2 : invert_truthvalue (arg2));
3711 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3712 invert_truthvalue (TREE_OPERAND (arg, 1)));
3714 case NON_LVALUE_EXPR:
3715 return invert_truthvalue (TREE_OPERAND (arg, 0));
3718 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3719 return build1 (TRUTH_NOT_EXPR, type, arg);
3723 return build1 (TREE_CODE (arg), type,
3724 invert_truthvalue (TREE_OPERAND (arg, 0)));
3727 if (!integer_onep (TREE_OPERAND (arg, 1)))
3729 return build2 (EQ_EXPR, type, arg,
3730 build_int_cst (type, 0));
3733 return build1 (TRUTH_NOT_EXPR, type, arg);
3735 case CLEANUP_POINT_EXPR:
3736 return build1 (CLEANUP_POINT_EXPR, type,
3737 invert_truthvalue (TREE_OPERAND (arg, 0)));
3746 /* Return a simplified tree node for the truth-negation of ARG. This
3747 never alters ARG itself. We assume that ARG is an operation that
3748 returns a truth value (0 or 1).
3750 FIXME: one would think we would fold the result, but it causes
3751 problems with the dominator optimizer. */
3754 invert_truthvalue (tree arg)
3758 if (TREE_CODE (arg) == ERROR_MARK)
3761 tem = fold_truth_not_expr (arg);
3763 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3768 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3769 operands are another bit-wise operation with a common input. If so,
3770 distribute the bit operations to save an operation and possibly two if
3771 constants are involved. For example, convert
3772 (A | B) & (A | C) into A | (B & C)
3773 Further simplification will occur if B and C are constants.
3775 If this optimization cannot be done, 0 will be returned. */
3778 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3783 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3784 || TREE_CODE (arg0) == code
3785 || (TREE_CODE (arg0) != BIT_AND_EXPR
3786 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3789 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3791 common = TREE_OPERAND (arg0, 0);
3792 left = TREE_OPERAND (arg0, 1);
3793 right = TREE_OPERAND (arg1, 1);
3795 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3797 common = TREE_OPERAND (arg0, 0);
3798 left = TREE_OPERAND (arg0, 1);
3799 right = TREE_OPERAND (arg1, 0);
3801 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3803 common = TREE_OPERAND (arg0, 1);
3804 left = TREE_OPERAND (arg0, 0);
3805 right = TREE_OPERAND (arg1, 1);
3807 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3809 common = TREE_OPERAND (arg0, 1);
3810 left = TREE_OPERAND (arg0, 0);
3811 right = TREE_OPERAND (arg1, 0);
3816 return fold_build2 (TREE_CODE (arg0), type, common,
3817 fold_build2 (code, type, left, right));
3820 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3821 with code CODE. This optimization is unsafe. */
3823 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3825 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3826 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3828 /* (A / C) +- (B / C) -> (A +- B) / C. */
3830 && operand_equal_p (TREE_OPERAND (arg0, 1),
3831 TREE_OPERAND (arg1, 1), 0))
3832 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3833 fold_build2 (code, type,
3834 TREE_OPERAND (arg0, 0),
3835 TREE_OPERAND (arg1, 0)),
3836 TREE_OPERAND (arg0, 1));
3838 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3839 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3840 TREE_OPERAND (arg1, 0), 0)
3841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3842 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3844 REAL_VALUE_TYPE r0, r1;
3845 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3846 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3848 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3850 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3851 real_arithmetic (&r0, code, &r0, &r1);
3852 return fold_build2 (MULT_EXPR, type,
3853 TREE_OPERAND (arg0, 0),
3854 build_real (type, r0));
3860 /* Subroutine for fold_truthop: decode a field reference.
3862 If EXP is a comparison reference, we return the innermost reference.
3864 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3865 set to the starting bit number.
3867 If the innermost field can be completely contained in a mode-sized
3868 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3870 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3871 otherwise it is not changed.
3873 *PUNSIGNEDP is set to the signedness of the field.
3875 *PMASK is set to the mask used. This is either contained in a
3876 BIT_AND_EXPR or derived from the width of the field.
3878 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3880 Return 0 if this is not a component reference or is one that we can't
3881 do anything with. */
3884 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3885 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3886 int *punsignedp, int *pvolatilep,
3887 tree *pmask, tree *pand_mask)
3889 tree outer_type = 0;
3891 tree mask, inner, offset;
3893 unsigned int precision;
3895 /* All the optimizations using this function assume integer fields.
3896 There are problems with FP fields since the type_for_size call
3897 below can fail for, e.g., XFmode. */
3898 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3901 /* We are interested in the bare arrangement of bits, so strip everything
3902 that doesn't affect the machine mode. However, record the type of the
3903 outermost expression if it may matter below. */
3904 if (TREE_CODE (exp) == NOP_EXPR
3905 || TREE_CODE (exp) == CONVERT_EXPR
3906 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3907 outer_type = TREE_TYPE (exp);
3910 if (TREE_CODE (exp) == BIT_AND_EXPR)
3912 and_mask = TREE_OPERAND (exp, 1);
3913 exp = TREE_OPERAND (exp, 0);
3914 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3915 if (TREE_CODE (and_mask) != INTEGER_CST)
3919 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3920 punsignedp, pvolatilep, false);
3921 if ((inner == exp && and_mask == 0)
3922 || *pbitsize < 0 || offset != 0
3923 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3926 /* If the number of bits in the reference is the same as the bitsize of
3927 the outer type, then the outer type gives the signedness. Otherwise
3928 (in case of a small bitfield) the signedness is unchanged. */
3929 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3930 *punsignedp = TYPE_UNSIGNED (outer_type);
3932 /* Compute the mask to access the bitfield. */
3933 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3934 precision = TYPE_PRECISION (unsigned_type);
3936 mask = build_int_cst_type (unsigned_type, -1);
3938 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3939 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3941 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3943 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3944 fold_convert (unsigned_type, and_mask), mask);
3947 *pand_mask = and_mask;
3951 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3952 represents the sign bit of EXP's type. If EXP represents a sign
3953 or zero extension, also test VAL against the unextended type.
3954 The return value is the (sub)expression whose sign bit is VAL,
3955 or NULL_TREE otherwise. */
3958 sign_bit_p (tree exp, const_tree val)
3960 unsigned HOST_WIDE_INT mask_lo, lo;
3961 HOST_WIDE_INT mask_hi, hi;
3965 /* Tree EXP must have an integral type. */
3966 t = TREE_TYPE (exp);
3967 if (! INTEGRAL_TYPE_P (t))
3970 /* Tree VAL must be an integer constant. */
3971 if (TREE_CODE (val) != INTEGER_CST
3972 || TREE_OVERFLOW (val))
3975 width = TYPE_PRECISION (t);
3976 if (width > HOST_BITS_PER_WIDE_INT)
3978 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3981 mask_hi = ((unsigned HOST_WIDE_INT) -1
3982 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3988 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3991 mask_lo = ((unsigned HOST_WIDE_INT) -1
3992 >> (HOST_BITS_PER_WIDE_INT - width));
3995 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3996 treat VAL as if it were unsigned. */
3997 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3998 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4001 /* Handle extension from a narrower type. */
4002 if (TREE_CODE (exp) == NOP_EXPR
4003 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4004 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4009 /* Subroutine for fold_truthop: determine if an operand is simple enough
4010 to be evaluated unconditionally. */
4013 simple_operand_p (const_tree exp)
4015 /* Strip any conversions that don't change the machine mode. */
4018 return (CONSTANT_CLASS_P (exp)
4019 || TREE_CODE (exp) == SSA_NAME
4021 && ! TREE_ADDRESSABLE (exp)
4022 && ! TREE_THIS_VOLATILE (exp)
4023 && ! DECL_NONLOCAL (exp)
4024 /* Don't regard global variables as simple. They may be
4025 allocated in ways unknown to the compiler (shared memory,
4026 #pragma weak, etc). */
4027 && ! TREE_PUBLIC (exp)
4028 && ! DECL_EXTERNAL (exp)
4029 /* Loading a static variable is unduly expensive, but global
4030 registers aren't expensive. */
4031 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4034 /* The following functions are subroutines to fold_range_test and allow it to
4035 try to change a logical combination of comparisons into a range test.
4038 X == 2 || X == 3 || X == 4 || X == 5
4042 (unsigned) (X - 2) <= 3
4044 We describe each set of comparisons as being either inside or outside
4045 a range, using a variable named like IN_P, and then describe the
4046 range with a lower and upper bound. If one of the bounds is omitted,
4047 it represents either the highest or lowest value of the type.
4049 In the comments below, we represent a range by two numbers in brackets
4050 preceded by a "+" to designate being inside that range, or a "-" to
4051 designate being outside that range, so the condition can be inverted by
4052 flipping the prefix. An omitted bound is represented by a "-". For
4053 example, "- [-, 10]" means being outside the range starting at the lowest
4054 possible value and ending at 10, in other words, being greater than 10.
4055 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4058 We set up things so that the missing bounds are handled in a consistent
4059 manner so neither a missing bound nor "true" and "false" need to be
4060 handled using a special case. */
4062 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4063 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4064 and UPPER1_P are nonzero if the respective argument is an upper bound
4065 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4066 must be specified for a comparison. ARG1 will be converted to ARG0's
4067 type if both are specified. */
4070 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4071 tree arg1, int upper1_p)
4077 /* If neither arg represents infinity, do the normal operation.
4078 Else, if not a comparison, return infinity. Else handle the special
4079 comparison rules. Note that most of the cases below won't occur, but
4080 are handled for consistency. */
4082 if (arg0 != 0 && arg1 != 0)
4084 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4085 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4087 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4090 if (TREE_CODE_CLASS (code) != tcc_comparison)
4093 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4094 for neither. In real maths, we cannot assume open ended ranges are
4095 the same. But, this is computer arithmetic, where numbers are finite.
4096 We can therefore make the transformation of any unbounded range with
4097 the value Z, Z being greater than any representable number. This permits
4098 us to treat unbounded ranges as equal. */
4099 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4100 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4104 result = sgn0 == sgn1;
4107 result = sgn0 != sgn1;
4110 result = sgn0 < sgn1;
4113 result = sgn0 <= sgn1;
4116 result = sgn0 > sgn1;
4119 result = sgn0 >= sgn1;
4125 return constant_boolean_node (result, type);
4128 /* Given EXP, a logical expression, set the range it is testing into
4129 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4130 actually being tested. *PLOW and *PHIGH will be made of the same
4131 type as the returned expression. If EXP is not a comparison, we
4132 will most likely not be returning a useful value and range. Set
4133 *STRICT_OVERFLOW_P to true if the return value is only valid
4134 because signed overflow is undefined; otherwise, do not change
4135 *STRICT_OVERFLOW_P. */
4138 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4139 bool *strict_overflow_p)
4141 enum tree_code code;
4142 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4143 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4145 tree low, high, n_low, n_high;
4147 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4148 and see if we can refine the range. Some of the cases below may not
4149 happen, but it doesn't seem worth worrying about this. We "continue"
4150 the outer loop when we've changed something; otherwise we "break"
4151 the switch, which will "break" the while. */
4154 low = high = build_int_cst (TREE_TYPE (exp), 0);
4158 code = TREE_CODE (exp);
4159 exp_type = TREE_TYPE (exp);
4161 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4163 if (TREE_OPERAND_LENGTH (exp) > 0)
4164 arg0 = TREE_OPERAND (exp, 0);
4165 if (TREE_CODE_CLASS (code) == tcc_comparison
4166 || TREE_CODE_CLASS (code) == tcc_unary
4167 || TREE_CODE_CLASS (code) == tcc_binary)
4168 arg0_type = TREE_TYPE (arg0);
4169 if (TREE_CODE_CLASS (code) == tcc_binary
4170 || TREE_CODE_CLASS (code) == tcc_comparison
4171 || (TREE_CODE_CLASS (code) == tcc_expression
4172 && TREE_OPERAND_LENGTH (exp) > 1))
4173 arg1 = TREE_OPERAND (exp, 1);
4178 case TRUTH_NOT_EXPR:
4179 in_p = ! in_p, exp = arg0;
4182 case EQ_EXPR: case NE_EXPR:
4183 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4184 /* We can only do something if the range is testing for zero
4185 and if the second operand is an integer constant. Note that
4186 saying something is "in" the range we make is done by
4187 complementing IN_P since it will set in the initial case of
4188 being not equal to zero; "out" is leaving it alone. */
4189 if (low == 0 || high == 0
4190 || ! integer_zerop (low) || ! integer_zerop (high)
4191 || TREE_CODE (arg1) != INTEGER_CST)
4196 case NE_EXPR: /* - [c, c] */
4199 case EQ_EXPR: /* + [c, c] */
4200 in_p = ! in_p, low = high = arg1;
4202 case GT_EXPR: /* - [-, c] */
4203 low = 0, high = arg1;
4205 case GE_EXPR: /* + [c, -] */
4206 in_p = ! in_p, low = arg1, high = 0;
4208 case LT_EXPR: /* - [c, -] */
4209 low = arg1, high = 0;
4211 case LE_EXPR: /* + [-, c] */
4212 in_p = ! in_p, low = 0, high = arg1;
4218 /* If this is an unsigned comparison, we also know that EXP is
4219 greater than or equal to zero. We base the range tests we make
4220 on that fact, so we record it here so we can parse existing
4221 range tests. We test arg0_type since often the return type
4222 of, e.g. EQ_EXPR, is boolean. */
4223 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4225 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4227 build_int_cst (arg0_type, 0),
4231 in_p = n_in_p, low = n_low, high = n_high;
4233 /* If the high bound is missing, but we have a nonzero low
4234 bound, reverse the range so it goes from zero to the low bound
4236 if (high == 0 && low && ! integer_zerop (low))
4239 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4240 integer_one_node, 0);
4241 low = build_int_cst (arg0_type, 0);
4249 /* (-x) IN [a,b] -> x in [-b, -a] */
4250 n_low = range_binop (MINUS_EXPR, exp_type,
4251 build_int_cst (exp_type, 0),
4253 n_high = range_binop (MINUS_EXPR, exp_type,
4254 build_int_cst (exp_type, 0),
4256 low = n_low, high = n_high;
4262 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4263 build_int_cst (exp_type, 1));
4266 case PLUS_EXPR: case MINUS_EXPR:
4267 if (TREE_CODE (arg1) != INTEGER_CST)
4270 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4271 move a constant to the other side. */
4272 if (!TYPE_UNSIGNED (arg0_type)
4273 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4276 /* If EXP is signed, any overflow in the computation is undefined,
4277 so we don't worry about it so long as our computations on
4278 the bounds don't overflow. For unsigned, overflow is defined
4279 and this is exactly the right thing. */
4280 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4281 arg0_type, low, 0, arg1, 0);
4282 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4283 arg0_type, high, 1, arg1, 0);
4284 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4285 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4288 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4289 *strict_overflow_p = true;
4291 /* Check for an unsigned range which has wrapped around the maximum
4292 value thus making n_high < n_low, and normalize it. */
4293 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4295 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4296 integer_one_node, 0);
4297 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4298 integer_one_node, 0);
4300 /* If the range is of the form +/- [ x+1, x ], we won't
4301 be able to normalize it. But then, it represents the
4302 whole range or the empty set, so make it
4304 if (tree_int_cst_equal (n_low, low)
4305 && tree_int_cst_equal (n_high, high))
4311 low = n_low, high = n_high;
4316 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4317 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4320 if (! INTEGRAL_TYPE_P (arg0_type)
4321 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4322 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4325 n_low = low, n_high = high;
4328 n_low = fold_convert (arg0_type, n_low);
4331 n_high = fold_convert (arg0_type, n_high);
4334 /* If we're converting arg0 from an unsigned type, to exp,
4335 a signed type, we will be doing the comparison as unsigned.
4336 The tests above have already verified that LOW and HIGH
4339 So we have to ensure that we will handle large unsigned
4340 values the same way that the current signed bounds treat
4343 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4347 /* For fixed-point modes, we need to pass the saturating flag
4348 as the 2nd parameter. */
4349 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4350 equiv_type = lang_hooks.types.type_for_mode
4351 (TYPE_MODE (arg0_type),
4352 TYPE_SATURATING (arg0_type));
4354 equiv_type = lang_hooks.types.type_for_mode
4355 (TYPE_MODE (arg0_type), 1);
4357 /* A range without an upper bound is, naturally, unbounded.
4358 Since convert would have cropped a very large value, use
4359 the max value for the destination type. */
4361 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4362 : TYPE_MAX_VALUE (arg0_type);
4364 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4365 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4366 fold_convert (arg0_type,
4368 build_int_cst (arg0_type, 1));
4370 /* If the low bound is specified, "and" the range with the
4371 range for which the original unsigned value will be
4375 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4376 1, n_low, n_high, 1,
4377 fold_convert (arg0_type,
4382 in_p = (n_in_p == in_p);
4386 /* Otherwise, "or" the range with the range of the input
4387 that will be interpreted as negative. */
4388 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4389 0, n_low, n_high, 1,
4390 fold_convert (arg0_type,
4395 in_p = (in_p != n_in_p);
4400 low = n_low, high = n_high;
4410 /* If EXP is a constant, we can evaluate whether this is true or false. */
4411 if (TREE_CODE (exp) == INTEGER_CST)
4413 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4415 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4421 *pin_p = in_p, *plow = low, *phigh = high;
4425 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4426 type, TYPE, return an expression to test if EXP is in (or out of, depending
4427 on IN_P) the range. Return 0 if the test couldn't be created. */
4430 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4432 tree etype = TREE_TYPE (exp);
4435 #ifdef HAVE_canonicalize_funcptr_for_compare
4436 /* Disable this optimization for function pointer expressions
4437 on targets that require function pointer canonicalization. */
4438 if (HAVE_canonicalize_funcptr_for_compare
4439 && TREE_CODE (etype) == POINTER_TYPE
4440 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4446 value = build_range_check (type, exp, 1, low, high);
4448 return invert_truthvalue (value);
4453 if (low == 0 && high == 0)
4454 return build_int_cst (type, 1);
4457 return fold_build2 (LE_EXPR, type, exp,
4458 fold_convert (etype, high));
4461 return fold_build2 (GE_EXPR, type, exp,
4462 fold_convert (etype, low));
4464 if (operand_equal_p (low, high, 0))
4465 return fold_build2 (EQ_EXPR, type, exp,
4466 fold_convert (etype, low));
4468 if (integer_zerop (low))
4470 if (! TYPE_UNSIGNED (etype))
4472 etype = unsigned_type_for (etype);
4473 high = fold_convert (etype, high);
4474 exp = fold_convert (etype, exp);
4476 return build_range_check (type, exp, 1, 0, high);
4479 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4480 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4482 unsigned HOST_WIDE_INT lo;
4486 prec = TYPE_PRECISION (etype);
4487 if (prec <= HOST_BITS_PER_WIDE_INT)
4490 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4494 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4495 lo = (unsigned HOST_WIDE_INT) -1;
4498 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4500 if (TYPE_UNSIGNED (etype))
4502 etype = signed_type_for (etype);
4503 exp = fold_convert (etype, exp);
4505 return fold_build2 (GT_EXPR, type, exp,
4506 build_int_cst (etype, 0));
4510 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4511 This requires wrap-around arithmetics for the type of the expression. */
4512 switch (TREE_CODE (etype))
4515 /* There is no requirement that LOW be within the range of ETYPE
4516 if the latter is a subtype. It must, however, be within the base
4517 type of ETYPE. So be sure we do the subtraction in that type. */
4518 if (TREE_TYPE (etype))
4519 etype = TREE_TYPE (etype);
4524 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4525 TYPE_UNSIGNED (etype));
4532 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4533 if (TREE_CODE (etype) == INTEGER_TYPE
4534 && !TYPE_OVERFLOW_WRAPS (etype))
4536 tree utype, minv, maxv;
4538 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4539 for the type in question, as we rely on this here. */
4540 utype = unsigned_type_for (etype);
4541 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4542 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4543 integer_one_node, 1);
4544 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4546 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4553 high = fold_convert (etype, high);
4554 low = fold_convert (etype, low);
4555 exp = fold_convert (etype, exp);
4557 value = const_binop (MINUS_EXPR, high, low, 0);
4560 if (POINTER_TYPE_P (etype))
4562 if (value != 0 && !TREE_OVERFLOW (value))
4564 low = fold_convert (sizetype, low);
4565 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4566 return build_range_check (type,
4567 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4568 1, build_int_cst (etype, 0), value);
4573 if (value != 0 && !TREE_OVERFLOW (value))
4574 return build_range_check (type,
4575 fold_build2 (MINUS_EXPR, etype, exp, low),
4576 1, build_int_cst (etype, 0), value);
4581 /* Return the predecessor of VAL in its type, handling the infinite case. */
4584 range_predecessor (tree val)
4586 tree type = TREE_TYPE (val);
4588 if (INTEGRAL_TYPE_P (type)
4589 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4592 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4595 /* Return the successor of VAL in its type, handling the infinite case. */
4598 range_successor (tree val)
4600 tree type = TREE_TYPE (val);
4602 if (INTEGRAL_TYPE_P (type)
4603 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4606 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4609 /* Given two ranges, see if we can merge them into one. Return 1 if we
4610 can, 0 if we can't. Set the output range into the specified parameters. */
4613 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4614 tree high0, int in1_p, tree low1, tree high1)
4622 int lowequal = ((low0 == 0 && low1 == 0)
4623 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4624 low0, 0, low1, 0)));
4625 int highequal = ((high0 == 0 && high1 == 0)
4626 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4627 high0, 1, high1, 1)));
4629 /* Make range 0 be the range that starts first, or ends last if they
4630 start at the same value. Swap them if it isn't. */
4631 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4634 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4635 high1, 1, high0, 1))))
4637 temp = in0_p, in0_p = in1_p, in1_p = temp;
4638 tem = low0, low0 = low1, low1 = tem;
4639 tem = high0, high0 = high1, high1 = tem;
4642 /* Now flag two cases, whether the ranges are disjoint or whether the
4643 second range is totally subsumed in the first. Note that the tests
4644 below are simplified by the ones above. */
4645 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4646 high0, 1, low1, 0));
4647 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4648 high1, 1, high0, 1));
4650 /* We now have four cases, depending on whether we are including or
4651 excluding the two ranges. */
4654 /* If they don't overlap, the result is false. If the second range
4655 is a subset it is the result. Otherwise, the range is from the start
4656 of the second to the end of the first. */
4658 in_p = 0, low = high = 0;
4660 in_p = 1, low = low1, high = high1;
4662 in_p = 1, low = low1, high = high0;
4665 else if (in0_p && ! in1_p)
4667 /* If they don't overlap, the result is the first range. If they are
4668 equal, the result is false. If the second range is a subset of the
4669 first, and the ranges begin at the same place, we go from just after
4670 the end of the second range to the end of the first. If the second
4671 range is not a subset of the first, or if it is a subset and both
4672 ranges end at the same place, the range starts at the start of the
4673 first range and ends just before the second range.
4674 Otherwise, we can't describe this as a single range. */
4676 in_p = 1, low = low0, high = high0;
4677 else if (lowequal && highequal)
4678 in_p = 0, low = high = 0;
4679 else if (subset && lowequal)
4681 low = range_successor (high1);
4686 /* We are in the weird situation where high0 > high1 but
4687 high1 has no successor. Punt. */
4691 else if (! subset || highequal)
4694 high = range_predecessor (low1);
4698 /* low0 < low1 but low1 has no predecessor. Punt. */
4706 else if (! in0_p && in1_p)
4708 /* If they don't overlap, the result is the second range. If the second
4709 is a subset of the first, the result is false. Otherwise,
4710 the range starts just after the first range and ends at the
4711 end of the second. */
4713 in_p = 1, low = low1, high = high1;
4714 else if (subset || highequal)
4715 in_p = 0, low = high = 0;
4718 low = range_successor (high0);
4723 /* high1 > high0 but high0 has no successor. Punt. */
4731 /* The case where we are excluding both ranges. Here the complex case
4732 is if they don't overlap. In that case, the only time we have a
4733 range is if they are adjacent. If the second is a subset of the
4734 first, the result is the first. Otherwise, the range to exclude
4735 starts at the beginning of the first range and ends at the end of the
4739 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4740 range_successor (high0),
4742 in_p = 0, low = low0, high = high1;
4745 /* Canonicalize - [min, x] into - [-, x]. */
4746 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4747 switch (TREE_CODE (TREE_TYPE (low0)))
4750 if (TYPE_PRECISION (TREE_TYPE (low0))
4751 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4755 if (tree_int_cst_equal (low0,
4756 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4760 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4761 && integer_zerop (low0))
4768 /* Canonicalize - [x, max] into - [x, -]. */
4769 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4770 switch (TREE_CODE (TREE_TYPE (high1)))
4773 if (TYPE_PRECISION (TREE_TYPE (high1))
4774 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4778 if (tree_int_cst_equal (high1,
4779 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4783 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4784 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4786 integer_one_node, 1)))
4793 /* The ranges might be also adjacent between the maximum and
4794 minimum values of the given type. For
4795 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4796 return + [x + 1, y - 1]. */
4797 if (low0 == 0 && high1 == 0)
4799 low = range_successor (high0);
4800 high = range_predecessor (low1);
4801 if (low == 0 || high == 0)
4811 in_p = 0, low = low0, high = high0;
4813 in_p = 0, low = low0, high = high1;
4816 *pin_p = in_p, *plow = low, *phigh = high;
4821 /* Subroutine of fold, looking inside expressions of the form
4822 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4823 of the COND_EXPR. This function is being used also to optimize
4824 A op B ? C : A, by reversing the comparison first.
4826 Return a folded expression whose code is not a COND_EXPR
4827 anymore, or NULL_TREE if no folding opportunity is found. */
4830 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4832 enum tree_code comp_code = TREE_CODE (arg0);
4833 tree arg00 = TREE_OPERAND (arg0, 0);
4834 tree arg01 = TREE_OPERAND (arg0, 1);
4835 tree arg1_type = TREE_TYPE (arg1);
4841 /* If we have A op 0 ? A : -A, consider applying the following
4844 A == 0? A : -A same as -A
4845 A != 0? A : -A same as A
4846 A >= 0? A : -A same as abs (A)
4847 A > 0? A : -A same as abs (A)
4848 A <= 0? A : -A same as -abs (A)
4849 A < 0? A : -A same as -abs (A)
4851 None of these transformations work for modes with signed
4852 zeros. If A is +/-0, the first two transformations will
4853 change the sign of the result (from +0 to -0, or vice
4854 versa). The last four will fix the sign of the result,
4855 even though the original expressions could be positive or
4856 negative, depending on the sign of A.
4858 Note that all these transformations are correct if A is
4859 NaN, since the two alternatives (A and -A) are also NaNs. */
4860 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4861 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4862 ? real_zerop (arg01)
4863 : integer_zerop (arg01))
4864 && ((TREE_CODE (arg2) == NEGATE_EXPR
4865 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4866 /* In the case that A is of the form X-Y, '-A' (arg2) may
4867 have already been folded to Y-X, check for that. */
4868 || (TREE_CODE (arg1) == MINUS_EXPR
4869 && TREE_CODE (arg2) == MINUS_EXPR
4870 && operand_equal_p (TREE_OPERAND (arg1, 0),
4871 TREE_OPERAND (arg2, 1), 0)
4872 && operand_equal_p (TREE_OPERAND (arg1, 1),
4873 TREE_OPERAND (arg2, 0), 0))))
4878 tem = fold_convert (arg1_type, arg1);
4879 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4882 return pedantic_non_lvalue (fold_convert (type, arg1));
4885 if (flag_trapping_math)
4890 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4891 arg1 = fold_convert (signed_type_for
4892 (TREE_TYPE (arg1)), arg1);
4893 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4894 return pedantic_non_lvalue (fold_convert (type, tem));
4897 if (flag_trapping_math)
4901 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4902 arg1 = fold_convert (signed_type_for
4903 (TREE_TYPE (arg1)), arg1);
4904 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4905 return negate_expr (fold_convert (type, tem));
4907 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4911 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4912 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4913 both transformations are correct when A is NaN: A != 0
4914 is then true, and A == 0 is false. */
4916 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4917 && integer_zerop (arg01) && integer_zerop (arg2))
4919 if (comp_code == NE_EXPR)
4920 return pedantic_non_lvalue (fold_convert (type, arg1));
4921 else if (comp_code == EQ_EXPR)
4922 return build_int_cst (type, 0);
4925 /* Try some transformations of A op B ? A : B.
4927 A == B? A : B same as B
4928 A != B? A : B same as A
4929 A >= B? A : B same as max (A, B)
4930 A > B? A : B same as max (B, A)
4931 A <= B? A : B same as min (A, B)
4932 A < B? A : B same as min (B, A)
4934 As above, these transformations don't work in the presence
4935 of signed zeros. For example, if A and B are zeros of
4936 opposite sign, the first two transformations will change
4937 the sign of the result. In the last four, the original
4938 expressions give different results for (A=+0, B=-0) and
4939 (A=-0, B=+0), but the transformed expressions do not.
4941 The first two transformations are correct if either A or B
4942 is a NaN. In the first transformation, the condition will
4943 be false, and B will indeed be chosen. In the case of the
4944 second transformation, the condition A != B will be true,
4945 and A will be chosen.
4947 The conversions to max() and min() are not correct if B is
4948 a number and A is not. The conditions in the original
4949 expressions will be false, so all four give B. The min()
4950 and max() versions would give a NaN instead. */
4951 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4952 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4953 /* Avoid these transformations if the COND_EXPR may be used
4954 as an lvalue in the C++ front-end. PR c++/19199. */
4956 || (strcmp (lang_hooks.name, "GNU C++") != 0
4957 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4958 || ! maybe_lvalue_p (arg1)
4959 || ! maybe_lvalue_p (arg2)))
4961 tree comp_op0 = arg00;
4962 tree comp_op1 = arg01;
4963 tree comp_type = TREE_TYPE (comp_op0);
4965 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4966 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4976 return pedantic_non_lvalue (fold_convert (type, arg2));
4978 return pedantic_non_lvalue (fold_convert (type, arg1));
4983 /* In C++ a ?: expression can be an lvalue, so put the
4984 operand which will be used if they are equal first
4985 so that we can convert this back to the
4986 corresponding COND_EXPR. */
4987 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4989 comp_op0 = fold_convert (comp_type, comp_op0);
4990 comp_op1 = fold_convert (comp_type, comp_op1);
4991 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4992 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4993 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4994 return pedantic_non_lvalue (fold_convert (type, tem));
5001 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5003 comp_op0 = fold_convert (comp_type, comp_op0);
5004 comp_op1 = fold_convert (comp_type, comp_op1);
5005 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5006 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5007 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5008 return pedantic_non_lvalue (fold_convert (type, tem));
5012 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5013 return pedantic_non_lvalue (fold_convert (type, arg2));
5016 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5017 return pedantic_non_lvalue (fold_convert (type, arg1));
5020 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5025 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5026 we might still be able to simplify this. For example,
5027 if C1 is one less or one more than C2, this might have started
5028 out as a MIN or MAX and been transformed by this function.
5029 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5031 if (INTEGRAL_TYPE_P (type)
5032 && TREE_CODE (arg01) == INTEGER_CST
5033 && TREE_CODE (arg2) == INTEGER_CST)
5037 /* We can replace A with C1 in this case. */
5038 arg1 = fold_convert (type, arg01);
5039 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5042 /* If C1 is C2 + 1, this is min(A, C2). */
5043 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5045 && operand_equal_p (arg01,
5046 const_binop (PLUS_EXPR, arg2,
5047 build_int_cst (type, 1), 0),
5049 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5051 fold_convert (type, arg1),
5056 /* If C1 is C2 - 1, this is min(A, C2). */
5057 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5059 && operand_equal_p (arg01,
5060 const_binop (MINUS_EXPR, arg2,
5061 build_int_cst (type, 1), 0),
5063 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5065 fold_convert (type, arg1),
5070 /* If C1 is C2 - 1, this is max(A, C2). */
5071 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5073 && operand_equal_p (arg01,
5074 const_binop (MINUS_EXPR, arg2,
5075 build_int_cst (type, 1), 0),
5077 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5079 fold_convert (type, arg1),
5084 /* If C1 is C2 + 1, this is max(A, C2). */
5085 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5087 && operand_equal_p (arg01,
5088 const_binop (PLUS_EXPR, arg2,
5089 build_int_cst (type, 1), 0),
5091 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5093 fold_convert (type, arg1),
5107 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5108 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5111 /* EXP is some logical combination of boolean tests. See if we can
5112 merge it into some range test. Return the new tree if so. */
5115 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5117 int or_op = (code == TRUTH_ORIF_EXPR
5118 || code == TRUTH_OR_EXPR);
5119 int in0_p, in1_p, in_p;
5120 tree low0, low1, low, high0, high1, high;
5121 bool strict_overflow_p = false;
5122 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5123 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5125 const char * const warnmsg = G_("assuming signed overflow does not occur "
5126 "when simplifying range test");
5128 /* If this is an OR operation, invert both sides; we will invert
5129 again at the end. */
5131 in0_p = ! in0_p, in1_p = ! in1_p;
5133 /* If both expressions are the same, if we can merge the ranges, and we
5134 can build the range test, return it or it inverted. If one of the
5135 ranges is always true or always false, consider it to be the same
5136 expression as the other. */
5137 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5138 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5140 && 0 != (tem = (build_range_check (type,
5142 : rhs != 0 ? rhs : integer_zero_node,
5145 if (strict_overflow_p)
5146 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5147 return or_op ? invert_truthvalue (tem) : tem;
5150 /* On machines where the branch cost is expensive, if this is a
5151 short-circuited branch and the underlying object on both sides
5152 is the same, make a non-short-circuit operation. */
5153 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5154 && lhs != 0 && rhs != 0
5155 && (code == TRUTH_ANDIF_EXPR
5156 || code == TRUTH_ORIF_EXPR)
5157 && operand_equal_p (lhs, rhs, 0))
5159 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5160 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5161 which cases we can't do this. */
5162 if (simple_operand_p (lhs))
5163 return build2 (code == TRUTH_ANDIF_EXPR
5164 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5167 else if (lang_hooks.decls.global_bindings_p () == 0
5168 && ! CONTAINS_PLACEHOLDER_P (lhs))
5170 tree common = save_expr (lhs);
5172 if (0 != (lhs = build_range_check (type, common,
5173 or_op ? ! in0_p : in0_p,
5175 && (0 != (rhs = build_range_check (type, common,
5176 or_op ? ! in1_p : in1_p,
5179 if (strict_overflow_p)
5180 fold_overflow_warning (warnmsg,
5181 WARN_STRICT_OVERFLOW_COMPARISON);
5182 return build2 (code == TRUTH_ANDIF_EXPR
5183 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5192 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5193 bit value. Arrange things so the extra bits will be set to zero if and
5194 only if C is signed-extended to its full width. If MASK is nonzero,
5195 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5198 unextend (tree c, int p, int unsignedp, tree mask)
5200 tree type = TREE_TYPE (c);
5201 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5204 if (p == modesize || unsignedp)
5207 /* We work by getting just the sign bit into the low-order bit, then
5208 into the high-order bit, then sign-extend. We then XOR that value
5210 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5211 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5213 /* We must use a signed type in order to get an arithmetic right shift.
5214 However, we must also avoid introducing accidental overflows, so that
5215 a subsequent call to integer_zerop will work. Hence we must
5216 do the type conversion here. At this point, the constant is either
5217 zero or one, and the conversion to a signed type can never overflow.
5218 We could get an overflow if this conversion is done anywhere else. */
5219 if (TYPE_UNSIGNED (type))
5220 temp = fold_convert (signed_type_for (type), temp);
5222 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5223 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5225 temp = const_binop (BIT_AND_EXPR, temp,
5226 fold_convert (TREE_TYPE (c), mask), 0);
5227 /* If necessary, convert the type back to match the type of C. */
5228 if (TYPE_UNSIGNED (type))
5229 temp = fold_convert (type, temp);
5231 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5234 /* Find ways of folding logical expressions of LHS and RHS:
5235 Try to merge two comparisons to the same innermost item.
5236 Look for range tests like "ch >= '0' && ch <= '9'".
5237 Look for combinations of simple terms on machines with expensive branches
5238 and evaluate the RHS unconditionally.
5240 For example, if we have p->a == 2 && p->b == 4 and we can make an
5241 object large enough to span both A and B, we can do this with a comparison
5242 against the object ANDed with the a mask.
5244 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5245 operations to do this with one comparison.
5247 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5248 function and the one above.
5250 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5251 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5253 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5256 We return the simplified tree or 0 if no optimization is possible. */
5259 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5261 /* If this is the "or" of two comparisons, we can do something if
5262 the comparisons are NE_EXPR. If this is the "and", we can do something
5263 if the comparisons are EQ_EXPR. I.e.,
5264 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5266 WANTED_CODE is this operation code. For single bit fields, we can
5267 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5268 comparison for one-bit fields. */
5270 enum tree_code wanted_code;
5271 enum tree_code lcode, rcode;
5272 tree ll_arg, lr_arg, rl_arg, rr_arg;
5273 tree ll_inner, lr_inner, rl_inner, rr_inner;
5274 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5275 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5276 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5277 HOST_WIDE_INT lnbitsize, lnbitpos;
5278 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5279 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5280 enum machine_mode lnmode;
5281 tree ll_mask, lr_mask, rl_mask, rr_mask;
5282 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5283 tree l_const, r_const;
5284 tree lntype, result;
5285 int first_bit, end_bit;
5287 tree orig_lhs = lhs, orig_rhs = rhs;
5288 enum tree_code orig_code = code;
5290 /* Start by getting the comparison codes. Fail if anything is volatile.
5291 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5292 it were surrounded with a NE_EXPR. */
5294 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5297 lcode = TREE_CODE (lhs);
5298 rcode = TREE_CODE (rhs);
5300 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5302 lhs = build2 (NE_EXPR, truth_type, lhs,
5303 build_int_cst (TREE_TYPE (lhs), 0));
5307 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5309 rhs = build2 (NE_EXPR, truth_type, rhs,
5310 build_int_cst (TREE_TYPE (rhs), 0));
5314 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5315 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5318 ll_arg = TREE_OPERAND (lhs, 0);
5319 lr_arg = TREE_OPERAND (lhs, 1);
5320 rl_arg = TREE_OPERAND (rhs, 0);
5321 rr_arg = TREE_OPERAND (rhs, 1);
5323 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5324 if (simple_operand_p (ll_arg)
5325 && simple_operand_p (lr_arg))
5328 if (operand_equal_p (ll_arg, rl_arg, 0)
5329 && operand_equal_p (lr_arg, rr_arg, 0))
5331 result = combine_comparisons (code, lcode, rcode,
5332 truth_type, ll_arg, lr_arg);
5336 else if (operand_equal_p (ll_arg, rr_arg, 0)
5337 && operand_equal_p (lr_arg, rl_arg, 0))
5339 result = combine_comparisons (code, lcode,
5340 swap_tree_comparison (rcode),
5341 truth_type, ll_arg, lr_arg);
5347 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5348 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5350 /* If the RHS can be evaluated unconditionally and its operands are
5351 simple, it wins to evaluate the RHS unconditionally on machines
5352 with expensive branches. In this case, this isn't a comparison
5353 that can be merged. Avoid doing this if the RHS is a floating-point
5354 comparison since those can trap. */
5356 if (BRANCH_COST >= 2
5357 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5358 && simple_operand_p (rl_arg)
5359 && simple_operand_p (rr_arg))
5361 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5362 if (code == TRUTH_OR_EXPR
5363 && lcode == NE_EXPR && integer_zerop (lr_arg)
5364 && rcode == NE_EXPR && integer_zerop (rr_arg)
5365 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5366 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5367 return build2 (NE_EXPR, truth_type,
5368 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5370 build_int_cst (TREE_TYPE (ll_arg), 0));
5372 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5373 if (code == TRUTH_AND_EXPR
5374 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5375 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5376 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5377 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5378 return build2 (EQ_EXPR, truth_type,
5379 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5381 build_int_cst (TREE_TYPE (ll_arg), 0));
5383 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5385 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5386 return build2 (code, truth_type, lhs, rhs);
5391 /* See if the comparisons can be merged. Then get all the parameters for
5394 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5395 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5399 ll_inner = decode_field_reference (ll_arg,
5400 &ll_bitsize, &ll_bitpos, &ll_mode,
5401 &ll_unsignedp, &volatilep, &ll_mask,
5403 lr_inner = decode_field_reference (lr_arg,
5404 &lr_bitsize, &lr_bitpos, &lr_mode,
5405 &lr_unsignedp, &volatilep, &lr_mask,
5407 rl_inner = decode_field_reference (rl_arg,
5408 &rl_bitsize, &rl_bitpos, &rl_mode,
5409 &rl_unsignedp, &volatilep, &rl_mask,
5411 rr_inner = decode_field_reference (rr_arg,
5412 &rr_bitsize, &rr_bitpos, &rr_mode,
5413 &rr_unsignedp, &volatilep, &rr_mask,
5416 /* It must be true that the inner operation on the lhs of each
5417 comparison must be the same if we are to be able to do anything.
5418 Then see if we have constants. If not, the same must be true for
5420 if (volatilep || ll_inner == 0 || rl_inner == 0
5421 || ! operand_equal_p (ll_inner, rl_inner, 0))
5424 if (TREE_CODE (lr_arg) == INTEGER_CST
5425 && TREE_CODE (rr_arg) == INTEGER_CST)
5426 l_const = lr_arg, r_const = rr_arg;
5427 else if (lr_inner == 0 || rr_inner == 0
5428 || ! operand_equal_p (lr_inner, rr_inner, 0))
5431 l_const = r_const = 0;
5433 /* If either comparison code is not correct for our logical operation,
5434 fail. However, we can convert a one-bit comparison against zero into
5435 the opposite comparison against that bit being set in the field. */
5437 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5438 if (lcode != wanted_code)
5440 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5442 /* Make the left operand unsigned, since we are only interested
5443 in the value of one bit. Otherwise we are doing the wrong
5452 /* This is analogous to the code for l_const above. */
5453 if (rcode != wanted_code)
5455 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5464 /* See if we can find a mode that contains both fields being compared on
5465 the left. If we can't, fail. Otherwise, update all constants and masks
5466 to be relative to a field of that size. */
5467 first_bit = MIN (ll_bitpos, rl_bitpos);
5468 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5469 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5470 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5472 if (lnmode == VOIDmode)
5475 lnbitsize = GET_MODE_BITSIZE (lnmode);
5476 lnbitpos = first_bit & ~ (lnbitsize - 1);
5477 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5478 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5480 if (BYTES_BIG_ENDIAN)
5482 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5483 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5486 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5487 size_int (xll_bitpos), 0);
5488 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5489 size_int (xrl_bitpos), 0);
5493 l_const = fold_convert (lntype, l_const);
5494 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5495 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5496 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5497 fold_build1 (BIT_NOT_EXPR,
5501 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5503 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5508 r_const = fold_convert (lntype, r_const);
5509 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5510 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5511 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5512 fold_build1 (BIT_NOT_EXPR,
5516 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5518 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5522 /* Handle the case of comparisons with constants. If there is something in
5523 common between the masks, those bits of the constants must be the same.
5524 If not, the condition is always false. Test for this to avoid generating
5525 incorrect code below. */
5526 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5527 if (! integer_zerop (result)
5528 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5529 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5531 if (wanted_code == NE_EXPR)
5533 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5534 return constant_boolean_node (true, truth_type);
5538 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5539 return constant_boolean_node (false, truth_type);
5546 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5550 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5553 enum tree_code op_code;
5556 int consts_equal, consts_lt;
5559 STRIP_SIGN_NOPS (arg0);
5561 op_code = TREE_CODE (arg0);
5562 minmax_const = TREE_OPERAND (arg0, 1);
5563 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5564 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5565 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5566 inner = TREE_OPERAND (arg0, 0);
5568 /* If something does not permit us to optimize, return the original tree. */
5569 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5570 || TREE_CODE (comp_const) != INTEGER_CST
5571 || TREE_OVERFLOW (comp_const)
5572 || TREE_CODE (minmax_const) != INTEGER_CST
5573 || TREE_OVERFLOW (minmax_const))
5576 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5577 and GT_EXPR, doing the rest with recursive calls using logical
5581 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5583 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5586 return invert_truthvalue (tem);
5592 fold_build2 (TRUTH_ORIF_EXPR, type,
5593 optimize_minmax_comparison
5594 (EQ_EXPR, type, arg0, comp_const),
5595 optimize_minmax_comparison
5596 (GT_EXPR, type, arg0, comp_const));
5599 if (op_code == MAX_EXPR && consts_equal)
5600 /* MAX (X, 0) == 0 -> X <= 0 */
5601 return fold_build2 (LE_EXPR, type, inner, comp_const);
5603 else if (op_code == MAX_EXPR && consts_lt)
5604 /* MAX (X, 0) == 5 -> X == 5 */
5605 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5607 else if (op_code == MAX_EXPR)
5608 /* MAX (X, 0) == -1 -> false */
5609 return omit_one_operand (type, integer_zero_node, inner);
5611 else if (consts_equal)
5612 /* MIN (X, 0) == 0 -> X >= 0 */
5613 return fold_build2 (GE_EXPR, type, inner, comp_const);
5616 /* MIN (X, 0) == 5 -> false */
5617 return omit_one_operand (type, integer_zero_node, inner);
5620 /* MIN (X, 0) == -1 -> X == -1 */
5621 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5624 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5625 /* MAX (X, 0) > 0 -> X > 0
5626 MAX (X, 0) > 5 -> X > 5 */
5627 return fold_build2 (GT_EXPR, type, inner, comp_const);
5629 else if (op_code == MAX_EXPR)
5630 /* MAX (X, 0) > -1 -> true */
5631 return omit_one_operand (type, integer_one_node, inner);
5633 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5634 /* MIN (X, 0) > 0 -> false
5635 MIN (X, 0) > 5 -> false */
5636 return omit_one_operand (type, integer_zero_node, inner);
5639 /* MIN (X, 0) > -1 -> X > -1 */
5640 return fold_build2 (GT_EXPR, type, inner, comp_const);
5647 /* T is an integer expression that is being multiplied, divided, or taken a
5648 modulus (CODE says which and what kind of divide or modulus) by a
5649 constant C. See if we can eliminate that operation by folding it with
5650 other operations already in T. WIDE_TYPE, if non-null, is a type that
5651 should be used for the computation if wider than our type.
5653 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5654 (X * 2) + (Y * 4). We must, however, be assured that either the original
5655 expression would not overflow or that overflow is undefined for the type
5656 in the language in question.
5658 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5659 the machine has a multiply-accumulate insn or that this is part of an
5660 addressing calculation.
5662 If we return a non-null expression, it is an equivalent form of the
5663 original computation, but need not be in the original type.
5665 We set *STRICT_OVERFLOW_P to true if the return values depends on
5666 signed overflow being undefined. Otherwise we do not change
5667 *STRICT_OVERFLOW_P. */
5670 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5671 bool *strict_overflow_p)
5673 /* To avoid exponential search depth, refuse to allow recursion past
5674 three levels. Beyond that (1) it's highly unlikely that we'll find
5675 something interesting and (2) we've probably processed it before
5676 when we built the inner expression. */
5685 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5692 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5693 bool *strict_overflow_p)
5695 tree type = TREE_TYPE (t);
5696 enum tree_code tcode = TREE_CODE (t);
5697 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5698 > GET_MODE_SIZE (TYPE_MODE (type)))
5699 ? wide_type : type);
5701 int same_p = tcode == code;
5702 tree op0 = NULL_TREE, op1 = NULL_TREE;
5703 bool sub_strict_overflow_p;
5705 /* Don't deal with constants of zero here; they confuse the code below. */
5706 if (integer_zerop (c))
5709 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5710 op0 = TREE_OPERAND (t, 0);
5712 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5713 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5715 /* Note that we need not handle conditional operations here since fold
5716 already handles those cases. So just do arithmetic here. */
5720 /* For a constant, we can always simplify if we are a multiply
5721 or (for divide and modulus) if it is a multiple of our constant. */
5722 if (code == MULT_EXPR
5723 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5724 return const_binop (code, fold_convert (ctype, t),
5725 fold_convert (ctype, c), 0);
5728 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5729 /* If op0 is an expression ... */
5730 if ((COMPARISON_CLASS_P (op0)
5731 || UNARY_CLASS_P (op0)
5732 || BINARY_CLASS_P (op0)
5733 || VL_EXP_CLASS_P (op0)
5734 || EXPRESSION_CLASS_P (op0))
5735 /* ... and is unsigned, and its type is smaller than ctype,
5736 then we cannot pass through as widening. */
5737 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5738 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5739 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5740 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5741 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5742 /* ... or this is a truncation (t is narrower than op0),
5743 then we cannot pass through this narrowing. */
5744 || (GET_MODE_SIZE (TYPE_MODE (type))
5745 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5746 /* ... or signedness changes for division or modulus,
5747 then we cannot pass through this conversion. */
5748 || (code != MULT_EXPR
5749 && (TYPE_UNSIGNED (ctype)
5750 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5751 /* ... or has undefined overflow while the converted to
5752 type has not, we cannot do the operation in the inner type
5753 as that would introduce undefined overflow. */
5754 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5755 && !TYPE_OVERFLOW_UNDEFINED (type))))
5758 /* Pass the constant down and see if we can make a simplification. If
5759 we can, replace this expression with the inner simplification for
5760 possible later conversion to our or some other type. */
5761 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5762 && TREE_CODE (t2) == INTEGER_CST
5763 && !TREE_OVERFLOW (t2)
5764 && (0 != (t1 = extract_muldiv (op0, t2, code,
5766 ? ctype : NULL_TREE,
5767 strict_overflow_p))))
5772 /* If widening the type changes it from signed to unsigned, then we
5773 must avoid building ABS_EXPR itself as unsigned. */
5774 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5776 tree cstype = (*signed_type_for) (ctype);
5777 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5780 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5781 return fold_convert (ctype, t1);
5785 /* If the constant is negative, we cannot simplify this. */
5786 if (tree_int_cst_sgn (c) == -1)
5790 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5792 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5795 case MIN_EXPR: case MAX_EXPR:
5796 /* If widening the type changes the signedness, then we can't perform
5797 this optimization as that changes the result. */
5798 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5801 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5802 sub_strict_overflow_p = false;
5803 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5804 &sub_strict_overflow_p)) != 0
5805 && (t2 = extract_muldiv (op1, c, code, wide_type,
5806 &sub_strict_overflow_p)) != 0)
5808 if (tree_int_cst_sgn (c) < 0)
5809 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5810 if (sub_strict_overflow_p)
5811 *strict_overflow_p = true;
5812 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5813 fold_convert (ctype, t2));
5817 case LSHIFT_EXPR: case RSHIFT_EXPR:
5818 /* If the second operand is constant, this is a multiplication
5819 or floor division, by a power of two, so we can treat it that
5820 way unless the multiplier or divisor overflows. Signed
5821 left-shift overflow is implementation-defined rather than
5822 undefined in C90, so do not convert signed left shift into
5824 if (TREE_CODE (op1) == INTEGER_CST
5825 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5826 /* const_binop may not detect overflow correctly,
5827 so check for it explicitly here. */
5828 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5829 && TREE_INT_CST_HIGH (op1) == 0
5830 && 0 != (t1 = fold_convert (ctype,
5831 const_binop (LSHIFT_EXPR,
5834 && !TREE_OVERFLOW (t1))
5835 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5836 ? MULT_EXPR : FLOOR_DIV_EXPR,
5837 ctype, fold_convert (ctype, op0), t1),
5838 c, code, wide_type, strict_overflow_p);
5841 case PLUS_EXPR: case MINUS_EXPR:
5842 /* See if we can eliminate the operation on both sides. If we can, we
5843 can return a new PLUS or MINUS. If we can't, the only remaining
5844 cases where we can do anything are if the second operand is a
5846 sub_strict_overflow_p = false;
5847 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5848 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5849 if (t1 != 0 && t2 != 0
5850 && (code == MULT_EXPR
5851 /* If not multiplication, we can only do this if both operands
5852 are divisible by c. */
5853 || (multiple_of_p (ctype, op0, c)
5854 && multiple_of_p (ctype, op1, c))))
5856 if (sub_strict_overflow_p)
5857 *strict_overflow_p = true;
5858 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5859 fold_convert (ctype, t2));
5862 /* If this was a subtraction, negate OP1 and set it to be an addition.
5863 This simplifies the logic below. */
5864 if (tcode == MINUS_EXPR)
5865 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5867 if (TREE_CODE (op1) != INTEGER_CST)
5870 /* If either OP1 or C are negative, this optimization is not safe for
5871 some of the division and remainder types while for others we need
5872 to change the code. */
5873 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5875 if (code == CEIL_DIV_EXPR)
5876 code = FLOOR_DIV_EXPR;
5877 else if (code == FLOOR_DIV_EXPR)
5878 code = CEIL_DIV_EXPR;
5879 else if (code != MULT_EXPR
5880 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5884 /* If it's a multiply or a division/modulus operation of a multiple
5885 of our constant, do the operation and verify it doesn't overflow. */
5886 if (code == MULT_EXPR
5887 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5889 op1 = const_binop (code, fold_convert (ctype, op1),
5890 fold_convert (ctype, c), 0);
5891 /* We allow the constant to overflow with wrapping semantics. */
5893 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5899 /* If we have an unsigned type is not a sizetype, we cannot widen
5900 the operation since it will change the result if the original
5901 computation overflowed. */
5902 if (TYPE_UNSIGNED (ctype)
5903 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5907 /* If we were able to eliminate our operation from the first side,
5908 apply our operation to the second side and reform the PLUS. */
5909 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5910 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5912 /* The last case is if we are a multiply. In that case, we can
5913 apply the distributive law to commute the multiply and addition
5914 if the multiplication of the constants doesn't overflow. */
5915 if (code == MULT_EXPR)
5916 return fold_build2 (tcode, ctype,
5917 fold_build2 (code, ctype,
5918 fold_convert (ctype, op0),
5919 fold_convert (ctype, c)),
5925 /* We have a special case here if we are doing something like
5926 (C * 8) % 4 since we know that's zero. */
5927 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5928 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5929 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5930 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5931 return omit_one_operand (type, integer_zero_node, op0);
5933 /* ... fall through ... */
5935 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5936 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5937 /* If we can extract our operation from the LHS, do so and return a
5938 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5939 do something only if the second operand is a constant. */
5941 && (t1 = extract_muldiv (op0, c, code, wide_type,
5942 strict_overflow_p)) != 0)
5943 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5944 fold_convert (ctype, op1));
5945 else if (tcode == MULT_EXPR && code == MULT_EXPR
5946 && (t1 = extract_muldiv (op1, c, code, wide_type,
5947 strict_overflow_p)) != 0)
5948 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5949 fold_convert (ctype, t1));
5950 else if (TREE_CODE (op1) != INTEGER_CST)
5953 /* If these are the same operation types, we can associate them
5954 assuming no overflow. */
5956 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5957 fold_convert (ctype, c), 1))
5958 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5959 TREE_INT_CST_HIGH (t1),
5960 (TYPE_UNSIGNED (ctype)
5961 && tcode != MULT_EXPR) ? -1 : 1,
5962 TREE_OVERFLOW (t1)))
5963 && !TREE_OVERFLOW (t1))
5964 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5966 /* If these operations "cancel" each other, we have the main
5967 optimizations of this pass, which occur when either constant is a
5968 multiple of the other, in which case we replace this with either an
5969 operation or CODE or TCODE.
5971 If we have an unsigned type that is not a sizetype, we cannot do
5972 this since it will change the result if the original computation
5974 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5975 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5976 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5977 || (tcode == MULT_EXPR
5978 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5979 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5980 && code != MULT_EXPR)))
5982 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5984 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5985 *strict_overflow_p = true;
5986 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5987 fold_convert (ctype,
5988 const_binop (TRUNC_DIV_EXPR,
5991 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5993 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5994 *strict_overflow_p = true;
5995 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5996 fold_convert (ctype,
5997 const_binop (TRUNC_DIV_EXPR,
6010 /* Return a node which has the indicated constant VALUE (either 0 or
6011 1), and is of the indicated TYPE. */
6014 constant_boolean_node (int value, tree type)
6016 if (type == integer_type_node)
6017 return value ? integer_one_node : integer_zero_node;
6018 else if (type == boolean_type_node)
6019 return value ? boolean_true_node : boolean_false_node;
6021 return build_int_cst (type, value);
6025 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6026 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6027 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6028 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6029 COND is the first argument to CODE; otherwise (as in the example
6030 given here), it is the second argument. TYPE is the type of the
6031 original expression. Return NULL_TREE if no simplification is
6035 fold_binary_op_with_conditional_arg (enum tree_code code,
6036 tree type, tree op0, tree op1,
6037 tree cond, tree arg, int cond_first_p)
6039 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6040 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6041 tree test, true_value, false_value;
6042 tree lhs = NULL_TREE;
6043 tree rhs = NULL_TREE;
6045 /* This transformation is only worthwhile if we don't have to wrap
6046 arg in a SAVE_EXPR, and the operation can be simplified on at least
6047 one of the branches once its pushed inside the COND_EXPR. */
6048 if (!TREE_CONSTANT (arg))
6051 if (TREE_CODE (cond) == COND_EXPR)
6053 test = TREE_OPERAND (cond, 0);
6054 true_value = TREE_OPERAND (cond, 1);
6055 false_value = TREE_OPERAND (cond, 2);
6056 /* If this operand throws an expression, then it does not make
6057 sense to try to perform a logical or arithmetic operation
6059 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6061 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6066 tree testtype = TREE_TYPE (cond);
6068 true_value = constant_boolean_node (true, testtype);
6069 false_value = constant_boolean_node (false, testtype);
6072 arg = fold_convert (arg_type, arg);
6075 true_value = fold_convert (cond_type, true_value);
6077 lhs = fold_build2 (code, type, true_value, arg);
6079 lhs = fold_build2 (code, type, arg, true_value);
6083 false_value = fold_convert (cond_type, false_value);
6085 rhs = fold_build2 (code, type, false_value, arg);
6087 rhs = fold_build2 (code, type, arg, false_value);
6090 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6091 return fold_convert (type, test);
6095 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6097 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6098 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6099 ADDEND is the same as X.
6101 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6102 and finite. The problematic cases are when X is zero, and its mode
6103 has signed zeros. In the case of rounding towards -infinity,
6104 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6105 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6108 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6110 if (!real_zerop (addend))
6113 /* Don't allow the fold with -fsignaling-nans. */
6114 if (HONOR_SNANS (TYPE_MODE (type)))
6117 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6118 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6121 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6122 if (TREE_CODE (addend) == REAL_CST
6123 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6126 /* The mode has signed zeros, and we have to honor their sign.
6127 In this situation, there is only one case we can return true for.
6128 X - 0 is the same as X unless rounding towards -infinity is
6130 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6133 /* Subroutine of fold() that checks comparisons of built-in math
6134 functions against real constants.
6136 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6137 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6138 is the type of the result and ARG0 and ARG1 are the operands of the
6139 comparison. ARG1 must be a TREE_REAL_CST.
6141 The function returns the constant folded tree if a simplification
6142 can be made, and NULL_TREE otherwise. */
6145 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6146 tree type, tree arg0, tree arg1)
6150 if (BUILTIN_SQRT_P (fcode))
6152 tree arg = CALL_EXPR_ARG (arg0, 0);
6153 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6155 c = TREE_REAL_CST (arg1);
6156 if (REAL_VALUE_NEGATIVE (c))
6158 /* sqrt(x) < y is always false, if y is negative. */
6159 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6160 return omit_one_operand (type, integer_zero_node, arg);
6162 /* sqrt(x) > y is always true, if y is negative and we
6163 don't care about NaNs, i.e. negative values of x. */
6164 if (code == NE_EXPR || !HONOR_NANS (mode))
6165 return omit_one_operand (type, integer_one_node, arg);
6167 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6168 return fold_build2 (GE_EXPR, type, arg,
6169 build_real (TREE_TYPE (arg), dconst0));
6171 else if (code == GT_EXPR || code == GE_EXPR)
6175 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6176 real_convert (&c2, mode, &c2);
6178 if (REAL_VALUE_ISINF (c2))
6180 /* sqrt(x) > y is x == +Inf, when y is very large. */
6181 if (HONOR_INFINITIES (mode))
6182 return fold_build2 (EQ_EXPR, type, arg,
6183 build_real (TREE_TYPE (arg), c2));
6185 /* sqrt(x) > y is always false, when y is very large
6186 and we don't care about infinities. */
6187 return omit_one_operand (type, integer_zero_node, arg);
6190 /* sqrt(x) > c is the same as x > c*c. */
6191 return fold_build2 (code, type, arg,
6192 build_real (TREE_TYPE (arg), c2));
6194 else if (code == LT_EXPR || code == LE_EXPR)
6198 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6199 real_convert (&c2, mode, &c2);
6201 if (REAL_VALUE_ISINF (c2))
6203 /* sqrt(x) < y is always true, when y is a very large
6204 value and we don't care about NaNs or Infinities. */
6205 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6206 return omit_one_operand (type, integer_one_node, arg);
6208 /* sqrt(x) < y is x != +Inf when y is very large and we
6209 don't care about NaNs. */
6210 if (! HONOR_NANS (mode))
6211 return fold_build2 (NE_EXPR, type, arg,
6212 build_real (TREE_TYPE (arg), c2));
6214 /* sqrt(x) < y is x >= 0 when y is very large and we
6215 don't care about Infinities. */
6216 if (! HONOR_INFINITIES (mode))
6217 return fold_build2 (GE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg), dconst0));
6220 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6221 if (lang_hooks.decls.global_bindings_p () != 0
6222 || CONTAINS_PLACEHOLDER_P (arg))
6225 arg = save_expr (arg);
6226 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6227 fold_build2 (GE_EXPR, type, arg,
6228 build_real (TREE_TYPE (arg),
6230 fold_build2 (NE_EXPR, type, arg,
6231 build_real (TREE_TYPE (arg),
6235 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6236 if (! HONOR_NANS (mode))
6237 return fold_build2 (code, type, arg,
6238 build_real (TREE_TYPE (arg), c2));
6240 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6241 if (lang_hooks.decls.global_bindings_p () == 0
6242 && ! CONTAINS_PLACEHOLDER_P (arg))
6244 arg = save_expr (arg);
6245 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6246 fold_build2 (GE_EXPR, type, arg,
6247 build_real (TREE_TYPE (arg),
6249 fold_build2 (code, type, arg,
6250 build_real (TREE_TYPE (arg),
6259 /* Subroutine of fold() that optimizes comparisons against Infinities,
6260 either +Inf or -Inf.
6262 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6263 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6264 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6266 The function returns the constant folded tree if a simplification
6267 can be made, and NULL_TREE otherwise. */
6270 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6272 enum machine_mode mode;
6273 REAL_VALUE_TYPE max;
6277 mode = TYPE_MODE (TREE_TYPE (arg0));
6279 /* For negative infinity swap the sense of the comparison. */
6280 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6282 code = swap_tree_comparison (code);
6287 /* x > +Inf is always false, if with ignore sNANs. */
6288 if (HONOR_SNANS (mode))
6290 return omit_one_operand (type, integer_zero_node, arg0);
6293 /* x <= +Inf is always true, if we don't case about NaNs. */
6294 if (! HONOR_NANS (mode))
6295 return omit_one_operand (type, integer_one_node, arg0);
6297 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6298 if (lang_hooks.decls.global_bindings_p () == 0
6299 && ! CONTAINS_PLACEHOLDER_P (arg0))
6301 arg0 = save_expr (arg0);
6302 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6308 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6309 real_maxval (&max, neg, mode);
6310 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6311 arg0, build_real (TREE_TYPE (arg0), max));
6314 /* x < +Inf is always equal to x <= DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6320 /* x != +Inf is always equal to !(x > DBL_MAX). */
6321 real_maxval (&max, neg, mode);
6322 if (! HONOR_NANS (mode))
6323 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6324 arg0, build_real (TREE_TYPE (arg0), max));
6326 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6328 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6337 /* Subroutine of fold() that optimizes comparisons of a division by
6338 a nonzero integer constant against an integer constant, i.e.
6341 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6342 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6343 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6345 The function returns the constant folded tree if a simplification
6346 can be made, and NULL_TREE otherwise. */
6349 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6351 tree prod, tmp, hi, lo;
6352 tree arg00 = TREE_OPERAND (arg0, 0);
6353 tree arg01 = TREE_OPERAND (arg0, 1);
6354 unsigned HOST_WIDE_INT lpart;
6355 HOST_WIDE_INT hpart;
6356 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6360 /* We have to do this the hard way to detect unsigned overflow.
6361 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6362 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6363 TREE_INT_CST_HIGH (arg01),
6364 TREE_INT_CST_LOW (arg1),
6365 TREE_INT_CST_HIGH (arg1),
6366 &lpart, &hpart, unsigned_p);
6367 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6369 neg_overflow = false;
6373 tmp = int_const_binop (MINUS_EXPR, arg01,
6374 build_int_cst (TREE_TYPE (arg01), 1), 0);
6377 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6378 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6379 TREE_INT_CST_HIGH (prod),
6380 TREE_INT_CST_LOW (tmp),
6381 TREE_INT_CST_HIGH (tmp),
6382 &lpart, &hpart, unsigned_p);
6383 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6384 -1, overflow | TREE_OVERFLOW (prod));
6386 else if (tree_int_cst_sgn (arg01) >= 0)
6388 tmp = int_const_binop (MINUS_EXPR, arg01,
6389 build_int_cst (TREE_TYPE (arg01), 1), 0);
6390 switch (tree_int_cst_sgn (arg1))
6393 neg_overflow = true;
6394 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6399 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6404 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6414 /* A negative divisor reverses the relational operators. */
6415 code = swap_tree_comparison (code);
6417 tmp = int_const_binop (PLUS_EXPR, arg01,
6418 build_int_cst (TREE_TYPE (arg01), 1), 0);
6419 switch (tree_int_cst_sgn (arg1))
6422 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6427 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6432 neg_overflow = true;
6433 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6445 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6446 return omit_one_operand (type, integer_zero_node, arg00);
6447 if (TREE_OVERFLOW (hi))
6448 return fold_build2 (GE_EXPR, type, arg00, lo);
6449 if (TREE_OVERFLOW (lo))
6450 return fold_build2 (LE_EXPR, type, arg00, hi);
6451 return build_range_check (type, arg00, 1, lo, hi);
6454 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6455 return omit_one_operand (type, integer_one_node, arg00);
6456 if (TREE_OVERFLOW (hi))
6457 return fold_build2 (LT_EXPR, type, arg00, lo);
6458 if (TREE_OVERFLOW (lo))
6459 return fold_build2 (GT_EXPR, type, arg00, hi);
6460 return build_range_check (type, arg00, 0, lo, hi);
6463 if (TREE_OVERFLOW (lo))
6465 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6466 return omit_one_operand (type, tmp, arg00);
6468 return fold_build2 (LT_EXPR, type, arg00, lo);
6471 if (TREE_OVERFLOW (hi))
6473 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6474 return omit_one_operand (type, tmp, arg00);
6476 return fold_build2 (LE_EXPR, type, arg00, hi);
6479 if (TREE_OVERFLOW (hi))
6481 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6482 return omit_one_operand (type, tmp, arg00);
6484 return fold_build2 (GT_EXPR, type, arg00, hi);
6487 if (TREE_OVERFLOW (lo))
6489 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6490 return omit_one_operand (type, tmp, arg00);
6492 return fold_build2 (GE_EXPR, type, arg00, lo);
6502 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6503 equality/inequality test, then return a simplified form of the test
6504 using a sign testing. Otherwise return NULL. TYPE is the desired
6508 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6511 /* If this is testing a single bit, we can optimize the test. */
6512 if ((code == NE_EXPR || code == EQ_EXPR)
6513 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6514 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6516 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6517 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6518 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6520 if (arg00 != NULL_TREE
6521 /* This is only a win if casting to a signed type is cheap,
6522 i.e. when arg00's type is not a partial mode. */
6523 && TYPE_PRECISION (TREE_TYPE (arg00))
6524 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6526 tree stype = signed_type_for (TREE_TYPE (arg00));
6527 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6528 result_type, fold_convert (stype, arg00),
6529 build_int_cst (stype, 0));
6536 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6537 equality/inequality test, then return a simplified form of
6538 the test using shifts and logical operations. Otherwise return
6539 NULL. TYPE is the desired result type. */
6542 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6545 /* If this is testing a single bit, we can optimize the test. */
6546 if ((code == NE_EXPR || code == EQ_EXPR)
6547 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6548 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6550 tree inner = TREE_OPERAND (arg0, 0);
6551 tree type = TREE_TYPE (arg0);
6552 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6553 enum machine_mode operand_mode = TYPE_MODE (type);
6555 tree signed_type, unsigned_type, intermediate_type;
6558 /* First, see if we can fold the single bit test into a sign-bit
6560 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6565 /* Otherwise we have (A & C) != 0 where C is a single bit,
6566 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6567 Similarly for (A & C) == 0. */
6569 /* If INNER is a right shift of a constant and it plus BITNUM does
6570 not overflow, adjust BITNUM and INNER. */
6571 if (TREE_CODE (inner) == RSHIFT_EXPR
6572 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6573 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6574 && bitnum < TYPE_PRECISION (type)
6575 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6576 bitnum - TYPE_PRECISION (type)))
6578 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6579 inner = TREE_OPERAND (inner, 0);
6582 /* If we are going to be able to omit the AND below, we must do our
6583 operations as unsigned. If we must use the AND, we have a choice.
6584 Normally unsigned is faster, but for some machines signed is. */
6585 #ifdef LOAD_EXTEND_OP
6586 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6587 && !flag_syntax_only) ? 0 : 1;
6592 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6593 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6594 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6595 inner = fold_convert (intermediate_type, inner);
6598 inner = build2 (RSHIFT_EXPR, intermediate_type,
6599 inner, size_int (bitnum));
6601 one = build_int_cst (intermediate_type, 1);
6603 if (code == EQ_EXPR)
6604 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6606 /* Put the AND last so it can combine with more things. */
6607 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6609 /* Make sure to return the proper type. */
6610 inner = fold_convert (result_type, inner);
6617 /* Check whether we are allowed to reorder operands arg0 and arg1,
6618 such that the evaluation of arg1 occurs before arg0. */
6621 reorder_operands_p (const_tree arg0, const_tree arg1)
6623 if (! flag_evaluation_order)
6625 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6627 return ! TREE_SIDE_EFFECTS (arg0)
6628 && ! TREE_SIDE_EFFECTS (arg1);
6631 /* Test whether it is preferable two swap two operands, ARG0 and
6632 ARG1, for example because ARG0 is an integer constant and ARG1
6633 isn't. If REORDER is true, only recommend swapping if we can
6634 evaluate the operands in reverse order. */
6637 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6639 STRIP_SIGN_NOPS (arg0);
6640 STRIP_SIGN_NOPS (arg1);
6642 if (TREE_CODE (arg1) == INTEGER_CST)
6644 if (TREE_CODE (arg0) == INTEGER_CST)
6647 if (TREE_CODE (arg1) == REAL_CST)
6649 if (TREE_CODE (arg0) == REAL_CST)
6652 if (TREE_CODE (arg1) == FIXED_CST)
6654 if (TREE_CODE (arg0) == FIXED_CST)
6657 if (TREE_CODE (arg1) == COMPLEX_CST)
6659 if (TREE_CODE (arg0) == COMPLEX_CST)
6662 if (TREE_CONSTANT (arg1))
6664 if (TREE_CONSTANT (arg0))
6670 if (reorder && flag_evaluation_order
6671 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6674 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6675 for commutative and comparison operators. Ensuring a canonical
6676 form allows the optimizers to find additional redundancies without
6677 having to explicitly check for both orderings. */
6678 if (TREE_CODE (arg0) == SSA_NAME
6679 && TREE_CODE (arg1) == SSA_NAME
6680 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6683 /* Put SSA_NAMEs last. */
6684 if (TREE_CODE (arg1) == SSA_NAME)
6686 if (TREE_CODE (arg0) == SSA_NAME)
6689 /* Put variables last. */
6698 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6699 ARG0 is extended to a wider type. */
6702 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6704 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6706 tree shorter_type, outer_type;
6710 if (arg0_unw == arg0)
6712 shorter_type = TREE_TYPE (arg0_unw);
6714 #ifdef HAVE_canonicalize_funcptr_for_compare
6715 /* Disable this optimization if we're casting a function pointer
6716 type on targets that require function pointer canonicalization. */
6717 if (HAVE_canonicalize_funcptr_for_compare
6718 && TREE_CODE (shorter_type) == POINTER_TYPE
6719 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6723 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6726 arg1_unw = get_unwidened (arg1, NULL_TREE);
6728 /* If possible, express the comparison in the shorter mode. */
6729 if ((code == EQ_EXPR || code == NE_EXPR
6730 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6731 && (TREE_TYPE (arg1_unw) == shorter_type
6732 || (TYPE_PRECISION (shorter_type)
6733 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6734 || ((TYPE_PRECISION (shorter_type)
6735 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6736 && (TYPE_UNSIGNED (shorter_type)
6737 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6738 || (TREE_CODE (arg1_unw) == INTEGER_CST
6739 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6740 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6741 && int_fits_type_p (arg1_unw, shorter_type))))
6742 return fold_build2 (code, type, arg0_unw,
6743 fold_convert (shorter_type, arg1_unw));
6745 if (TREE_CODE (arg1_unw) != INTEGER_CST
6746 || TREE_CODE (shorter_type) != INTEGER_TYPE
6747 || !int_fits_type_p (arg1_unw, shorter_type))
6750 /* If we are comparing with the integer that does not fit into the range
6751 of the shorter type, the result is known. */
6752 outer_type = TREE_TYPE (arg1_unw);
6753 min = lower_bound_in_type (outer_type, shorter_type);
6754 max = upper_bound_in_type (outer_type, shorter_type);
6756 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6758 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6765 return omit_one_operand (type, integer_zero_node, arg0);
6770 return omit_one_operand (type, integer_one_node, arg0);
6776 return omit_one_operand (type, integer_one_node, arg0);
6778 return omit_one_operand (type, integer_zero_node, arg0);
6783 return omit_one_operand (type, integer_zero_node, arg0);
6785 return omit_one_operand (type, integer_one_node, arg0);
6794 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6795 ARG0 just the signedness is changed. */
6798 fold_sign_changed_comparison (enum tree_code code, tree type,
6799 tree arg0, tree arg1)
6802 tree inner_type, outer_type;
6804 if (TREE_CODE (arg0) != NOP_EXPR
6805 && TREE_CODE (arg0) != CONVERT_EXPR)
6808 outer_type = TREE_TYPE (arg0);
6809 arg0_inner = TREE_OPERAND (arg0, 0);
6810 inner_type = TREE_TYPE (arg0_inner);
6812 #ifdef HAVE_canonicalize_funcptr_for_compare
6813 /* Disable this optimization if we're casting a function pointer
6814 type on targets that require function pointer canonicalization. */
6815 if (HAVE_canonicalize_funcptr_for_compare
6816 && TREE_CODE (inner_type) == POINTER_TYPE
6817 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6824 /* If the conversion is from an integral subtype to its basetype
6826 if (TREE_TYPE (inner_type) == outer_type)
6829 if (TREE_CODE (arg1) != INTEGER_CST
6830 && !((TREE_CODE (arg1) == NOP_EXPR
6831 || TREE_CODE (arg1) == CONVERT_EXPR)
6832 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6835 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6840 if (TREE_CODE (arg1) == INTEGER_CST)
6841 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6842 TREE_INT_CST_HIGH (arg1), 0,
6843 TREE_OVERFLOW (arg1));
6845 arg1 = fold_convert (inner_type, arg1);
6847 return fold_build2 (code, type, arg0_inner, arg1);
6850 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6851 step of the array. Reconstructs s and delta in the case of s * delta
6852 being an integer constant (and thus already folded).
6853 ADDR is the address. MULT is the multiplicative expression.
6854 If the function succeeds, the new address expression is returned. Otherwise
6855 NULL_TREE is returned. */
6858 try_move_mult_to_index (tree addr, tree op1)
6860 tree s, delta, step;
6861 tree ref = TREE_OPERAND (addr, 0), pref;
6866 /* Strip the nops that might be added when converting op1 to sizetype. */
6869 /* Canonicalize op1 into a possibly non-constant delta
6870 and an INTEGER_CST s. */
6871 if (TREE_CODE (op1) == MULT_EXPR)
6873 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6878 if (TREE_CODE (arg0) == INTEGER_CST)
6883 else if (TREE_CODE (arg1) == INTEGER_CST)
6891 else if (TREE_CODE (op1) == INTEGER_CST)
6898 /* Simulate we are delta * 1. */
6900 s = integer_one_node;
6903 for (;; ref = TREE_OPERAND (ref, 0))
6905 if (TREE_CODE (ref) == ARRAY_REF)
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6911 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6915 step = array_ref_element_size (ref);
6916 if (TREE_CODE (step) != INTEGER_CST)
6921 if (! tree_int_cst_equal (step, s))
6926 /* Try if delta is a multiple of step. */
6927 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6933 /* Only fold here if we can verify we do not overflow one
6934 dimension of a multi-dimensional array. */
6939 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6940 || !INTEGRAL_TYPE_P (itype)
6941 || !TYPE_MAX_VALUE (itype)
6942 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6945 tmp = fold_binary (PLUS_EXPR, itype,
6946 fold_convert (itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert (itype, delta));
6950 || TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6960 if (!handled_component_p (ref))
6964 /* We found the suitable array reference. So copy everything up to it,
6965 and replace the index. */
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6973 pref = TREE_OPERAND (pref, 0);
6974 TREE_OPERAND (pos, 0) = copy_node (pref);
6975 pos = TREE_OPERAND (pos, 0);
6978 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6979 fold_convert (itype,
6980 TREE_OPERAND (pos, 1)),
6981 fold_convert (itype, delta));
6983 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6987 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6988 means A >= Y && A != MAX, but in this case we know that
6989 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6992 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6994 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6996 if (TREE_CODE (bound) == LT_EXPR)
6997 a = TREE_OPERAND (bound, 0);
6998 else if (TREE_CODE (bound) == GT_EXPR)
6999 a = TREE_OPERAND (bound, 1);
7003 typea = TREE_TYPE (a);
7004 if (!INTEGRAL_TYPE_P (typea)
7005 && !POINTER_TYPE_P (typea))
7008 if (TREE_CODE (ineq) == LT_EXPR)
7010 a1 = TREE_OPERAND (ineq, 1);
7011 y = TREE_OPERAND (ineq, 0);
7013 else if (TREE_CODE (ineq) == GT_EXPR)
7015 a1 = TREE_OPERAND (ineq, 0);
7016 y = TREE_OPERAND (ineq, 1);
7021 if (TREE_TYPE (a1) != typea)
7024 if (POINTER_TYPE_P (typea))
7026 /* Convert the pointer types into integer before taking the difference. */
7027 tree ta = fold_convert (ssizetype, a);
7028 tree ta1 = fold_convert (ssizetype, a1);
7029 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7032 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7034 if (!diff || !integer_onep (diff))
7037 return fold_build2 (GE_EXPR, type, a, y);
7040 /* Fold a sum or difference of at least one multiplication.
7041 Returns the folded tree or NULL if no simplification could be made. */
7044 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7046 tree arg00, arg01, arg10, arg11;
7047 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7049 /* (A * C) +- (B * C) -> (A+-B) * C.
7050 (A * C) +- A -> A * (C+-1).
7051 We are most concerned about the case where C is a constant,
7052 but other combinations show up during loop reduction. Since
7053 it is not difficult, try all four possibilities. */
7055 if (TREE_CODE (arg0) == MULT_EXPR)
7057 arg00 = TREE_OPERAND (arg0, 0);
7058 arg01 = TREE_OPERAND (arg0, 1);
7060 else if (TREE_CODE (arg0) == INTEGER_CST)
7062 arg00 = build_one_cst (type);
7067 /* We cannot generate constant 1 for fract. */
7068 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7071 arg01 = build_one_cst (type);
7073 if (TREE_CODE (arg1) == MULT_EXPR)
7075 arg10 = TREE_OPERAND (arg1, 0);
7076 arg11 = TREE_OPERAND (arg1, 1);
7078 else if (TREE_CODE (arg1) == INTEGER_CST)
7080 arg10 = build_one_cst (type);
7085 /* We cannot generate constant 1 for fract. */
7086 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7089 arg11 = build_one_cst (type);
7093 if (operand_equal_p (arg01, arg11, 0))
7094 same = arg01, alt0 = arg00, alt1 = arg10;
7095 else if (operand_equal_p (arg00, arg10, 0))
7096 same = arg00, alt0 = arg01, alt1 = arg11;
7097 else if (operand_equal_p (arg00, arg11, 0))
7098 same = arg00, alt0 = arg01, alt1 = arg10;
7099 else if (operand_equal_p (arg01, arg10, 0))
7100 same = arg01, alt0 = arg00, alt1 = arg11;
7102 /* No identical multiplicands; see if we can find a common
7103 power-of-two factor in non-power-of-two multiplies. This
7104 can help in multi-dimensional array access. */
7105 else if (host_integerp (arg01, 0)
7106 && host_integerp (arg11, 0))
7108 HOST_WIDE_INT int01, int11, tmp;
7111 int01 = TREE_INT_CST_LOW (arg01);
7112 int11 = TREE_INT_CST_LOW (arg11);
7114 /* Move min of absolute values to int11. */
7115 if ((int01 >= 0 ? int01 : -int01)
7116 < (int11 >= 0 ? int11 : -int11))
7118 tmp = int01, int01 = int11, int11 = tmp;
7119 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7126 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7128 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7129 build_int_cst (TREE_TYPE (arg00),
7134 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7139 return fold_build2 (MULT_EXPR, type,
7140 fold_build2 (code, type,
7141 fold_convert (type, alt0),
7142 fold_convert (type, alt1)),
7143 fold_convert (type, same));
7148 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7154 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7156 tree type = TREE_TYPE (expr);
7157 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7158 int byte, offset, word, words;
7159 unsigned char value;
7161 if (total_bytes > len)
7163 words = total_bytes / UNITS_PER_WORD;
7165 for (byte = 0; byte < total_bytes; byte++)
7167 int bitpos = byte * BITS_PER_UNIT;
7168 if (bitpos < HOST_BITS_PER_WIDE_INT)
7169 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7171 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7172 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7174 if (total_bytes > UNITS_PER_WORD)
7176 word = byte / UNITS_PER_WORD;
7177 if (WORDS_BIG_ENDIAN)
7178 word = (words - 1) - word;
7179 offset = word * UNITS_PER_WORD;
7180 if (BYTES_BIG_ENDIAN)
7181 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7183 offset += byte % UNITS_PER_WORD;
7186 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7187 ptr[offset] = value;
7193 /* Subroutine of native_encode_expr. Encode the REAL_CST
7194 specified by EXPR into the buffer PTR of length LEN bytes.
7195 Return the number of bytes placed in the buffer, or zero
7199 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7201 tree type = TREE_TYPE (expr);
7202 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7203 int byte, offset, word, words, bitpos;
7204 unsigned char value;
7206 /* There are always 32 bits in each long, no matter the size of
7207 the hosts long. We handle floating point representations with
7211 if (total_bytes > len)
7213 words = 32 / UNITS_PER_WORD;
7215 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7217 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7218 bitpos += BITS_PER_UNIT)
7220 byte = (bitpos / BITS_PER_UNIT) & 3;
7221 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7223 if (UNITS_PER_WORD < 4)
7225 word = byte / UNITS_PER_WORD;
7226 if (WORDS_BIG_ENDIAN)
7227 word = (words - 1) - word;
7228 offset = word * UNITS_PER_WORD;
7229 if (BYTES_BIG_ENDIAN)
7230 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7232 offset += byte % UNITS_PER_WORD;
7235 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7236 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7241 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7247 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7252 part = TREE_REALPART (expr);
7253 rsize = native_encode_expr (part, ptr, len);
7256 part = TREE_IMAGPART (expr);
7257 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7260 return rsize + isize;
7264 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7265 specified by EXPR into the buffer PTR of length LEN bytes.
7266 Return the number of bytes placed in the buffer, or zero
7270 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7272 int i, size, offset, count;
7273 tree itype, elem, elements;
7276 elements = TREE_VECTOR_CST_ELTS (expr);
7277 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7278 itype = TREE_TYPE (TREE_TYPE (expr));
7279 size = GET_MODE_SIZE (TYPE_MODE (itype));
7280 for (i = 0; i < count; i++)
7284 elem = TREE_VALUE (elements);
7285 elements = TREE_CHAIN (elements);
7292 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7297 if (offset + size > len)
7299 memset (ptr+offset, 0, size);
7307 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7308 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7309 buffer PTR of length LEN bytes. Return the number of bytes
7310 placed in the buffer, or zero upon failure. */
7313 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7315 switch (TREE_CODE (expr))
7318 return native_encode_int (expr, ptr, len);
7321 return native_encode_real (expr, ptr, len);
7324 return native_encode_complex (expr, ptr, len);
7327 return native_encode_vector (expr, ptr, len);
7335 /* Subroutine of native_interpret_expr. Interpret the contents of
7336 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7337 If the buffer cannot be interpreted, return NULL_TREE. */
7340 native_interpret_int (tree type, const unsigned char *ptr, int len)
7342 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7343 int byte, offset, word, words;
7344 unsigned char value;
7345 unsigned int HOST_WIDE_INT lo = 0;
7346 HOST_WIDE_INT hi = 0;
7348 if (total_bytes > len)
7350 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7352 words = total_bytes / UNITS_PER_WORD;
7354 for (byte = 0; byte < total_bytes; byte++)
7356 int bitpos = byte * BITS_PER_UNIT;
7357 if (total_bytes > UNITS_PER_WORD)
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7366 offset += byte % UNITS_PER_WORD;
7369 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7370 value = ptr[offset];
7372 if (bitpos < HOST_BITS_PER_WIDE_INT)
7373 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7375 hi |= (unsigned HOST_WIDE_INT) value
7376 << (bitpos - HOST_BITS_PER_WIDE_INT);
7379 return build_int_cst_wide_type (type, lo, hi);
7383 /* Subroutine of native_interpret_expr. Interpret the contents of
7384 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7385 If the buffer cannot be interpreted, return NULL_TREE. */
7388 native_interpret_real (tree type, const unsigned char *ptr, int len)
7390 enum machine_mode mode = TYPE_MODE (type);
7391 int total_bytes = GET_MODE_SIZE (mode);
7392 int byte, offset, word, words, bitpos;
7393 unsigned char value;
7394 /* There are always 32 bits in each long, no matter the size of
7395 the hosts long. We handle floating point representations with
7400 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7401 if (total_bytes > len || total_bytes > 24)
7403 words = 32 / UNITS_PER_WORD;
7405 memset (tmp, 0, sizeof (tmp));
7406 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7407 bitpos += BITS_PER_UNIT)
7409 byte = (bitpos / BITS_PER_UNIT) & 3;
7410 if (UNITS_PER_WORD < 4)
7412 word = byte / UNITS_PER_WORD;
7413 if (WORDS_BIG_ENDIAN)
7414 word = (words - 1) - word;
7415 offset = word * UNITS_PER_WORD;
7416 if (BYTES_BIG_ENDIAN)
7417 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7419 offset += byte % UNITS_PER_WORD;
7422 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7423 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7425 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7428 real_from_target (&r, tmp, mode);
7429 return build_real (type, r);
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7438 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7440 tree etype, rpart, ipart;
7443 etype = TREE_TYPE (type);
7444 size = GET_MODE_SIZE (TYPE_MODE (etype));
7447 rpart = native_interpret_expr (etype, ptr, size);
7450 ipart = native_interpret_expr (etype, ptr+size, size);
7453 return build_complex (type, rpart, ipart);
7457 /* Subroutine of native_interpret_expr. Interpret the contents of
7458 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7459 If the buffer cannot be interpreted, return NULL_TREE. */
7462 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7464 tree etype, elem, elements;
7467 etype = TREE_TYPE (type);
7468 size = GET_MODE_SIZE (TYPE_MODE (etype));
7469 count = TYPE_VECTOR_SUBPARTS (type);
7470 if (size * count > len)
7473 elements = NULL_TREE;
7474 for (i = count - 1; i >= 0; i--)
7476 elem = native_interpret_expr (etype, ptr+(i*size), size);
7479 elements = tree_cons (NULL_TREE, elem, elements);
7481 return build_vector (type, elements);
7485 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7486 the buffer PTR of length LEN as a constant of type TYPE. For
7487 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7488 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7489 return NULL_TREE. */
7492 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7494 switch (TREE_CODE (type))
7499 return native_interpret_int (type, ptr, len);
7502 return native_interpret_real (type, ptr, len);
7505 return native_interpret_complex (type, ptr, len);
7508 return native_interpret_vector (type, ptr, len);
7516 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7517 TYPE at compile-time. If we're unable to perform the conversion
7518 return NULL_TREE. */
7521 fold_view_convert_expr (tree type, tree expr)
7523 /* We support up to 512-bit values (for V8DFmode). */
7524 unsigned char buffer[64];
7527 /* Check that the host and target are sane. */
7528 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7531 len = native_encode_expr (expr, buffer, sizeof (buffer));
7535 return native_interpret_expr (type, buffer, len);
7538 /* Build an expression for the address of T. Folds away INDIRECT_REF
7539 to avoid confusing the gimplify process. When IN_FOLD is true
7540 avoid modifications of T. */
7543 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7545 /* The size of the object is not relevant when talking about its address. */
7546 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7547 t = TREE_OPERAND (t, 0);
7549 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7550 if (TREE_CODE (t) == INDIRECT_REF
7551 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7553 t = TREE_OPERAND (t, 0);
7555 if (TREE_TYPE (t) != ptrtype)
7556 t = build1 (NOP_EXPR, ptrtype, t);
7562 while (handled_component_p (base))
7563 base = TREE_OPERAND (base, 0);
7566 TREE_ADDRESSABLE (base) = 1;
7568 t = build1 (ADDR_EXPR, ptrtype, t);
7571 t = build1 (ADDR_EXPR, ptrtype, t);
7576 /* Build an expression for the address of T with type PTRTYPE. This
7577 function modifies the input parameter 'T' by sometimes setting the
7578 TREE_ADDRESSABLE flag. */
7581 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7583 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7586 /* Build an expression for the address of T. This function modifies
7587 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7588 flag. When called from fold functions, use fold_addr_expr instead. */
7591 build_fold_addr_expr (tree t)
7593 return build_fold_addr_expr_with_type_1 (t,
7594 build_pointer_type (TREE_TYPE (t)),
7598 /* Same as build_fold_addr_expr, builds an expression for the address
7599 of T, but avoids touching the input node 't'. Fold functions
7600 should use this version. */
7603 fold_addr_expr (tree t)
7605 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7607 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7610 /* Fold a unary expression of code CODE and type TYPE with operand
7611 OP0. Return the folded expression if folding is successful.
7612 Otherwise, return NULL_TREE. */
7615 fold_unary (enum tree_code code, tree type, tree op0)
7619 enum tree_code_class kind = TREE_CODE_CLASS (code);
7621 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7622 && TREE_CODE_LENGTH (code) == 1);
7627 if (code == NOP_EXPR || code == CONVERT_EXPR
7628 || code == FLOAT_EXPR || code == ABS_EXPR)
7630 /* Don't use STRIP_NOPS, because signedness of argument type
7632 STRIP_SIGN_NOPS (arg0);
7636 /* Strip any conversions that don't change the mode. This
7637 is safe for every expression, except for a comparison
7638 expression because its signedness is derived from its
7641 Note that this is done as an internal manipulation within
7642 the constant folder, in order to find the simplest
7643 representation of the arguments so that their form can be
7644 studied. In any cases, the appropriate type conversions
7645 should be put back in the tree that will get out of the
7651 if (TREE_CODE_CLASS (code) == tcc_unary)
7653 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7654 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7655 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7656 else if (TREE_CODE (arg0) == COND_EXPR)
7658 tree arg01 = TREE_OPERAND (arg0, 1);
7659 tree arg02 = TREE_OPERAND (arg0, 2);
7660 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7661 arg01 = fold_build1 (code, type, arg01);
7662 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7663 arg02 = fold_build1 (code, type, arg02);
7664 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7667 /* If this was a conversion, and all we did was to move into
7668 inside the COND_EXPR, bring it back out. But leave it if
7669 it is a conversion from integer to integer and the
7670 result precision is no wider than a word since such a
7671 conversion is cheap and may be optimized away by combine,
7672 while it couldn't if it were outside the COND_EXPR. Then return
7673 so we don't get into an infinite recursion loop taking the
7674 conversion out and then back in. */
7676 if ((code == NOP_EXPR || code == CONVERT_EXPR
7677 || code == NON_LVALUE_EXPR)
7678 && TREE_CODE (tem) == COND_EXPR
7679 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7680 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7681 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7682 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7683 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7684 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7685 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7687 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7688 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7689 || flag_syntax_only))
7690 tem = build1 (code, type,
7692 TREE_TYPE (TREE_OPERAND
7693 (TREE_OPERAND (tem, 1), 0)),
7694 TREE_OPERAND (tem, 0),
7695 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7696 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7699 else if (COMPARISON_CLASS_P (arg0))
7701 if (TREE_CODE (type) == BOOLEAN_TYPE)
7703 arg0 = copy_node (arg0);
7704 TREE_TYPE (arg0) = type;
7707 else if (TREE_CODE (type) != INTEGER_TYPE)
7708 return fold_build3 (COND_EXPR, type, arg0,
7709 fold_build1 (code, type,
7711 fold_build1 (code, type,
7712 integer_zero_node));
7719 /* Re-association barriers around constants and other re-association
7720 barriers can be removed. */
7721 if (CONSTANT_CLASS_P (op0)
7722 || TREE_CODE (op0) == PAREN_EXPR)
7723 return fold_convert (type, op0);
7729 case FIX_TRUNC_EXPR:
7730 if (TREE_TYPE (op0) == type)
7733 /* If we have (type) (a CMP b) and type is an integral type, return
7734 new expression involving the new type. */
7735 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7736 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7737 TREE_OPERAND (op0, 1));
7739 /* Handle cases of two conversions in a row. */
7740 if (TREE_CODE (op0) == NOP_EXPR
7741 || TREE_CODE (op0) == CONVERT_EXPR)
7743 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7744 tree inter_type = TREE_TYPE (op0);
7745 int inside_int = INTEGRAL_TYPE_P (inside_type);
7746 int inside_ptr = POINTER_TYPE_P (inside_type);
7747 int inside_float = FLOAT_TYPE_P (inside_type);
7748 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7749 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7750 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7751 int inter_int = INTEGRAL_TYPE_P (inter_type);
7752 int inter_ptr = POINTER_TYPE_P (inter_type);
7753 int inter_float = FLOAT_TYPE_P (inter_type);
7754 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7755 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7756 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7757 int final_int = INTEGRAL_TYPE_P (type);
7758 int final_ptr = POINTER_TYPE_P (type);
7759 int final_float = FLOAT_TYPE_P (type);
7760 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7761 unsigned int final_prec = TYPE_PRECISION (type);
7762 int final_unsignedp = TYPE_UNSIGNED (type);
7764 /* In addition to the cases of two conversions in a row
7765 handled below, if we are converting something to its own
7766 type via an object of identical or wider precision, neither
7767 conversion is needed. */
7768 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7769 && (((inter_int || inter_ptr) && final_int)
7770 || (inter_float && final_float))
7771 && inter_prec >= final_prec)
7772 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7774 /* Likewise, if the intermediate and final types are either both
7775 float or both integer, we don't need the middle conversion if
7776 it is wider than the final type and doesn't change the signedness
7777 (for integers). Avoid this if the final type is a pointer
7778 since then we sometimes need the inner conversion. Likewise if
7779 the outer has a precision not equal to the size of its mode. */
7780 if (((inter_int && inside_int)
7781 || (inter_float && inside_float)
7782 || (inter_vec && inside_vec))
7783 && inter_prec >= inside_prec
7784 && (inter_float || inter_vec
7785 || inter_unsignedp == inside_unsignedp)
7786 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7787 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7789 && (! final_vec || inter_prec == inside_prec))
7790 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7792 /* If we have a sign-extension of a zero-extended value, we can
7793 replace that by a single zero-extension. */
7794 if (inside_int && inter_int && final_int
7795 && inside_prec < inter_prec && inter_prec < final_prec
7796 && inside_unsignedp && !inter_unsignedp)
7797 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7799 /* Two conversions in a row are not needed unless:
7800 - some conversion is floating-point (overstrict for now), or
7801 - some conversion is a vector (overstrict for now), or
7802 - the intermediate type is narrower than both initial and
7804 - the intermediate type and innermost type differ in signedness,
7805 and the outermost type is wider than the intermediate, or
7806 - the initial type is a pointer type and the precisions of the
7807 intermediate and final types differ, or
7808 - the final type is a pointer type and the precisions of the
7809 initial and intermediate types differ. */
7810 if (! inside_float && ! inter_float && ! final_float
7811 && ! inside_vec && ! inter_vec && ! final_vec
7812 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7813 && ! (inside_int && inter_int
7814 && inter_unsignedp != inside_unsignedp
7815 && inter_prec < final_prec)
7816 && ((inter_unsignedp && inter_prec > inside_prec)
7817 == (final_unsignedp && final_prec > inter_prec))
7818 && ! (inside_ptr && inter_prec != final_prec)
7819 && ! (final_ptr && inside_prec != inter_prec)
7820 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7821 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7822 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7825 /* Handle (T *)&A.B.C for A being of type T and B and C
7826 living at offset zero. This occurs frequently in
7827 C++ upcasting and then accessing the base. */
7828 if (TREE_CODE (op0) == ADDR_EXPR
7829 && POINTER_TYPE_P (type)
7830 && handled_component_p (TREE_OPERAND (op0, 0)))
7832 HOST_WIDE_INT bitsize, bitpos;
7834 enum machine_mode mode;
7835 int unsignedp, volatilep;
7836 tree base = TREE_OPERAND (op0, 0);
7837 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7838 &mode, &unsignedp, &volatilep, false);
7839 /* If the reference was to a (constant) zero offset, we can use
7840 the address of the base if it has the same base type
7841 as the result type. */
7842 if (! offset && bitpos == 0
7843 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7844 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7845 return fold_convert (type, fold_addr_expr (base));
7848 if ((TREE_CODE (op0) == MODIFY_EXPR
7849 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7850 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7851 /* Detect assigning a bitfield. */
7852 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7854 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7856 /* Don't leave an assignment inside a conversion
7857 unless assigning a bitfield. */
7858 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7859 /* First do the assignment, then return converted constant. */
7860 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7861 TREE_NO_WARNING (tem) = 1;
7862 TREE_USED (tem) = 1;
7866 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7867 constants (if x has signed type, the sign bit cannot be set
7868 in c). This folds extension into the BIT_AND_EXPR. */
7869 if (INTEGRAL_TYPE_P (type)
7870 && TREE_CODE (type) != BOOLEAN_TYPE
7871 && TREE_CODE (op0) == BIT_AND_EXPR
7872 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7875 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7878 if (TYPE_UNSIGNED (TREE_TYPE (and))
7879 || (TYPE_PRECISION (type)
7880 <= TYPE_PRECISION (TREE_TYPE (and))))
7882 else if (TYPE_PRECISION (TREE_TYPE (and1))
7883 <= HOST_BITS_PER_WIDE_INT
7884 && host_integerp (and1, 1))
7886 unsigned HOST_WIDE_INT cst;
7888 cst = tree_low_cst (and1, 1);
7889 cst &= (HOST_WIDE_INT) -1
7890 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7891 change = (cst == 0);
7892 #ifdef LOAD_EXTEND_OP
7894 && !flag_syntax_only
7895 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7898 tree uns = unsigned_type_for (TREE_TYPE (and0));
7899 and0 = fold_convert (uns, and0);
7900 and1 = fold_convert (uns, and1);
7906 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7907 TREE_INT_CST_HIGH (and1), 0,
7908 TREE_OVERFLOW (and1));
7909 return fold_build2 (BIT_AND_EXPR, type,
7910 fold_convert (type, and0), tem);
7914 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7915 when one of the new casts will fold away. Conservatively we assume
7916 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7917 if (POINTER_TYPE_P (type)
7918 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7919 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7920 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7921 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7923 tree arg00 = TREE_OPERAND (arg0, 0);
7924 tree arg01 = TREE_OPERAND (arg0, 1);
7926 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7927 fold_convert (sizetype, arg01));
7930 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7931 of the same precision, and X is an integer type not narrower than
7932 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7933 if (INTEGRAL_TYPE_P (type)
7934 && TREE_CODE (op0) == BIT_NOT_EXPR
7935 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7936 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7937 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7938 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7940 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7941 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7942 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7943 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7946 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7947 type of X and Y (integer types only). */
7948 if (INTEGRAL_TYPE_P (type)
7949 && TREE_CODE (op0) == MULT_EXPR
7950 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7951 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7953 /* Be careful not to introduce new overflows. */
7955 if (TYPE_OVERFLOW_WRAPS (type))
7958 mult_type = unsigned_type_for (type);
7960 tem = fold_build2 (MULT_EXPR, mult_type,
7961 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7962 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7963 return fold_convert (type, tem);
7966 tem = fold_convert_const (code, type, op0);
7967 return tem ? tem : NULL_TREE;
7969 case FIXED_CONVERT_EXPR:
7970 tem = fold_convert_const (code, type, arg0);
7971 return tem ? tem : NULL_TREE;
7973 case VIEW_CONVERT_EXPR:
7974 if (TREE_TYPE (op0) == type)
7976 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7977 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7979 /* For integral conversions with the same precision or pointer
7980 conversions use a NOP_EXPR instead. */
7981 if ((INTEGRAL_TYPE_P (type)
7982 || POINTER_TYPE_P (type))
7983 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7984 || POINTER_TYPE_P (TREE_TYPE (op0)))
7985 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7986 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7987 a sub-type to its base type as generated by the Ada FE. */
7988 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7989 && TREE_TYPE (TREE_TYPE (op0))))
7990 return fold_convert (type, op0);
7992 /* Strip inner integral conversions that do not change the precision. */
7993 if ((TREE_CODE (op0) == NOP_EXPR
7994 || TREE_CODE (op0) == CONVERT_EXPR)
7995 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7996 || POINTER_TYPE_P (TREE_TYPE (op0)))
7997 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7998 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7999 && (TYPE_PRECISION (TREE_TYPE (op0))
8000 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8001 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8003 return fold_view_convert_expr (type, op0);
8006 tem = fold_negate_expr (arg0);
8008 return fold_convert (type, tem);
8012 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8013 return fold_abs_const (arg0, type);
8014 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8015 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8016 /* Convert fabs((double)float) into (double)fabsf(float). */
8017 else if (TREE_CODE (arg0) == NOP_EXPR
8018 && TREE_CODE (type) == REAL_TYPE)
8020 tree targ0 = strip_float_extensions (arg0);
8022 return fold_convert (type, fold_build1 (ABS_EXPR,
8026 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8027 else if (TREE_CODE (arg0) == ABS_EXPR)
8029 else if (tree_expr_nonnegative_p (arg0))
8032 /* Strip sign ops from argument. */
8033 if (TREE_CODE (type) == REAL_TYPE)
8035 tem = fold_strip_sign_ops (arg0);
8037 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8042 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8043 return fold_convert (type, arg0);
8044 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8046 tree itype = TREE_TYPE (type);
8047 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8048 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8049 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8051 if (TREE_CODE (arg0) == COMPLEX_CST)
8053 tree itype = TREE_TYPE (type);
8054 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8055 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8056 return build_complex (type, rpart, negate_expr (ipart));
8058 if (TREE_CODE (arg0) == CONJ_EXPR)
8059 return fold_convert (type, TREE_OPERAND (arg0, 0));
8063 if (TREE_CODE (arg0) == INTEGER_CST)
8064 return fold_not_const (arg0, type);
8065 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8066 return fold_convert (type, TREE_OPERAND (arg0, 0));
8067 /* Convert ~ (-A) to A - 1. */
8068 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8069 return fold_build2 (MINUS_EXPR, type,
8070 fold_convert (type, TREE_OPERAND (arg0, 0)),
8071 build_int_cst (type, 1));
8072 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8073 else if (INTEGRAL_TYPE_P (type)
8074 && ((TREE_CODE (arg0) == MINUS_EXPR
8075 && integer_onep (TREE_OPERAND (arg0, 1)))
8076 || (TREE_CODE (arg0) == PLUS_EXPR
8077 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8078 return fold_build1 (NEGATE_EXPR, type,
8079 fold_convert (type, TREE_OPERAND (arg0, 0)));
8080 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8081 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8082 && (tem = fold_unary (BIT_NOT_EXPR, type,
8084 TREE_OPERAND (arg0, 0)))))
8085 return fold_build2 (BIT_XOR_EXPR, type, tem,
8086 fold_convert (type, TREE_OPERAND (arg0, 1)));
8087 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8088 && (tem = fold_unary (BIT_NOT_EXPR, type,
8090 TREE_OPERAND (arg0, 1)))))
8091 return fold_build2 (BIT_XOR_EXPR, type,
8092 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8093 /* Perform BIT_NOT_EXPR on each element individually. */
8094 else if (TREE_CODE (arg0) == VECTOR_CST)
8096 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8097 int count = TYPE_VECTOR_SUBPARTS (type), i;
8099 for (i = 0; i < count; i++)
8103 elem = TREE_VALUE (elements);
8104 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8105 if (elem == NULL_TREE)
8107 elements = TREE_CHAIN (elements);
8110 elem = build_int_cst (TREE_TYPE (type), -1);
8111 list = tree_cons (NULL_TREE, elem, list);
8114 return build_vector (type, nreverse (list));
8119 case TRUTH_NOT_EXPR:
8120 /* The argument to invert_truthvalue must have Boolean type. */
8121 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8122 arg0 = fold_convert (boolean_type_node, arg0);
8124 /* Note that the operand of this must be an int
8125 and its values must be 0 or 1.
8126 ("true" is a fixed value perhaps depending on the language,
8127 but we don't handle values other than 1 correctly yet.) */
8128 tem = fold_truth_not_expr (arg0);
8131 return fold_convert (type, tem);
8134 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8135 return fold_convert (type, arg0);
8136 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8137 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8138 TREE_OPERAND (arg0, 1));
8139 if (TREE_CODE (arg0) == COMPLEX_CST)
8140 return fold_convert (type, TREE_REALPART (arg0));
8141 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8144 tem = fold_build2 (TREE_CODE (arg0), itype,
8145 fold_build1 (REALPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 0)),
8147 fold_build1 (REALPART_EXPR, itype,
8148 TREE_OPERAND (arg0, 1)));
8149 return fold_convert (type, tem);
8151 if (TREE_CODE (arg0) == CONJ_EXPR)
8153 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8154 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8155 return fold_convert (type, tem);
8157 if (TREE_CODE (arg0) == CALL_EXPR)
8159 tree fn = get_callee_fndecl (arg0);
8160 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8161 switch (DECL_FUNCTION_CODE (fn))
8163 CASE_FLT_FN (BUILT_IN_CEXPI):
8164 fn = mathfn_built_in (type, BUILT_IN_COS);
8166 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8176 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8177 return fold_convert (type, integer_zero_node);
8178 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8179 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8180 TREE_OPERAND (arg0, 0));
8181 if (TREE_CODE (arg0) == COMPLEX_CST)
8182 return fold_convert (type, TREE_IMAGPART (arg0));
8183 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build2 (TREE_CODE (arg0), itype,
8187 fold_build1 (IMAGPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 0)),
8189 fold_build1 (IMAGPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 1)));
8191 return fold_convert (type, tem);
8193 if (TREE_CODE (arg0) == CONJ_EXPR)
8195 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8196 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8197 return fold_convert (type, negate_expr (tem));
8199 if (TREE_CODE (arg0) == CALL_EXPR)
8201 tree fn = get_callee_fndecl (arg0);
8202 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8203 switch (DECL_FUNCTION_CODE (fn))
8205 CASE_FLT_FN (BUILT_IN_CEXPI):
8206 fn = mathfn_built_in (type, BUILT_IN_SIN);
8208 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8219 } /* switch (code) */
8222 /* Fold a binary expression of code CODE and type TYPE with operands
8223 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8224 Return the folded expression if folding is successful. Otherwise,
8225 return NULL_TREE. */
8228 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8230 enum tree_code compl_code;
8232 if (code == MIN_EXPR)
8233 compl_code = MAX_EXPR;
8234 else if (code == MAX_EXPR)
8235 compl_code = MIN_EXPR;
8239 /* MIN (MAX (a, b), b) == b. */
8240 if (TREE_CODE (op0) == compl_code
8241 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8242 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8244 /* MIN (MAX (b, a), b) == b. */
8245 if (TREE_CODE (op0) == compl_code
8246 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8247 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8248 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8250 /* MIN (a, MAX (a, b)) == a. */
8251 if (TREE_CODE (op1) == compl_code
8252 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8253 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8254 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8256 /* MIN (a, MAX (b, a)) == a. */
8257 if (TREE_CODE (op1) == compl_code
8258 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8259 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8260 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8265 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8266 by changing CODE to reduce the magnitude of constants involved in
8267 ARG0 of the comparison.
8268 Returns a canonicalized comparison tree if a simplification was
8269 possible, otherwise returns NULL_TREE.
8270 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8271 valid if signed overflow is undefined. */
8274 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8275 tree arg0, tree arg1,
8276 bool *strict_overflow_p)
8278 enum tree_code code0 = TREE_CODE (arg0);
8279 tree t, cst0 = NULL_TREE;
8283 /* Match A +- CST code arg1 and CST code arg1. */
8284 if (!(((code0 == MINUS_EXPR
8285 || code0 == PLUS_EXPR)
8286 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8287 || code0 == INTEGER_CST))
8290 /* Identify the constant in arg0 and its sign. */
8291 if (code0 == INTEGER_CST)
8294 cst0 = TREE_OPERAND (arg0, 1);
8295 sgn0 = tree_int_cst_sgn (cst0);
8297 /* Overflowed constants and zero will cause problems. */
8298 if (integer_zerop (cst0)
8299 || TREE_OVERFLOW (cst0))
8302 /* See if we can reduce the magnitude of the constant in
8303 arg0 by changing the comparison code. */
8304 if (code0 == INTEGER_CST)
8306 /* CST <= arg1 -> CST-1 < arg1. */
8307 if (code == LE_EXPR && sgn0 == 1)
8309 /* -CST < arg1 -> -CST-1 <= arg1. */
8310 else if (code == LT_EXPR && sgn0 == -1)
8312 /* CST > arg1 -> CST-1 >= arg1. */
8313 else if (code == GT_EXPR && sgn0 == 1)
8315 /* -CST >= arg1 -> -CST-1 > arg1. */
8316 else if (code == GE_EXPR && sgn0 == -1)
8320 /* arg1 code' CST' might be more canonical. */
8325 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8327 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8329 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8330 else if (code == GT_EXPR
8331 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8333 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8334 else if (code == LE_EXPR
8335 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8337 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8338 else if (code == GE_EXPR
8339 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8343 *strict_overflow_p = true;
8346 /* Now build the constant reduced in magnitude. */
8347 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8348 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8349 if (code0 != INTEGER_CST)
8350 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8352 /* If swapping might yield to a more canonical form, do so. */
8354 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8356 return fold_build2 (code, type, t, arg1);
8359 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8360 overflow further. Try to decrease the magnitude of constants involved
8361 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8362 and put sole constants at the second argument position.
8363 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8366 maybe_canonicalize_comparison (enum tree_code code, tree type,
8367 tree arg0, tree arg1)
8370 bool strict_overflow_p;
8371 const char * const warnmsg = G_("assuming signed overflow does not occur "
8372 "when reducing constant in comparison");
8374 /* In principle pointers also have undefined overflow behavior,
8375 but that causes problems elsewhere. */
8376 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8377 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8380 /* Try canonicalization by simplifying arg0. */
8381 strict_overflow_p = false;
8382 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8383 &strict_overflow_p);
8386 if (strict_overflow_p)
8387 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8391 /* Try canonicalization by simplifying arg1 using the swapped
8393 code = swap_tree_comparison (code);
8394 strict_overflow_p = false;
8395 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8396 &strict_overflow_p);
8397 if (t && strict_overflow_p)
8398 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8402 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8403 space. This is used to avoid issuing overflow warnings for
8404 expressions like &p->x which can not wrap. */
8407 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8409 unsigned HOST_WIDE_INT offset_low, total_low;
8410 HOST_WIDE_INT size, offset_high, total_high;
8412 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8418 if (offset == NULL_TREE)
8423 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8427 offset_low = TREE_INT_CST_LOW (offset);
8428 offset_high = TREE_INT_CST_HIGH (offset);
8431 if (add_double_with_sign (offset_low, offset_high,
8432 bitpos / BITS_PER_UNIT, 0,
8433 &total_low, &total_high,
8437 if (total_high != 0)
8440 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8444 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8446 if (TREE_CODE (base) == ADDR_EXPR)
8448 HOST_WIDE_INT base_size;
8450 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8451 if (base_size > 0 && size < base_size)
8455 return total_low > (unsigned HOST_WIDE_INT) size;
8458 /* Subroutine of fold_binary. This routine performs all of the
8459 transformations that are common to the equality/inequality
8460 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8461 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8462 fold_binary should call fold_binary. Fold a comparison with
8463 tree code CODE and type TYPE with operands OP0 and OP1. Return
8464 the folded comparison or NULL_TREE. */
8467 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8469 tree arg0, arg1, tem;
8474 STRIP_SIGN_NOPS (arg0);
8475 STRIP_SIGN_NOPS (arg1);
8477 tem = fold_relational_const (code, type, arg0, arg1);
8478 if (tem != NULL_TREE)
8481 /* If one arg is a real or integer constant, put it last. */
8482 if (tree_swap_operands_p (arg0, arg1, true))
8483 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8485 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8486 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8487 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8488 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8489 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8490 && (TREE_CODE (arg1) == INTEGER_CST
8491 && !TREE_OVERFLOW (arg1)))
8493 tree const1 = TREE_OPERAND (arg0, 1);
8495 tree variable = TREE_OPERAND (arg0, 0);
8498 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8500 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8501 TREE_TYPE (arg1), const2, const1);
8503 /* If the constant operation overflowed this can be
8504 simplified as a comparison against INT_MAX/INT_MIN. */
8505 if (TREE_CODE (lhs) == INTEGER_CST
8506 && TREE_OVERFLOW (lhs))
8508 int const1_sgn = tree_int_cst_sgn (const1);
8509 enum tree_code code2 = code;
8511 /* Get the sign of the constant on the lhs if the
8512 operation were VARIABLE + CONST1. */
8513 if (TREE_CODE (arg0) == MINUS_EXPR)
8514 const1_sgn = -const1_sgn;
8516 /* The sign of the constant determines if we overflowed
8517 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8518 Canonicalize to the INT_MIN overflow by swapping the comparison
8520 if (const1_sgn == -1)
8521 code2 = swap_tree_comparison (code);
8523 /* We now can look at the canonicalized case
8524 VARIABLE + 1 CODE2 INT_MIN
8525 and decide on the result. */
8526 if (code2 == LT_EXPR
8528 || code2 == EQ_EXPR)
8529 return omit_one_operand (type, boolean_false_node, variable);
8530 else if (code2 == NE_EXPR
8532 || code2 == GT_EXPR)
8533 return omit_one_operand (type, boolean_true_node, variable);
8536 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8537 && (TREE_CODE (lhs) != INTEGER_CST
8538 || !TREE_OVERFLOW (lhs)))
8540 fold_overflow_warning (("assuming signed overflow does not occur "
8541 "when changing X +- C1 cmp C2 to "
8543 WARN_STRICT_OVERFLOW_COMPARISON);
8544 return fold_build2 (code, type, variable, lhs);
8548 /* For comparisons of pointers we can decompose it to a compile time
8549 comparison of the base objects and the offsets into the object.
8550 This requires at least one operand being an ADDR_EXPR or a
8551 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8552 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8553 && (TREE_CODE (arg0) == ADDR_EXPR
8554 || TREE_CODE (arg1) == ADDR_EXPR
8555 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8556 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8558 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8559 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8560 enum machine_mode mode;
8561 int volatilep, unsignedp;
8562 bool indirect_base0 = false, indirect_base1 = false;
8564 /* Get base and offset for the access. Strip ADDR_EXPR for
8565 get_inner_reference, but put it back by stripping INDIRECT_REF
8566 off the base object if possible. indirect_baseN will be true
8567 if baseN is not an address but refers to the object itself. */
8569 if (TREE_CODE (arg0) == ADDR_EXPR)
8571 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8572 &bitsize, &bitpos0, &offset0, &mode,
8573 &unsignedp, &volatilep, false);
8574 if (TREE_CODE (base0) == INDIRECT_REF)
8575 base0 = TREE_OPERAND (base0, 0);
8577 indirect_base0 = true;
8579 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8581 base0 = TREE_OPERAND (arg0, 0);
8582 offset0 = TREE_OPERAND (arg0, 1);
8586 if (TREE_CODE (arg1) == ADDR_EXPR)
8588 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8589 &bitsize, &bitpos1, &offset1, &mode,
8590 &unsignedp, &volatilep, false);
8591 if (TREE_CODE (base1) == INDIRECT_REF)
8592 base1 = TREE_OPERAND (base1, 0);
8594 indirect_base1 = true;
8596 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8598 base1 = TREE_OPERAND (arg1, 0);
8599 offset1 = TREE_OPERAND (arg1, 1);
8602 /* If we have equivalent bases we might be able to simplify. */
8603 if (indirect_base0 == indirect_base1
8604 && operand_equal_p (base0, base1, 0))
8606 /* We can fold this expression to a constant if the non-constant
8607 offset parts are equal. */
8608 if ((offset0 == offset1
8609 || (offset0 && offset1
8610 && operand_equal_p (offset0, offset1, 0)))
8613 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8618 && bitpos0 != bitpos1
8619 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8620 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8621 fold_overflow_warning (("assuming pointer wraparound does not "
8622 "occur when comparing P +- C1 with "
8624 WARN_STRICT_OVERFLOW_CONDITIONAL);
8629 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8631 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8633 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8635 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8637 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8639 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8643 /* We can simplify the comparison to a comparison of the variable
8644 offset parts if the constant offset parts are equal.
8645 Be careful to use signed size type here because otherwise we
8646 mess with array offsets in the wrong way. This is possible
8647 because pointer arithmetic is restricted to retain within an
8648 object and overflow on pointer differences is undefined as of
8649 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8650 else if (bitpos0 == bitpos1
8651 && ((code == EQ_EXPR || code == NE_EXPR)
8652 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8654 tree signed_size_type_node;
8655 signed_size_type_node = signed_type_for (size_type_node);
8657 /* By converting to signed size type we cover middle-end pointer
8658 arithmetic which operates on unsigned pointer types of size
8659 type size and ARRAY_REF offsets which are properly sign or
8660 zero extended from their type in case it is narrower than
8662 if (offset0 == NULL_TREE)
8663 offset0 = build_int_cst (signed_size_type_node, 0);
8665 offset0 = fold_convert (signed_size_type_node, offset0);
8666 if (offset1 == NULL_TREE)
8667 offset1 = build_int_cst (signed_size_type_node, 0);
8669 offset1 = fold_convert (signed_size_type_node, offset1);
8673 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8674 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8675 fold_overflow_warning (("assuming pointer wraparound does not "
8676 "occur when comparing P +- C1 with "
8678 WARN_STRICT_OVERFLOW_COMPARISON);
8680 return fold_build2 (code, type, offset0, offset1);
8683 /* For non-equal bases we can simplify if they are addresses
8684 of local binding decls or constants. */
8685 else if (indirect_base0 && indirect_base1
8686 /* We know that !operand_equal_p (base0, base1, 0)
8687 because the if condition was false. But make
8688 sure two decls are not the same. */
8690 && TREE_CODE (arg0) == ADDR_EXPR
8691 && TREE_CODE (arg1) == ADDR_EXPR
8692 && (((TREE_CODE (base0) == VAR_DECL
8693 || TREE_CODE (base0) == PARM_DECL)
8694 && (targetm.binds_local_p (base0)
8695 || CONSTANT_CLASS_P (base1)))
8696 || CONSTANT_CLASS_P (base0))
8697 && (((TREE_CODE (base1) == VAR_DECL
8698 || TREE_CODE (base1) == PARM_DECL)
8699 && (targetm.binds_local_p (base1)
8700 || CONSTANT_CLASS_P (base0)))
8701 || CONSTANT_CLASS_P (base1)))
8703 if (code == EQ_EXPR)
8704 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8705 else if (code == NE_EXPR)
8706 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8708 /* For equal offsets we can simplify to a comparison of the
8710 else if (bitpos0 == bitpos1
8712 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8714 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8715 && ((offset0 == offset1)
8716 || (offset0 && offset1
8717 && operand_equal_p (offset0, offset1, 0))))
8720 base0 = fold_addr_expr (base0);
8722 base1 = fold_addr_expr (base1);
8723 return fold_build2 (code, type, base0, base1);
8727 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8728 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8729 the resulting offset is smaller in absolute value than the
8731 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8732 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8733 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8734 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8735 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8736 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8737 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8739 tree const1 = TREE_OPERAND (arg0, 1);
8740 tree const2 = TREE_OPERAND (arg1, 1);
8741 tree variable1 = TREE_OPERAND (arg0, 0);
8742 tree variable2 = TREE_OPERAND (arg1, 0);
8744 const char * const warnmsg = G_("assuming signed overflow does not "
8745 "occur when combining constants around "
8748 /* Put the constant on the side where it doesn't overflow and is
8749 of lower absolute value than before. */
8750 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8751 ? MINUS_EXPR : PLUS_EXPR,
8753 if (!TREE_OVERFLOW (cst)
8754 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8756 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8757 return fold_build2 (code, type,
8759 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8763 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8764 ? MINUS_EXPR : PLUS_EXPR,
8766 if (!TREE_OVERFLOW (cst)
8767 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8769 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8770 return fold_build2 (code, type,
8771 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8777 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8778 signed arithmetic case. That form is created by the compiler
8779 often enough for folding it to be of value. One example is in
8780 computing loop trip counts after Operator Strength Reduction. */
8781 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8782 && TREE_CODE (arg0) == MULT_EXPR
8783 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8784 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8785 && integer_zerop (arg1))
8787 tree const1 = TREE_OPERAND (arg0, 1);
8788 tree const2 = arg1; /* zero */
8789 tree variable1 = TREE_OPERAND (arg0, 0);
8790 enum tree_code cmp_code = code;
8792 gcc_assert (!integer_zerop (const1));
8794 fold_overflow_warning (("assuming signed overflow does not occur when "
8795 "eliminating multiplication in comparison "
8797 WARN_STRICT_OVERFLOW_COMPARISON);
8799 /* If const1 is negative we swap the sense of the comparison. */
8800 if (tree_int_cst_sgn (const1) < 0)
8801 cmp_code = swap_tree_comparison (cmp_code);
8803 return fold_build2 (cmp_code, type, variable1, const2);
8806 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8810 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8812 tree targ0 = strip_float_extensions (arg0);
8813 tree targ1 = strip_float_extensions (arg1);
8814 tree newtype = TREE_TYPE (targ0);
8816 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8817 newtype = TREE_TYPE (targ1);
8819 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8820 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8821 return fold_build2 (code, type, fold_convert (newtype, targ0),
8822 fold_convert (newtype, targ1));
8824 /* (-a) CMP (-b) -> b CMP a */
8825 if (TREE_CODE (arg0) == NEGATE_EXPR
8826 && TREE_CODE (arg1) == NEGATE_EXPR)
8827 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8828 TREE_OPERAND (arg0, 0));
8830 if (TREE_CODE (arg1) == REAL_CST)
8832 REAL_VALUE_TYPE cst;
8833 cst = TREE_REAL_CST (arg1);
8835 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8836 if (TREE_CODE (arg0) == NEGATE_EXPR)
8837 return fold_build2 (swap_tree_comparison (code), type,
8838 TREE_OPERAND (arg0, 0),
8839 build_real (TREE_TYPE (arg1),
8840 REAL_VALUE_NEGATE (cst)));
8842 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8843 /* a CMP (-0) -> a CMP 0 */
8844 if (REAL_VALUE_MINUS_ZERO (cst))
8845 return fold_build2 (code, type, arg0,
8846 build_real (TREE_TYPE (arg1), dconst0));
8848 /* x != NaN is always true, other ops are always false. */
8849 if (REAL_VALUE_ISNAN (cst)
8850 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8852 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8853 return omit_one_operand (type, tem, arg0);
8856 /* Fold comparisons against infinity. */
8857 if (REAL_VALUE_ISINF (cst))
8859 tem = fold_inf_compare (code, type, arg0, arg1);
8860 if (tem != NULL_TREE)
8865 /* If this is a comparison of a real constant with a PLUS_EXPR
8866 or a MINUS_EXPR of a real constant, we can convert it into a
8867 comparison with a revised real constant as long as no overflow
8868 occurs when unsafe_math_optimizations are enabled. */
8869 if (flag_unsafe_math_optimizations
8870 && TREE_CODE (arg1) == REAL_CST
8871 && (TREE_CODE (arg0) == PLUS_EXPR
8872 || TREE_CODE (arg0) == MINUS_EXPR)
8873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8874 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8875 ? MINUS_EXPR : PLUS_EXPR,
8876 arg1, TREE_OPERAND (arg0, 1), 0))
8877 && !TREE_OVERFLOW (tem))
8878 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8880 /* Likewise, we can simplify a comparison of a real constant with
8881 a MINUS_EXPR whose first operand is also a real constant, i.e.
8882 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8883 floating-point types only if -fassociative-math is set. */
8884 if (flag_associative_math
8885 && TREE_CODE (arg1) == REAL_CST
8886 && TREE_CODE (arg0) == MINUS_EXPR
8887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8888 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8890 && !TREE_OVERFLOW (tem))
8891 return fold_build2 (swap_tree_comparison (code), type,
8892 TREE_OPERAND (arg0, 1), tem);
8894 /* Fold comparisons against built-in math functions. */
8895 if (TREE_CODE (arg1) == REAL_CST
8896 && flag_unsafe_math_optimizations
8897 && ! flag_errno_math)
8899 enum built_in_function fcode = builtin_mathfn_code (arg0);
8901 if (fcode != END_BUILTINS)
8903 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8904 if (tem != NULL_TREE)
8910 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8911 && (TREE_CODE (arg0) == NOP_EXPR
8912 || TREE_CODE (arg0) == CONVERT_EXPR))
8914 /* If we are widening one operand of an integer comparison,
8915 see if the other operand is similarly being widened. Perhaps we
8916 can do the comparison in the narrower type. */
8917 tem = fold_widened_comparison (code, type, arg0, arg1);
8921 /* Or if we are changing signedness. */
8922 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8927 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8928 constant, we can simplify it. */
8929 if (TREE_CODE (arg1) == INTEGER_CST
8930 && (TREE_CODE (arg0) == MIN_EXPR
8931 || TREE_CODE (arg0) == MAX_EXPR)
8932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8934 tem = optimize_minmax_comparison (code, type, op0, op1);
8939 /* Simplify comparison of something with itself. (For IEEE
8940 floating-point, we can only do some of these simplifications.) */
8941 if (operand_equal_p (arg0, arg1, 0))
8946 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8947 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8948 return constant_boolean_node (1, type);
8953 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8954 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8955 return constant_boolean_node (1, type);
8956 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8959 /* For NE, we can only do this simplification if integer
8960 or we don't honor IEEE floating point NaNs. */
8961 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8962 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8964 /* ... fall through ... */
8967 return constant_boolean_node (0, type);
8973 /* If we are comparing an expression that just has comparisons
8974 of two integer values, arithmetic expressions of those comparisons,
8975 and constants, we can simplify it. There are only three cases
8976 to check: the two values can either be equal, the first can be
8977 greater, or the second can be greater. Fold the expression for
8978 those three values. Since each value must be 0 or 1, we have
8979 eight possibilities, each of which corresponds to the constant 0
8980 or 1 or one of the six possible comparisons.
8982 This handles common cases like (a > b) == 0 but also handles
8983 expressions like ((x > y) - (y > x)) > 0, which supposedly
8984 occur in macroized code. */
8986 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8988 tree cval1 = 0, cval2 = 0;
8991 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8992 /* Don't handle degenerate cases here; they should already
8993 have been handled anyway. */
8994 && cval1 != 0 && cval2 != 0
8995 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8996 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8997 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8998 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8999 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9000 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9001 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9003 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9004 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9006 /* We can't just pass T to eval_subst in case cval1 or cval2
9007 was the same as ARG1. */
9010 = fold_build2 (code, type,
9011 eval_subst (arg0, cval1, maxval,
9015 = fold_build2 (code, type,
9016 eval_subst (arg0, cval1, maxval,
9020 = fold_build2 (code, type,
9021 eval_subst (arg0, cval1, minval,
9025 /* All three of these results should be 0 or 1. Confirm they are.
9026 Then use those values to select the proper code to use. */
9028 if (TREE_CODE (high_result) == INTEGER_CST
9029 && TREE_CODE (equal_result) == INTEGER_CST
9030 && TREE_CODE (low_result) == INTEGER_CST)
9032 /* Make a 3-bit mask with the high-order bit being the
9033 value for `>', the next for '=', and the low for '<'. */
9034 switch ((integer_onep (high_result) * 4)
9035 + (integer_onep (equal_result) * 2)
9036 + integer_onep (low_result))
9040 return omit_one_operand (type, integer_zero_node, arg0);
9061 return omit_one_operand (type, integer_one_node, arg0);
9065 return save_expr (build2 (code, type, cval1, cval2));
9066 return fold_build2 (code, type, cval1, cval2);
9071 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9072 into a single range test. */
9073 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9074 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9075 && TREE_CODE (arg1) == INTEGER_CST
9076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9077 && !integer_zerop (TREE_OPERAND (arg0, 1))
9078 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9079 && !TREE_OVERFLOW (arg1))
9081 tem = fold_div_compare (code, type, arg0, arg1);
9082 if (tem != NULL_TREE)
9086 /* Fold ~X op ~Y as Y op X. */
9087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9088 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9090 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9091 return fold_build2 (code, type,
9092 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9093 TREE_OPERAND (arg0, 0));
9096 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9098 && TREE_CODE (arg1) == INTEGER_CST)
9100 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9101 return fold_build2 (swap_tree_comparison (code), type,
9102 TREE_OPERAND (arg0, 0),
9103 fold_build1 (BIT_NOT_EXPR, cmp_type,
9104 fold_convert (cmp_type, arg1)));
9111 /* Subroutine of fold_binary. Optimize complex multiplications of the
9112 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9113 argument EXPR represents the expression "z" of type TYPE. */
9116 fold_mult_zconjz (tree type, tree expr)
9118 tree itype = TREE_TYPE (type);
9119 tree rpart, ipart, tem;
9121 if (TREE_CODE (expr) == COMPLEX_EXPR)
9123 rpart = TREE_OPERAND (expr, 0);
9124 ipart = TREE_OPERAND (expr, 1);
9126 else if (TREE_CODE (expr) == COMPLEX_CST)
9128 rpart = TREE_REALPART (expr);
9129 ipart = TREE_IMAGPART (expr);
9133 expr = save_expr (expr);
9134 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9135 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9138 rpart = save_expr (rpart);
9139 ipart = save_expr (ipart);
9140 tem = fold_build2 (PLUS_EXPR, itype,
9141 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9142 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9143 return fold_build2 (COMPLEX_EXPR, type, tem,
9144 fold_convert (itype, integer_zero_node));
9148 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9149 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9150 guarantees that P and N have the same least significant log2(M) bits.
9151 N is not otherwise constrained. In particular, N is not normalized to
9152 0 <= N < M as is common. In general, the precise value of P is unknown.
9153 M is chosen as large as possible such that constant N can be determined.
9155 Returns M and sets *RESIDUE to N. */
9157 static unsigned HOST_WIDE_INT
9158 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9160 enum tree_code code;
9164 code = TREE_CODE (expr);
9165 if (code == ADDR_EXPR)
9167 expr = TREE_OPERAND (expr, 0);
9168 if (handled_component_p (expr))
9170 HOST_WIDE_INT bitsize, bitpos;
9172 enum machine_mode mode;
9173 int unsignedp, volatilep;
9175 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9176 &mode, &unsignedp, &volatilep, false);
9177 *residue = bitpos / BITS_PER_UNIT;
9180 if (TREE_CODE (offset) == INTEGER_CST)
9181 *residue += TREE_INT_CST_LOW (offset);
9183 /* We don't handle more complicated offset expressions. */
9188 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9189 return DECL_ALIGN_UNIT (expr);
9191 else if (code == POINTER_PLUS_EXPR)
9194 unsigned HOST_WIDE_INT modulus;
9195 enum tree_code inner_code;
9197 op0 = TREE_OPERAND (expr, 0);
9199 modulus = get_pointer_modulus_and_residue (op0, residue);
9201 op1 = TREE_OPERAND (expr, 1);
9203 inner_code = TREE_CODE (op1);
9204 if (inner_code == INTEGER_CST)
9206 *residue += TREE_INT_CST_LOW (op1);
9209 else if (inner_code == MULT_EXPR)
9211 op1 = TREE_OPERAND (op1, 1);
9212 if (TREE_CODE (op1) == INTEGER_CST)
9214 unsigned HOST_WIDE_INT align;
9216 /* Compute the greatest power-of-2 divisor of op1. */
9217 align = TREE_INT_CST_LOW (op1);
9220 /* If align is non-zero and less than *modulus, replace
9221 *modulus with align., If align is 0, then either op1 is 0
9222 or the greatest power-of-2 divisor of op1 doesn't fit in an
9223 unsigned HOST_WIDE_INT. In either case, no additional
9224 constraint is imposed. */
9226 modulus = MIN (modulus, align);
9233 /* If we get here, we were unable to determine anything useful about the
9239 /* Fold a binary expression of code CODE and type TYPE with operands
9240 OP0 and OP1. Return the folded expression if folding is
9241 successful. Otherwise, return NULL_TREE. */
9244 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9246 enum tree_code_class kind = TREE_CODE_CLASS (code);
9247 tree arg0, arg1, tem;
9248 tree t1 = NULL_TREE;
9249 bool strict_overflow_p;
9251 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9252 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9253 && TREE_CODE_LENGTH (code) == 2
9255 && op1 != NULL_TREE);
9260 /* Strip any conversions that don't change the mode. This is
9261 safe for every expression, except for a comparison expression
9262 because its signedness is derived from its operands. So, in
9263 the latter case, only strip conversions that don't change the
9264 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9267 Note that this is done as an internal manipulation within the
9268 constant folder, in order to find the simplest representation
9269 of the arguments so that their form can be studied. In any
9270 cases, the appropriate type conversions should be put back in
9271 the tree that will get out of the constant folder. */
9273 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9275 STRIP_SIGN_NOPS (arg0);
9276 STRIP_SIGN_NOPS (arg1);
9284 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9285 constant but we can't do arithmetic on them. */
9286 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9287 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9288 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9289 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9290 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9291 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9293 if (kind == tcc_binary)
9295 /* Make sure type and arg0 have the same saturating flag. */
9296 gcc_assert (TYPE_SATURATING (type)
9297 == TYPE_SATURATING (TREE_TYPE (arg0)));
9298 tem = const_binop (code, arg0, arg1, 0);
9300 else if (kind == tcc_comparison)
9301 tem = fold_relational_const (code, type, arg0, arg1);
9305 if (tem != NULL_TREE)
9307 if (TREE_TYPE (tem) != type)
9308 tem = fold_convert (type, tem);
9313 /* If this is a commutative operation, and ARG0 is a constant, move it
9314 to ARG1 to reduce the number of tests below. */
9315 if (commutative_tree_code (code)
9316 && tree_swap_operands_p (arg0, arg1, true))
9317 return fold_build2 (code, type, op1, op0);
9319 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9321 First check for cases where an arithmetic operation is applied to a
9322 compound, conditional, or comparison operation. Push the arithmetic
9323 operation inside the compound or conditional to see if any folding
9324 can then be done. Convert comparison to conditional for this purpose.
9325 The also optimizes non-constant cases that used to be done in
9328 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9329 one of the operands is a comparison and the other is a comparison, a
9330 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9331 code below would make the expression more complex. Change it to a
9332 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9333 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9335 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9336 || code == EQ_EXPR || code == NE_EXPR)
9337 && ((truth_value_p (TREE_CODE (arg0))
9338 && (truth_value_p (TREE_CODE (arg1))
9339 || (TREE_CODE (arg1) == BIT_AND_EXPR
9340 && integer_onep (TREE_OPERAND (arg1, 1)))))
9341 || (truth_value_p (TREE_CODE (arg1))
9342 && (truth_value_p (TREE_CODE (arg0))
9343 || (TREE_CODE (arg0) == BIT_AND_EXPR
9344 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9346 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9347 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9350 fold_convert (boolean_type_node, arg0),
9351 fold_convert (boolean_type_node, arg1));
9353 if (code == EQ_EXPR)
9354 tem = invert_truthvalue (tem);
9356 return fold_convert (type, tem);
9359 if (TREE_CODE_CLASS (code) == tcc_binary
9360 || TREE_CODE_CLASS (code) == tcc_comparison)
9362 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9363 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9364 fold_build2 (code, type,
9365 fold_convert (TREE_TYPE (op0),
9366 TREE_OPERAND (arg0, 1)),
9368 if (TREE_CODE (arg1) == COMPOUND_EXPR
9369 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9370 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9371 fold_build2 (code, type, op0,
9372 fold_convert (TREE_TYPE (op1),
9373 TREE_OPERAND (arg1, 1))));
9375 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9377 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9379 /*cond_first_p=*/1);
9380 if (tem != NULL_TREE)
9384 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9386 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9388 /*cond_first_p=*/0);
9389 if (tem != NULL_TREE)
9396 case POINTER_PLUS_EXPR:
9397 /* 0 +p index -> (type)index */
9398 if (integer_zerop (arg0))
9399 return non_lvalue (fold_convert (type, arg1));
9401 /* PTR +p 0 -> PTR */
9402 if (integer_zerop (arg1))
9403 return non_lvalue (fold_convert (type, arg0));
9405 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9406 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9407 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9408 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9409 fold_convert (sizetype, arg1),
9410 fold_convert (sizetype, arg0)));
9412 /* index +p PTR -> PTR +p index */
9413 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9414 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9415 return fold_build2 (POINTER_PLUS_EXPR, type,
9416 fold_convert (type, arg1),
9417 fold_convert (sizetype, arg0));
9419 /* (PTR +p B) +p A -> PTR +p (B + A) */
9420 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9423 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9424 tree arg00 = TREE_OPERAND (arg0, 0);
9425 inner = fold_build2 (PLUS_EXPR, sizetype,
9426 arg01, fold_convert (sizetype, arg1));
9427 return fold_convert (type,
9428 fold_build2 (POINTER_PLUS_EXPR,
9429 TREE_TYPE (arg00), arg00, inner));
9432 /* PTR_CST +p CST -> CST1 */
9433 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9434 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9436 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9437 of the array. Loop optimizer sometimes produce this type of
9439 if (TREE_CODE (arg0) == ADDR_EXPR)
9441 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9443 return fold_convert (type, tem);
9449 /* PTR + INT -> (INT)(PTR p+ INT) */
9450 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9451 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9452 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9455 fold_convert (sizetype, arg1)));
9456 /* INT + PTR -> (INT)(PTR p+ INT) */
9457 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9458 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9459 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9462 fold_convert (sizetype, arg0)));
9463 /* A + (-B) -> A - B */
9464 if (TREE_CODE (arg1) == NEGATE_EXPR)
9465 return fold_build2 (MINUS_EXPR, type,
9466 fold_convert (type, arg0),
9467 fold_convert (type, TREE_OPERAND (arg1, 0)));
9468 /* (-A) + B -> B - A */
9469 if (TREE_CODE (arg0) == NEGATE_EXPR
9470 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9471 return fold_build2 (MINUS_EXPR, type,
9472 fold_convert (type, arg1),
9473 fold_convert (type, TREE_OPERAND (arg0, 0)));
9475 if (INTEGRAL_TYPE_P (type))
9477 /* Convert ~A + 1 to -A. */
9478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9479 && integer_onep (arg1))
9480 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9483 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9484 && !TYPE_OVERFLOW_TRAPS (type))
9486 tree tem = TREE_OPERAND (arg0, 0);
9489 if (operand_equal_p (tem, arg1, 0))
9491 t1 = build_int_cst_type (type, -1);
9492 return omit_one_operand (type, t1, arg1);
9497 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9498 && !TYPE_OVERFLOW_TRAPS (type))
9500 tree tem = TREE_OPERAND (arg1, 0);
9503 if (operand_equal_p (arg0, tem, 0))
9505 t1 = build_int_cst_type (type, -1);
9506 return omit_one_operand (type, t1, arg0);
9510 /* X + (X / CST) * -CST is X % CST. */
9511 if (TREE_CODE (arg1) == MULT_EXPR
9512 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9513 && operand_equal_p (arg0,
9514 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9516 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9517 tree cst1 = TREE_OPERAND (arg1, 1);
9518 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9519 if (sum && integer_zerop (sum))
9520 return fold_convert (type,
9521 fold_build2 (TRUNC_MOD_EXPR,
9522 TREE_TYPE (arg0), arg0, cst0));
9526 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9527 same or one. Make sure type is not saturating.
9528 fold_plusminus_mult_expr will re-associate. */
9529 if ((TREE_CODE (arg0) == MULT_EXPR
9530 || TREE_CODE (arg1) == MULT_EXPR)
9531 && !TYPE_SATURATING (type)
9532 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9534 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9539 if (! FLOAT_TYPE_P (type))
9541 if (integer_zerop (arg1))
9542 return non_lvalue (fold_convert (type, arg0));
9544 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9545 with a constant, and the two constants have no bits in common,
9546 we should treat this as a BIT_IOR_EXPR since this may produce more
9548 if (TREE_CODE (arg0) == BIT_AND_EXPR
9549 && TREE_CODE (arg1) == BIT_AND_EXPR
9550 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9551 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9552 && integer_zerop (const_binop (BIT_AND_EXPR,
9553 TREE_OPERAND (arg0, 1),
9554 TREE_OPERAND (arg1, 1), 0)))
9556 code = BIT_IOR_EXPR;
9560 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9561 (plus (plus (mult) (mult)) (foo)) so that we can
9562 take advantage of the factoring cases below. */
9563 if (((TREE_CODE (arg0) == PLUS_EXPR
9564 || TREE_CODE (arg0) == MINUS_EXPR)
9565 && TREE_CODE (arg1) == MULT_EXPR)
9566 || ((TREE_CODE (arg1) == PLUS_EXPR
9567 || TREE_CODE (arg1) == MINUS_EXPR)
9568 && TREE_CODE (arg0) == MULT_EXPR))
9570 tree parg0, parg1, parg, marg;
9571 enum tree_code pcode;
9573 if (TREE_CODE (arg1) == MULT_EXPR)
9574 parg = arg0, marg = arg1;
9576 parg = arg1, marg = arg0;
9577 pcode = TREE_CODE (parg);
9578 parg0 = TREE_OPERAND (parg, 0);
9579 parg1 = TREE_OPERAND (parg, 1);
9583 if (TREE_CODE (parg0) == MULT_EXPR
9584 && TREE_CODE (parg1) != MULT_EXPR)
9585 return fold_build2 (pcode, type,
9586 fold_build2 (PLUS_EXPR, type,
9587 fold_convert (type, parg0),
9588 fold_convert (type, marg)),
9589 fold_convert (type, parg1));
9590 if (TREE_CODE (parg0) != MULT_EXPR
9591 && TREE_CODE (parg1) == MULT_EXPR)
9592 return fold_build2 (PLUS_EXPR, type,
9593 fold_convert (type, parg0),
9594 fold_build2 (pcode, type,
9595 fold_convert (type, marg),
9602 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9603 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9604 return non_lvalue (fold_convert (type, arg0));
9606 /* Likewise if the operands are reversed. */
9607 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9608 return non_lvalue (fold_convert (type, arg1));
9610 /* Convert X + -C into X - C. */
9611 if (TREE_CODE (arg1) == REAL_CST
9612 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9614 tem = fold_negate_const (arg1, type);
9615 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9616 return fold_build2 (MINUS_EXPR, type,
9617 fold_convert (type, arg0),
9618 fold_convert (type, tem));
9621 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9622 to __complex__ ( x, y ). This is not the same for SNaNs or
9623 if signed zeros are involved. */
9624 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9625 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9626 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9628 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9629 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9630 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9631 bool arg0rz = false, arg0iz = false;
9632 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9633 || (arg0i && (arg0iz = real_zerop (arg0i))))
9635 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9636 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9637 if (arg0rz && arg1i && real_zerop (arg1i))
9639 tree rp = arg1r ? arg1r
9640 : build1 (REALPART_EXPR, rtype, arg1);
9641 tree ip = arg0i ? arg0i
9642 : build1 (IMAGPART_EXPR, rtype, arg0);
9643 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9645 else if (arg0iz && arg1r && real_zerop (arg1r))
9647 tree rp = arg0r ? arg0r
9648 : build1 (REALPART_EXPR, rtype, arg0);
9649 tree ip = arg1i ? arg1i
9650 : build1 (IMAGPART_EXPR, rtype, arg1);
9651 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9656 if (flag_unsafe_math_optimizations
9657 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9658 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9659 && (tem = distribute_real_division (code, type, arg0, arg1)))
9662 /* Convert x+x into x*2.0. */
9663 if (operand_equal_p (arg0, arg1, 0)
9664 && SCALAR_FLOAT_TYPE_P (type))
9665 return fold_build2 (MULT_EXPR, type, arg0,
9666 build_real (type, dconst2));
9668 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9669 We associate floats only if the user has specified
9670 -fassociative-math. */
9671 if (flag_associative_math
9672 && TREE_CODE (arg1) == PLUS_EXPR
9673 && TREE_CODE (arg0) != MULT_EXPR)
9675 tree tree10 = TREE_OPERAND (arg1, 0);
9676 tree tree11 = TREE_OPERAND (arg1, 1);
9677 if (TREE_CODE (tree11) == MULT_EXPR
9678 && TREE_CODE (tree10) == MULT_EXPR)
9681 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9682 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9685 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9686 We associate floats only if the user has specified
9687 -fassociative-math. */
9688 if (flag_associative_math
9689 && TREE_CODE (arg0) == PLUS_EXPR
9690 && TREE_CODE (arg1) != MULT_EXPR)
9692 tree tree00 = TREE_OPERAND (arg0, 0);
9693 tree tree01 = TREE_OPERAND (arg0, 1);
9694 if (TREE_CODE (tree01) == MULT_EXPR
9695 && TREE_CODE (tree00) == MULT_EXPR)
9698 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9699 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9705 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9706 is a rotate of A by C1 bits. */
9707 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9708 is a rotate of A by B bits. */
9710 enum tree_code code0, code1;
9712 code0 = TREE_CODE (arg0);
9713 code1 = TREE_CODE (arg1);
9714 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9715 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9716 && operand_equal_p (TREE_OPERAND (arg0, 0),
9717 TREE_OPERAND (arg1, 0), 0)
9718 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9719 TYPE_UNSIGNED (rtype))
9720 /* Only create rotates in complete modes. Other cases are not
9721 expanded properly. */
9722 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9724 tree tree01, tree11;
9725 enum tree_code code01, code11;
9727 tree01 = TREE_OPERAND (arg0, 1);
9728 tree11 = TREE_OPERAND (arg1, 1);
9729 STRIP_NOPS (tree01);
9730 STRIP_NOPS (tree11);
9731 code01 = TREE_CODE (tree01);
9732 code11 = TREE_CODE (tree11);
9733 if (code01 == INTEGER_CST
9734 && code11 == INTEGER_CST
9735 && TREE_INT_CST_HIGH (tree01) == 0
9736 && TREE_INT_CST_HIGH (tree11) == 0
9737 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9738 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9739 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9740 code0 == LSHIFT_EXPR ? tree01 : tree11);
9741 else if (code11 == MINUS_EXPR)
9743 tree tree110, tree111;
9744 tree110 = TREE_OPERAND (tree11, 0);
9745 tree111 = TREE_OPERAND (tree11, 1);
9746 STRIP_NOPS (tree110);
9747 STRIP_NOPS (tree111);
9748 if (TREE_CODE (tree110) == INTEGER_CST
9749 && 0 == compare_tree_int (tree110,
9751 (TREE_TYPE (TREE_OPERAND
9753 && operand_equal_p (tree01, tree111, 0))
9754 return build2 ((code0 == LSHIFT_EXPR
9757 type, TREE_OPERAND (arg0, 0), tree01);
9759 else if (code01 == MINUS_EXPR)
9761 tree tree010, tree011;
9762 tree010 = TREE_OPERAND (tree01, 0);
9763 tree011 = TREE_OPERAND (tree01, 1);
9764 STRIP_NOPS (tree010);
9765 STRIP_NOPS (tree011);
9766 if (TREE_CODE (tree010) == INTEGER_CST
9767 && 0 == compare_tree_int (tree010,
9769 (TREE_TYPE (TREE_OPERAND
9771 && operand_equal_p (tree11, tree011, 0))
9772 return build2 ((code0 != LSHIFT_EXPR
9775 type, TREE_OPERAND (arg0, 0), tree11);
9781 /* In most languages, can't associate operations on floats through
9782 parentheses. Rather than remember where the parentheses were, we
9783 don't associate floats at all, unless the user has specified
9785 And, we need to make sure type is not saturating. */
9787 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9788 && !TYPE_SATURATING (type))
9790 tree var0, con0, lit0, minus_lit0;
9791 tree var1, con1, lit1, minus_lit1;
9794 /* Split both trees into variables, constants, and literals. Then
9795 associate each group together, the constants with literals,
9796 then the result with variables. This increases the chances of
9797 literals being recombined later and of generating relocatable
9798 expressions for the sum of a constant and literal. */
9799 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9800 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9801 code == MINUS_EXPR);
9803 /* With undefined overflow we can only associate constants
9804 with one variable. */
9805 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9806 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9812 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9813 tmp0 = TREE_OPERAND (tmp0, 0);
9814 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9815 tmp1 = TREE_OPERAND (tmp1, 0);
9816 /* The only case we can still associate with two variables
9817 is if they are the same, modulo negation. */
9818 if (!operand_equal_p (tmp0, tmp1, 0))
9822 /* Only do something if we found more than two objects. Otherwise,
9823 nothing has changed and we risk infinite recursion. */
9825 && (2 < ((var0 != 0) + (var1 != 0)
9826 + (con0 != 0) + (con1 != 0)
9827 + (lit0 != 0) + (lit1 != 0)
9828 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9830 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9831 if (code == MINUS_EXPR)
9834 var0 = associate_trees (var0, var1, code, type);
9835 con0 = associate_trees (con0, con1, code, type);
9836 lit0 = associate_trees (lit0, lit1, code, type);
9837 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9839 /* Preserve the MINUS_EXPR if the negative part of the literal is
9840 greater than the positive part. Otherwise, the multiplicative
9841 folding code (i.e extract_muldiv) may be fooled in case
9842 unsigned constants are subtracted, like in the following
9843 example: ((X*2 + 4) - 8U)/2. */
9844 if (minus_lit0 && lit0)
9846 if (TREE_CODE (lit0) == INTEGER_CST
9847 && TREE_CODE (minus_lit0) == INTEGER_CST
9848 && tree_int_cst_lt (lit0, minus_lit0))
9850 minus_lit0 = associate_trees (minus_lit0, lit0,
9856 lit0 = associate_trees (lit0, minus_lit0,
9864 return fold_convert (type,
9865 associate_trees (var0, minus_lit0,
9869 con0 = associate_trees (con0, minus_lit0,
9871 return fold_convert (type,
9872 associate_trees (var0, con0,
9877 con0 = associate_trees (con0, lit0, code, type);
9878 return fold_convert (type, associate_trees (var0, con0,
9886 /* Pointer simplifications for subtraction, simple reassociations. */
9887 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9889 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9890 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9891 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9893 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9894 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9895 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9896 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9897 return fold_build2 (PLUS_EXPR, type,
9898 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9899 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9901 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9902 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9904 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9905 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9906 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9908 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9911 /* A - (-B) -> A + B */
9912 if (TREE_CODE (arg1) == NEGATE_EXPR)
9913 return fold_build2 (PLUS_EXPR, type, op0,
9914 fold_convert (type, TREE_OPERAND (arg1, 0)));
9915 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9916 if (TREE_CODE (arg0) == NEGATE_EXPR
9917 && (FLOAT_TYPE_P (type)
9918 || INTEGRAL_TYPE_P (type))
9919 && negate_expr_p (arg1)
9920 && reorder_operands_p (arg0, arg1))
9921 return fold_build2 (MINUS_EXPR, type,
9922 fold_convert (type, negate_expr (arg1)),
9923 fold_convert (type, TREE_OPERAND (arg0, 0)));
9924 /* Convert -A - 1 to ~A. */
9925 if (INTEGRAL_TYPE_P (type)
9926 && TREE_CODE (arg0) == NEGATE_EXPR
9927 && integer_onep (arg1)
9928 && !TYPE_OVERFLOW_TRAPS (type))
9929 return fold_build1 (BIT_NOT_EXPR, type,
9930 fold_convert (type, TREE_OPERAND (arg0, 0)));
9932 /* Convert -1 - A to ~A. */
9933 if (INTEGRAL_TYPE_P (type)
9934 && integer_all_onesp (arg0))
9935 return fold_build1 (BIT_NOT_EXPR, type, op1);
9938 /* X - (X / CST) * CST is X % CST. */
9939 if (INTEGRAL_TYPE_P (type)
9940 && TREE_CODE (arg1) == MULT_EXPR
9941 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9942 && operand_equal_p (arg0,
9943 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9944 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9945 TREE_OPERAND (arg1, 1), 0))
9946 return fold_convert (type,
9947 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9948 arg0, TREE_OPERAND (arg1, 1)));
9950 if (! FLOAT_TYPE_P (type))
9952 if (integer_zerop (arg0))
9953 return negate_expr (fold_convert (type, arg1));
9954 if (integer_zerop (arg1))
9955 return non_lvalue (fold_convert (type, arg0));
9957 /* Fold A - (A & B) into ~B & A. */
9958 if (!TREE_SIDE_EFFECTS (arg0)
9959 && TREE_CODE (arg1) == BIT_AND_EXPR)
9961 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9963 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9964 return fold_build2 (BIT_AND_EXPR, type,
9965 fold_build1 (BIT_NOT_EXPR, type, arg10),
9966 fold_convert (type, arg0));
9968 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9970 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9971 return fold_build2 (BIT_AND_EXPR, type,
9972 fold_build1 (BIT_NOT_EXPR, type, arg11),
9973 fold_convert (type, arg0));
9977 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9978 any power of 2 minus 1. */
9979 if (TREE_CODE (arg0) == BIT_AND_EXPR
9980 && TREE_CODE (arg1) == BIT_AND_EXPR
9981 && operand_equal_p (TREE_OPERAND (arg0, 0),
9982 TREE_OPERAND (arg1, 0), 0))
9984 tree mask0 = TREE_OPERAND (arg0, 1);
9985 tree mask1 = TREE_OPERAND (arg1, 1);
9986 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9988 if (operand_equal_p (tem, mask1, 0))
9990 tem = fold_build2 (BIT_XOR_EXPR, type,
9991 TREE_OPERAND (arg0, 0), mask1);
9992 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9997 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9998 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9999 return non_lvalue (fold_convert (type, arg0));
10001 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10002 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10003 (-ARG1 + ARG0) reduces to -ARG1. */
10004 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10005 return negate_expr (fold_convert (type, arg1));
10007 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10008 __complex__ ( x, -y ). This is not the same for SNaNs or if
10009 signed zeros are involved. */
10010 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10011 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10012 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10014 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10015 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10016 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10017 bool arg0rz = false, arg0iz = false;
10018 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10019 || (arg0i && (arg0iz = real_zerop (arg0i))))
10021 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10022 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10023 if (arg0rz && arg1i && real_zerop (arg1i))
10025 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10027 : build1 (REALPART_EXPR, rtype, arg1));
10028 tree ip = arg0i ? arg0i
10029 : build1 (IMAGPART_EXPR, rtype, arg0);
10030 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10032 else if (arg0iz && arg1r && real_zerop (arg1r))
10034 tree rp = arg0r ? arg0r
10035 : build1 (REALPART_EXPR, rtype, arg0);
10036 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10038 : build1 (IMAGPART_EXPR, rtype, arg1));
10039 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10044 /* Fold &x - &x. This can happen from &x.foo - &x.
10045 This is unsafe for certain floats even in non-IEEE formats.
10046 In IEEE, it is unsafe because it does wrong for NaNs.
10047 Also note that operand_equal_p is always false if an operand
10050 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10051 && operand_equal_p (arg0, arg1, 0))
10052 return fold_convert (type, integer_zero_node);
10054 /* A - B -> A + (-B) if B is easily negatable. */
10055 if (negate_expr_p (arg1)
10056 && ((FLOAT_TYPE_P (type)
10057 /* Avoid this transformation if B is a positive REAL_CST. */
10058 && (TREE_CODE (arg1) != REAL_CST
10059 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10060 || INTEGRAL_TYPE_P (type)))
10061 return fold_build2 (PLUS_EXPR, type,
10062 fold_convert (type, arg0),
10063 fold_convert (type, negate_expr (arg1)));
10065 /* Try folding difference of addresses. */
10067 HOST_WIDE_INT diff;
10069 if ((TREE_CODE (arg0) == ADDR_EXPR
10070 || TREE_CODE (arg1) == ADDR_EXPR)
10071 && ptr_difference_const (arg0, arg1, &diff))
10072 return build_int_cst_type (type, diff);
10075 /* Fold &a[i] - &a[j] to i-j. */
10076 if (TREE_CODE (arg0) == ADDR_EXPR
10077 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10078 && TREE_CODE (arg1) == ADDR_EXPR
10079 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10081 tree aref0 = TREE_OPERAND (arg0, 0);
10082 tree aref1 = TREE_OPERAND (arg1, 0);
10083 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10084 TREE_OPERAND (aref1, 0), 0))
10086 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10087 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10088 tree esz = array_ref_element_size (aref0);
10089 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10090 return fold_build2 (MULT_EXPR, type, diff,
10091 fold_convert (type, esz));
10096 if (flag_unsafe_math_optimizations
10097 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10098 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10099 && (tem = distribute_real_division (code, type, arg0, arg1)))
10102 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10103 same or one. Make sure type is not saturating.
10104 fold_plusminus_mult_expr will re-associate. */
10105 if ((TREE_CODE (arg0) == MULT_EXPR
10106 || TREE_CODE (arg1) == MULT_EXPR)
10107 && !TYPE_SATURATING (type)
10108 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10110 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10118 /* (-A) * (-B) -> A * B */
10119 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10120 return fold_build2 (MULT_EXPR, type,
10121 fold_convert (type, TREE_OPERAND (arg0, 0)),
10122 fold_convert (type, negate_expr (arg1)));
10123 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10124 return fold_build2 (MULT_EXPR, type,
10125 fold_convert (type, negate_expr (arg0)),
10126 fold_convert (type, TREE_OPERAND (arg1, 0)));
10128 if (! FLOAT_TYPE_P (type))
10130 if (integer_zerop (arg1))
10131 return omit_one_operand (type, arg1, arg0);
10132 if (integer_onep (arg1))
10133 return non_lvalue (fold_convert (type, arg0));
10134 /* Transform x * -1 into -x. Make sure to do the negation
10135 on the original operand with conversions not stripped
10136 because we can only strip non-sign-changing conversions. */
10137 if (integer_all_onesp (arg1))
10138 return fold_convert (type, negate_expr (op0));
10139 /* Transform x * -C into -x * C if x is easily negatable. */
10140 if (TREE_CODE (arg1) == INTEGER_CST
10141 && tree_int_cst_sgn (arg1) == -1
10142 && negate_expr_p (arg0)
10143 && (tem = negate_expr (arg1)) != arg1
10144 && !TREE_OVERFLOW (tem))
10145 return fold_build2 (MULT_EXPR, type,
10146 fold_convert (type, negate_expr (arg0)), tem);
10148 /* (a * (1 << b)) is (a << b) */
10149 if (TREE_CODE (arg1) == LSHIFT_EXPR
10150 && integer_onep (TREE_OPERAND (arg1, 0)))
10151 return fold_build2 (LSHIFT_EXPR, type, op0,
10152 TREE_OPERAND (arg1, 1));
10153 if (TREE_CODE (arg0) == LSHIFT_EXPR
10154 && integer_onep (TREE_OPERAND (arg0, 0)))
10155 return fold_build2 (LSHIFT_EXPR, type, op1,
10156 TREE_OPERAND (arg0, 1));
10158 /* (A + A) * C -> A * 2 * C */
10159 if (TREE_CODE (arg0) == PLUS_EXPR
10160 && TREE_CODE (arg1) == INTEGER_CST
10161 && operand_equal_p (TREE_OPERAND (arg0, 0),
10162 TREE_OPERAND (arg0, 1), 0))
10163 return fold_build2 (MULT_EXPR, type,
10164 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10165 TREE_OPERAND (arg0, 1)),
10166 fold_build2 (MULT_EXPR, type,
10167 build_int_cst (type, 2) , arg1));
10169 strict_overflow_p = false;
10170 if (TREE_CODE (arg1) == INTEGER_CST
10171 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10172 &strict_overflow_p)))
10174 if (strict_overflow_p)
10175 fold_overflow_warning (("assuming signed overflow does not "
10176 "occur when simplifying "
10178 WARN_STRICT_OVERFLOW_MISC);
10179 return fold_convert (type, tem);
10182 /* Optimize z * conj(z) for integer complex numbers. */
10183 if (TREE_CODE (arg0) == CONJ_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10185 return fold_mult_zconjz (type, arg1);
10186 if (TREE_CODE (arg1) == CONJ_EXPR
10187 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10188 return fold_mult_zconjz (type, arg0);
10192 /* Maybe fold x * 0 to 0. The expressions aren't the same
10193 when x is NaN, since x * 0 is also NaN. Nor are they the
10194 same in modes with signed zeros, since multiplying a
10195 negative value by 0 gives -0, not +0. */
10196 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10197 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10198 && real_zerop (arg1))
10199 return omit_one_operand (type, arg1, arg0);
10200 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10201 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10202 && real_onep (arg1))
10203 return non_lvalue (fold_convert (type, arg0));
10205 /* Transform x * -1.0 into -x. */
10206 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10207 && real_minus_onep (arg1))
10208 return fold_convert (type, negate_expr (arg0));
10210 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10211 the result for floating point types due to rounding so it is applied
10212 only if -fassociative-math was specify. */
10213 if (flag_associative_math
10214 && TREE_CODE (arg0) == RDIV_EXPR
10215 && TREE_CODE (arg1) == REAL_CST
10216 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10218 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10221 return fold_build2 (RDIV_EXPR, type, tem,
10222 TREE_OPERAND (arg0, 1));
10225 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10226 if (operand_equal_p (arg0, arg1, 0))
10228 tree tem = fold_strip_sign_ops (arg0);
10229 if (tem != NULL_TREE)
10231 tem = fold_convert (type, tem);
10232 return fold_build2 (MULT_EXPR, type, tem, tem);
10236 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10237 This is not the same for NaNs or if signed zeros are
10239 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10240 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10241 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10242 && TREE_CODE (arg1) == COMPLEX_CST
10243 && real_zerop (TREE_REALPART (arg1)))
10245 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10246 if (real_onep (TREE_IMAGPART (arg1)))
10247 return fold_build2 (COMPLEX_EXPR, type,
10248 negate_expr (fold_build1 (IMAGPART_EXPR,
10250 fold_build1 (REALPART_EXPR, rtype, arg0));
10251 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10252 return fold_build2 (COMPLEX_EXPR, type,
10253 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10254 negate_expr (fold_build1 (REALPART_EXPR,
10258 /* Optimize z * conj(z) for floating point complex numbers.
10259 Guarded by flag_unsafe_math_optimizations as non-finite
10260 imaginary components don't produce scalar results. */
10261 if (flag_unsafe_math_optimizations
10262 && TREE_CODE (arg0) == CONJ_EXPR
10263 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10264 return fold_mult_zconjz (type, arg1);
10265 if (flag_unsafe_math_optimizations
10266 && TREE_CODE (arg1) == CONJ_EXPR
10267 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10268 return fold_mult_zconjz (type, arg0);
10270 if (flag_unsafe_math_optimizations)
10272 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10273 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10275 /* Optimizations of root(...)*root(...). */
10276 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10279 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10280 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10282 /* Optimize sqrt(x)*sqrt(x) as x. */
10283 if (BUILTIN_SQRT_P (fcode0)
10284 && operand_equal_p (arg00, arg10, 0)
10285 && ! HONOR_SNANS (TYPE_MODE (type)))
10288 /* Optimize root(x)*root(y) as root(x*y). */
10289 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10290 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10291 return build_call_expr (rootfn, 1, arg);
10294 /* Optimize expN(x)*expN(y) as expN(x+y). */
10295 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10297 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10298 tree arg = fold_build2 (PLUS_EXPR, type,
10299 CALL_EXPR_ARG (arg0, 0),
10300 CALL_EXPR_ARG (arg1, 0));
10301 return build_call_expr (expfn, 1, arg);
10304 /* Optimizations of pow(...)*pow(...). */
10305 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10306 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10307 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10309 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10310 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10311 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10312 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10314 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10315 if (operand_equal_p (arg01, arg11, 0))
10317 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10318 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10319 return build_call_expr (powfn, 2, arg, arg01);
10322 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10323 if (operand_equal_p (arg00, arg10, 0))
10325 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10326 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10327 return build_call_expr (powfn, 2, arg00, arg);
10331 /* Optimize tan(x)*cos(x) as sin(x). */
10332 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10333 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10334 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10335 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10336 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10337 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10338 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10339 CALL_EXPR_ARG (arg1, 0), 0))
10341 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10343 if (sinfn != NULL_TREE)
10344 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10347 /* Optimize x*pow(x,c) as pow(x,c+1). */
10348 if (fcode1 == BUILT_IN_POW
10349 || fcode1 == BUILT_IN_POWF
10350 || fcode1 == BUILT_IN_POWL)
10352 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10353 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10354 if (TREE_CODE (arg11) == REAL_CST
10355 && !TREE_OVERFLOW (arg11)
10356 && operand_equal_p (arg0, arg10, 0))
10358 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10362 c = TREE_REAL_CST (arg11);
10363 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10364 arg = build_real (type, c);
10365 return build_call_expr (powfn, 2, arg0, arg);
10369 /* Optimize pow(x,c)*x as pow(x,c+1). */
10370 if (fcode0 == BUILT_IN_POW
10371 || fcode0 == BUILT_IN_POWF
10372 || fcode0 == BUILT_IN_POWL)
10374 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10375 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10376 if (TREE_CODE (arg01) == REAL_CST
10377 && !TREE_OVERFLOW (arg01)
10378 && operand_equal_p (arg1, arg00, 0))
10380 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10384 c = TREE_REAL_CST (arg01);
10385 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10386 arg = build_real (type, c);
10387 return build_call_expr (powfn, 2, arg1, arg);
10391 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10392 if (! optimize_size
10393 && operand_equal_p (arg0, arg1, 0))
10395 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10399 tree arg = build_real (type, dconst2);
10400 return build_call_expr (powfn, 2, arg0, arg);
10409 if (integer_all_onesp (arg1))
10410 return omit_one_operand (type, arg1, arg0);
10411 if (integer_zerop (arg1))
10412 return non_lvalue (fold_convert (type, arg0));
10413 if (operand_equal_p (arg0, arg1, 0))
10414 return non_lvalue (fold_convert (type, arg0));
10416 /* ~X | X is -1. */
10417 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10420 t1 = fold_convert (type, integer_zero_node);
10421 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10422 return omit_one_operand (type, t1, arg1);
10425 /* X | ~X is -1. */
10426 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10427 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10429 t1 = fold_convert (type, integer_zero_node);
10430 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10431 return omit_one_operand (type, t1, arg0);
10434 /* Canonicalize (X & C1) | C2. */
10435 if (TREE_CODE (arg0) == BIT_AND_EXPR
10436 && TREE_CODE (arg1) == INTEGER_CST
10437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10439 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10440 int width = TYPE_PRECISION (type), w;
10441 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10442 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10443 hi2 = TREE_INT_CST_HIGH (arg1);
10444 lo2 = TREE_INT_CST_LOW (arg1);
10446 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10447 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10448 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10450 if (width > HOST_BITS_PER_WIDE_INT)
10452 mhi = (unsigned HOST_WIDE_INT) -1
10453 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10459 mlo = (unsigned HOST_WIDE_INT) -1
10460 >> (HOST_BITS_PER_WIDE_INT - width);
10463 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10464 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10465 return fold_build2 (BIT_IOR_EXPR, type,
10466 TREE_OPERAND (arg0, 0), arg1);
10468 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10469 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10470 mode which allows further optimizations. */
10477 for (w = BITS_PER_UNIT;
10478 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10481 unsigned HOST_WIDE_INT mask
10482 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10483 if (((lo1 | lo2) & mask) == mask
10484 && (lo1 & ~mask) == 0 && hi1 == 0)
10491 if (hi3 != hi1 || lo3 != lo1)
10492 return fold_build2 (BIT_IOR_EXPR, type,
10493 fold_build2 (BIT_AND_EXPR, type,
10494 TREE_OPERAND (arg0, 0),
10495 build_int_cst_wide (type,
10500 /* (X & Y) | Y is (X, Y). */
10501 if (TREE_CODE (arg0) == BIT_AND_EXPR
10502 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10503 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10504 /* (X & Y) | X is (Y, X). */
10505 if (TREE_CODE (arg0) == BIT_AND_EXPR
10506 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10507 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10508 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10509 /* X | (X & Y) is (Y, X). */
10510 if (TREE_CODE (arg1) == BIT_AND_EXPR
10511 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10512 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10513 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10514 /* X | (Y & X) is (Y, X). */
10515 if (TREE_CODE (arg1) == BIT_AND_EXPR
10516 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10517 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10518 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10520 t1 = distribute_bit_expr (code, type, arg0, arg1);
10521 if (t1 != NULL_TREE)
10524 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10526 This results in more efficient code for machines without a NAND
10527 instruction. Combine will canonicalize to the first form
10528 which will allow use of NAND instructions provided by the
10529 backend if they exist. */
10530 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10531 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10533 return fold_build1 (BIT_NOT_EXPR, type,
10534 build2 (BIT_AND_EXPR, type,
10535 fold_convert (type,
10536 TREE_OPERAND (arg0, 0)),
10537 fold_convert (type,
10538 TREE_OPERAND (arg1, 0))));
10541 /* See if this can be simplified into a rotate first. If that
10542 is unsuccessful continue in the association code. */
10546 if (integer_zerop (arg1))
10547 return non_lvalue (fold_convert (type, arg0));
10548 if (integer_all_onesp (arg1))
10549 return fold_build1 (BIT_NOT_EXPR, type, op0);
10550 if (operand_equal_p (arg0, arg1, 0))
10551 return omit_one_operand (type, integer_zero_node, arg0);
10553 /* ~X ^ X is -1. */
10554 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10557 t1 = fold_convert (type, integer_zero_node);
10558 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10559 return omit_one_operand (type, t1, arg1);
10562 /* X ^ ~X is -1. */
10563 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10564 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10566 t1 = fold_convert (type, integer_zero_node);
10567 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10568 return omit_one_operand (type, t1, arg0);
10571 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10572 with a constant, and the two constants have no bits in common,
10573 we should treat this as a BIT_IOR_EXPR since this may produce more
10574 simplifications. */
10575 if (TREE_CODE (arg0) == BIT_AND_EXPR
10576 && TREE_CODE (arg1) == BIT_AND_EXPR
10577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10578 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10579 && integer_zerop (const_binop (BIT_AND_EXPR,
10580 TREE_OPERAND (arg0, 1),
10581 TREE_OPERAND (arg1, 1), 0)))
10583 code = BIT_IOR_EXPR;
10587 /* (X | Y) ^ X -> Y & ~ X*/
10588 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10589 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10591 tree t2 = TREE_OPERAND (arg0, 1);
10592 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10594 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10595 fold_convert (type, t1));
10599 /* (Y | X) ^ X -> Y & ~ X*/
10600 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10601 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10603 tree t2 = TREE_OPERAND (arg0, 0);
10604 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10606 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10607 fold_convert (type, t1));
10611 /* X ^ (X | Y) -> Y & ~ X*/
10612 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10613 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10615 tree t2 = TREE_OPERAND (arg1, 1);
10616 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10618 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10619 fold_convert (type, t1));
10623 /* X ^ (Y | X) -> Y & ~ X*/
10624 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10625 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10627 tree t2 = TREE_OPERAND (arg1, 0);
10628 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10630 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10631 fold_convert (type, t1));
10635 /* Convert ~X ^ ~Y to X ^ Y. */
10636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10637 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10638 return fold_build2 (code, type,
10639 fold_convert (type, TREE_OPERAND (arg0, 0)),
10640 fold_convert (type, TREE_OPERAND (arg1, 0)));
10642 /* Convert ~X ^ C to X ^ ~C. */
10643 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10644 && TREE_CODE (arg1) == INTEGER_CST)
10645 return fold_build2 (code, type,
10646 fold_convert (type, TREE_OPERAND (arg0, 0)),
10647 fold_build1 (BIT_NOT_EXPR, type, arg1));
10649 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10650 if (TREE_CODE (arg0) == BIT_AND_EXPR
10651 && integer_onep (TREE_OPERAND (arg0, 1))
10652 && integer_onep (arg1))
10653 return fold_build2 (EQ_EXPR, type, arg0,
10654 build_int_cst (TREE_TYPE (arg0), 0));
10656 /* Fold (X & Y) ^ Y as ~X & Y. */
10657 if (TREE_CODE (arg0) == BIT_AND_EXPR
10658 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10660 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10661 return fold_build2 (BIT_AND_EXPR, type,
10662 fold_build1 (BIT_NOT_EXPR, type, tem),
10663 fold_convert (type, arg1));
10665 /* Fold (X & Y) ^ X as ~Y & X. */
10666 if (TREE_CODE (arg0) == BIT_AND_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10668 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10670 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10671 return fold_build2 (BIT_AND_EXPR, type,
10672 fold_build1 (BIT_NOT_EXPR, type, tem),
10673 fold_convert (type, arg1));
10675 /* Fold X ^ (X & Y) as X & ~Y. */
10676 if (TREE_CODE (arg1) == BIT_AND_EXPR
10677 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10679 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10680 return fold_build2 (BIT_AND_EXPR, type,
10681 fold_convert (type, arg0),
10682 fold_build1 (BIT_NOT_EXPR, type, tem));
10684 /* Fold X ^ (Y & X) as ~Y & X. */
10685 if (TREE_CODE (arg1) == BIT_AND_EXPR
10686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10689 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10690 return fold_build2 (BIT_AND_EXPR, type,
10691 fold_build1 (BIT_NOT_EXPR, type, tem),
10692 fold_convert (type, arg0));
10695 /* See if this can be simplified into a rotate first. If that
10696 is unsuccessful continue in the association code. */
10700 if (integer_all_onesp (arg1))
10701 return non_lvalue (fold_convert (type, arg0));
10702 if (integer_zerop (arg1))
10703 return omit_one_operand (type, arg1, arg0);
10704 if (operand_equal_p (arg0, arg1, 0))
10705 return non_lvalue (fold_convert (type, arg0));
10707 /* ~X & X is always zero. */
10708 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10709 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10710 return omit_one_operand (type, integer_zero_node, arg1);
10712 /* X & ~X is always zero. */
10713 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10715 return omit_one_operand (type, integer_zero_node, arg0);
10717 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10718 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10719 && TREE_CODE (arg1) == INTEGER_CST
10720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10722 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10723 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10724 TREE_OPERAND (arg0, 0), tmp1);
10725 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10726 TREE_OPERAND (arg0, 1), tmp1);
10727 return fold_convert (type,
10728 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10732 /* (X | Y) & Y is (X, Y). */
10733 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10734 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10735 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10736 /* (X | Y) & X is (Y, X). */
10737 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10739 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10740 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10741 /* X & (X | Y) is (Y, X). */
10742 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10744 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10745 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10746 /* X & (Y | X) is (Y, X). */
10747 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10749 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10750 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10752 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10753 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10754 && integer_onep (TREE_OPERAND (arg0, 1))
10755 && integer_onep (arg1))
10757 tem = TREE_OPERAND (arg0, 0);
10758 return fold_build2 (EQ_EXPR, type,
10759 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10760 build_int_cst (TREE_TYPE (tem), 1)),
10761 build_int_cst (TREE_TYPE (tem), 0));
10763 /* Fold ~X & 1 as (X & 1) == 0. */
10764 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10765 && integer_onep (arg1))
10767 tem = TREE_OPERAND (arg0, 0);
10768 return fold_build2 (EQ_EXPR, type,
10769 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10770 build_int_cst (TREE_TYPE (tem), 1)),
10771 build_int_cst (TREE_TYPE (tem), 0));
10774 /* Fold (X ^ Y) & Y as ~X & Y. */
10775 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10776 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10778 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10779 return fold_build2 (BIT_AND_EXPR, type,
10780 fold_build1 (BIT_NOT_EXPR, type, tem),
10781 fold_convert (type, arg1));
10783 /* Fold (X ^ Y) & X as ~Y & X. */
10784 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10785 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10786 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10788 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10789 return fold_build2 (BIT_AND_EXPR, type,
10790 fold_build1 (BIT_NOT_EXPR, type, tem),
10791 fold_convert (type, arg1));
10793 /* Fold X & (X ^ Y) as X & ~Y. */
10794 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10795 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10797 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10798 return fold_build2 (BIT_AND_EXPR, type,
10799 fold_convert (type, arg0),
10800 fold_build1 (BIT_NOT_EXPR, type, tem));
10802 /* Fold X & (Y ^ X) as ~Y & X. */
10803 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10804 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10805 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10807 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10808 return fold_build2 (BIT_AND_EXPR, type,
10809 fold_build1 (BIT_NOT_EXPR, type, tem),
10810 fold_convert (type, arg0));
10813 t1 = distribute_bit_expr (code, type, arg0, arg1);
10814 if (t1 != NULL_TREE)
10816 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10817 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10818 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10821 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10823 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10824 && (~TREE_INT_CST_LOW (arg1)
10825 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10826 return fold_convert (type, TREE_OPERAND (arg0, 0));
10829 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10831 This results in more efficient code for machines without a NOR
10832 instruction. Combine will canonicalize to the first form
10833 which will allow use of NOR instructions provided by the
10834 backend if they exist. */
10835 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10836 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10838 return fold_build1 (BIT_NOT_EXPR, type,
10839 build2 (BIT_IOR_EXPR, type,
10840 fold_convert (type,
10841 TREE_OPERAND (arg0, 0)),
10842 fold_convert (type,
10843 TREE_OPERAND (arg1, 0))));
10846 /* If arg0 is derived from the address of an object or function, we may
10847 be able to fold this expression using the object or function's
10849 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10851 unsigned HOST_WIDE_INT modulus, residue;
10852 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10854 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10856 /* This works because modulus is a power of 2. If this weren't the
10857 case, we'd have to replace it by its greatest power-of-2
10858 divisor: modulus & -modulus. */
10860 return build_int_cst (type, residue & low);
10863 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10864 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10865 if the new mask might be further optimized. */
10866 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10867 || TREE_CODE (arg0) == RSHIFT_EXPR)
10868 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10869 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10870 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10871 < TYPE_PRECISION (TREE_TYPE (arg0))
10872 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10873 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10875 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10876 unsigned HOST_WIDE_INT mask
10877 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10878 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10879 tree shift_type = TREE_TYPE (arg0);
10881 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10882 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10883 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10884 && TYPE_PRECISION (TREE_TYPE (arg0))
10885 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10887 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10888 tree arg00 = TREE_OPERAND (arg0, 0);
10889 /* See if more bits can be proven as zero because of
10891 if (TREE_CODE (arg00) == NOP_EXPR
10892 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10894 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10895 if (TYPE_PRECISION (inner_type)
10896 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10897 && TYPE_PRECISION (inner_type) < prec)
10899 prec = TYPE_PRECISION (inner_type);
10900 /* See if we can shorten the right shift. */
10902 shift_type = inner_type;
10905 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10906 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10907 zerobits <<= prec - shiftc;
10908 /* For arithmetic shift if sign bit could be set, zerobits
10909 can contain actually sign bits, so no transformation is
10910 possible, unless MASK masks them all away. In that
10911 case the shift needs to be converted into logical shift. */
10912 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10913 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10915 if ((mask & zerobits) == 0)
10916 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10922 /* ((X << 16) & 0xff00) is (X, 0). */
10923 if ((mask & zerobits) == mask)
10924 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10926 newmask = mask | zerobits;
10927 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10931 /* Only do the transformation if NEWMASK is some integer
10933 for (prec = BITS_PER_UNIT;
10934 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10935 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10937 if (prec < HOST_BITS_PER_WIDE_INT
10938 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10940 if (shift_type != TREE_TYPE (arg0))
10942 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10943 fold_convert (shift_type,
10944 TREE_OPERAND (arg0, 0)),
10945 TREE_OPERAND (arg0, 1));
10946 tem = fold_convert (type, tem);
10950 return fold_build2 (BIT_AND_EXPR, type, tem,
10951 build_int_cst_type (TREE_TYPE (op1),
10960 /* Don't touch a floating-point divide by zero unless the mode
10961 of the constant can represent infinity. */
10962 if (TREE_CODE (arg1) == REAL_CST
10963 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10964 && real_zerop (arg1))
10967 /* Optimize A / A to 1.0 if we don't care about
10968 NaNs or Infinities. Skip the transformation
10969 for non-real operands. */
10970 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10971 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10972 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10973 && operand_equal_p (arg0, arg1, 0))
10975 tree r = build_real (TREE_TYPE (arg0), dconst1);
10977 return omit_two_operands (type, r, arg0, arg1);
10980 /* The complex version of the above A / A optimization. */
10981 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10982 && operand_equal_p (arg0, arg1, 0))
10984 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10985 if (! HONOR_NANS (TYPE_MODE (elem_type))
10986 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10988 tree r = build_real (elem_type, dconst1);
10989 /* omit_two_operands will call fold_convert for us. */
10990 return omit_two_operands (type, r, arg0, arg1);
10994 /* (-A) / (-B) -> A / B */
10995 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10996 return fold_build2 (RDIV_EXPR, type,
10997 TREE_OPERAND (arg0, 0),
10998 negate_expr (arg1));
10999 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11000 return fold_build2 (RDIV_EXPR, type,
11001 negate_expr (arg0),
11002 TREE_OPERAND (arg1, 0));
11004 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11005 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11006 && real_onep (arg1))
11007 return non_lvalue (fold_convert (type, arg0));
11009 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11010 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11011 && real_minus_onep (arg1))
11012 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11014 /* If ARG1 is a constant, we can convert this to a multiply by the
11015 reciprocal. This does not have the same rounding properties,
11016 so only do this if -freciprocal-math. We can actually
11017 always safely do it if ARG1 is a power of two, but it's hard to
11018 tell if it is or not in a portable manner. */
11019 if (TREE_CODE (arg1) == REAL_CST)
11021 if (flag_reciprocal_math
11022 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11024 return fold_build2 (MULT_EXPR, type, arg0, tem);
11025 /* Find the reciprocal if optimizing and the result is exact. */
11029 r = TREE_REAL_CST (arg1);
11030 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11032 tem = build_real (type, r);
11033 return fold_build2 (MULT_EXPR, type,
11034 fold_convert (type, arg0), tem);
11038 /* Convert A/B/C to A/(B*C). */
11039 if (flag_reciprocal_math
11040 && TREE_CODE (arg0) == RDIV_EXPR)
11041 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11042 fold_build2 (MULT_EXPR, type,
11043 TREE_OPERAND (arg0, 1), arg1));
11045 /* Convert A/(B/C) to (A/B)*C. */
11046 if (flag_reciprocal_math
11047 && TREE_CODE (arg1) == RDIV_EXPR)
11048 return fold_build2 (MULT_EXPR, type,
11049 fold_build2 (RDIV_EXPR, type, arg0,
11050 TREE_OPERAND (arg1, 0)),
11051 TREE_OPERAND (arg1, 1));
11053 /* Convert C1/(X*C2) into (C1/C2)/X. */
11054 if (flag_reciprocal_math
11055 && TREE_CODE (arg1) == MULT_EXPR
11056 && TREE_CODE (arg0) == REAL_CST
11057 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11059 tree tem = const_binop (RDIV_EXPR, arg0,
11060 TREE_OPERAND (arg1, 1), 0);
11062 return fold_build2 (RDIV_EXPR, type, tem,
11063 TREE_OPERAND (arg1, 0));
11066 if (flag_unsafe_math_optimizations)
11068 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11069 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11071 /* Optimize sin(x)/cos(x) as tan(x). */
11072 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11073 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11074 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11075 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11076 CALL_EXPR_ARG (arg1, 0), 0))
11078 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11080 if (tanfn != NULL_TREE)
11081 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11084 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11085 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11086 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11087 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11088 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11089 CALL_EXPR_ARG (arg1, 0), 0))
11091 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11093 if (tanfn != NULL_TREE)
11095 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11096 return fold_build2 (RDIV_EXPR, type,
11097 build_real (type, dconst1), tmp);
11101 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11102 NaNs or Infinities. */
11103 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11104 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11105 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11107 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11108 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11110 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11111 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11112 && operand_equal_p (arg00, arg01, 0))
11114 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11116 if (cosfn != NULL_TREE)
11117 return build_call_expr (cosfn, 1, arg00);
11121 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11122 NaNs or Infinities. */
11123 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11124 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11125 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11127 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11128 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11130 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11131 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11132 && operand_equal_p (arg00, arg01, 0))
11134 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11136 if (cosfn != NULL_TREE)
11138 tree tmp = build_call_expr (cosfn, 1, arg00);
11139 return fold_build2 (RDIV_EXPR, type,
11140 build_real (type, dconst1),
11146 /* Optimize pow(x,c)/x as pow(x,c-1). */
11147 if (fcode0 == BUILT_IN_POW
11148 || fcode0 == BUILT_IN_POWF
11149 || fcode0 == BUILT_IN_POWL)
11151 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11152 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11153 if (TREE_CODE (arg01) == REAL_CST
11154 && !TREE_OVERFLOW (arg01)
11155 && operand_equal_p (arg1, arg00, 0))
11157 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11161 c = TREE_REAL_CST (arg01);
11162 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11163 arg = build_real (type, c);
11164 return build_call_expr (powfn, 2, arg1, arg);
11168 /* Optimize a/root(b/c) into a*root(c/b). */
11169 if (BUILTIN_ROOT_P (fcode1))
11171 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11173 if (TREE_CODE (rootarg) == RDIV_EXPR)
11175 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11176 tree b = TREE_OPERAND (rootarg, 0);
11177 tree c = TREE_OPERAND (rootarg, 1);
11179 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11181 tmp = build_call_expr (rootfn, 1, tmp);
11182 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11186 /* Optimize x/expN(y) into x*expN(-y). */
11187 if (BUILTIN_EXPONENT_P (fcode1))
11189 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11190 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11191 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11192 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11195 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11196 if (fcode1 == BUILT_IN_POW
11197 || fcode1 == BUILT_IN_POWF
11198 || fcode1 == BUILT_IN_POWL)
11200 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11201 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11202 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11203 tree neg11 = fold_convert (type, negate_expr (arg11));
11204 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11205 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11210 case TRUNC_DIV_EXPR:
11211 case FLOOR_DIV_EXPR:
11212 /* Simplify A / (B << N) where A and B are positive and B is
11213 a power of 2, to A >> (N + log2(B)). */
11214 strict_overflow_p = false;
11215 if (TREE_CODE (arg1) == LSHIFT_EXPR
11216 && (TYPE_UNSIGNED (type)
11217 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11219 tree sval = TREE_OPERAND (arg1, 0);
11220 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11222 tree sh_cnt = TREE_OPERAND (arg1, 1);
11223 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11225 if (strict_overflow_p)
11226 fold_overflow_warning (("assuming signed overflow does not "
11227 "occur when simplifying A / (B << N)"),
11228 WARN_STRICT_OVERFLOW_MISC);
11230 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11231 sh_cnt, build_int_cst (NULL_TREE, pow2));
11232 return fold_build2 (RSHIFT_EXPR, type,
11233 fold_convert (type, arg0), sh_cnt);
11237 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11238 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11239 if (INTEGRAL_TYPE_P (type)
11240 && TYPE_UNSIGNED (type)
11241 && code == FLOOR_DIV_EXPR)
11242 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11246 case ROUND_DIV_EXPR:
11247 case CEIL_DIV_EXPR:
11248 case EXACT_DIV_EXPR:
11249 if (integer_onep (arg1))
11250 return non_lvalue (fold_convert (type, arg0));
11251 if (integer_zerop (arg1))
11253 /* X / -1 is -X. */
11254 if (!TYPE_UNSIGNED (type)
11255 && TREE_CODE (arg1) == INTEGER_CST
11256 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11257 && TREE_INT_CST_HIGH (arg1) == -1)
11258 return fold_convert (type, negate_expr (arg0));
11260 /* Convert -A / -B to A / B when the type is signed and overflow is
11262 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11263 && TREE_CODE (arg0) == NEGATE_EXPR
11264 && negate_expr_p (arg1))
11266 if (INTEGRAL_TYPE_P (type))
11267 fold_overflow_warning (("assuming signed overflow does not occur "
11268 "when distributing negation across "
11270 WARN_STRICT_OVERFLOW_MISC);
11271 return fold_build2 (code, type,
11272 fold_convert (type, TREE_OPERAND (arg0, 0)),
11273 negate_expr (arg1));
11275 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11276 && TREE_CODE (arg1) == NEGATE_EXPR
11277 && negate_expr_p (arg0))
11279 if (INTEGRAL_TYPE_P (type))
11280 fold_overflow_warning (("assuming signed overflow does not occur "
11281 "when distributing negation across "
11283 WARN_STRICT_OVERFLOW_MISC);
11284 return fold_build2 (code, type, negate_expr (arg0),
11285 TREE_OPERAND (arg1, 0));
11288 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11289 operation, EXACT_DIV_EXPR.
11291 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11292 At one time others generated faster code, it's not clear if they do
11293 after the last round to changes to the DIV code in expmed.c. */
11294 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11295 && multiple_of_p (type, arg0, arg1))
11296 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11298 strict_overflow_p = false;
11299 if (TREE_CODE (arg1) == INTEGER_CST
11300 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11301 &strict_overflow_p)))
11303 if (strict_overflow_p)
11304 fold_overflow_warning (("assuming signed overflow does not occur "
11305 "when simplifying division"),
11306 WARN_STRICT_OVERFLOW_MISC);
11307 return fold_convert (type, tem);
11312 case CEIL_MOD_EXPR:
11313 case FLOOR_MOD_EXPR:
11314 case ROUND_MOD_EXPR:
11315 case TRUNC_MOD_EXPR:
11316 /* X % 1 is always zero, but be sure to preserve any side
11318 if (integer_onep (arg1))
11319 return omit_one_operand (type, integer_zero_node, arg0);
11321 /* X % 0, return X % 0 unchanged so that we can get the
11322 proper warnings and errors. */
11323 if (integer_zerop (arg1))
11326 /* 0 % X is always zero, but be sure to preserve any side
11327 effects in X. Place this after checking for X == 0. */
11328 if (integer_zerop (arg0))
11329 return omit_one_operand (type, integer_zero_node, arg1);
11331 /* X % -1 is zero. */
11332 if (!TYPE_UNSIGNED (type)
11333 && TREE_CODE (arg1) == INTEGER_CST
11334 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11335 && TREE_INT_CST_HIGH (arg1) == -1)
11336 return omit_one_operand (type, integer_zero_node, arg0);
11338 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11339 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11340 strict_overflow_p = false;
11341 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11342 && (TYPE_UNSIGNED (type)
11343 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11346 /* Also optimize A % (C << N) where C is a power of 2,
11347 to A & ((C << N) - 1). */
11348 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11349 c = TREE_OPERAND (arg1, 0);
11351 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11353 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11354 build_int_cst (TREE_TYPE (arg1), 1));
11355 if (strict_overflow_p)
11356 fold_overflow_warning (("assuming signed overflow does not "
11357 "occur when simplifying "
11358 "X % (power of two)"),
11359 WARN_STRICT_OVERFLOW_MISC);
11360 return fold_build2 (BIT_AND_EXPR, type,
11361 fold_convert (type, arg0),
11362 fold_convert (type, mask));
11366 /* X % -C is the same as X % C. */
11367 if (code == TRUNC_MOD_EXPR
11368 && !TYPE_UNSIGNED (type)
11369 && TREE_CODE (arg1) == INTEGER_CST
11370 && !TREE_OVERFLOW (arg1)
11371 && TREE_INT_CST_HIGH (arg1) < 0
11372 && !TYPE_OVERFLOW_TRAPS (type)
11373 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11374 && !sign_bit_p (arg1, arg1))
11375 return fold_build2 (code, type, fold_convert (type, arg0),
11376 fold_convert (type, negate_expr (arg1)));
11378 /* X % -Y is the same as X % Y. */
11379 if (code == TRUNC_MOD_EXPR
11380 && !TYPE_UNSIGNED (type)
11381 && TREE_CODE (arg1) == NEGATE_EXPR
11382 && !TYPE_OVERFLOW_TRAPS (type))
11383 return fold_build2 (code, type, fold_convert (type, arg0),
11384 fold_convert (type, TREE_OPERAND (arg1, 0)));
11386 if (TREE_CODE (arg1) == INTEGER_CST
11387 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11388 &strict_overflow_p)))
11390 if (strict_overflow_p)
11391 fold_overflow_warning (("assuming signed overflow does not occur "
11392 "when simplifying modulos"),
11393 WARN_STRICT_OVERFLOW_MISC);
11394 return fold_convert (type, tem);
11401 if (integer_all_onesp (arg0))
11402 return omit_one_operand (type, arg0, arg1);
11406 /* Optimize -1 >> x for arithmetic right shifts. */
11407 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11408 return omit_one_operand (type, arg0, arg1);
11409 /* ... fall through ... */
11413 if (integer_zerop (arg1))
11414 return non_lvalue (fold_convert (type, arg0));
11415 if (integer_zerop (arg0))
11416 return omit_one_operand (type, arg0, arg1);
11418 /* Since negative shift count is not well-defined,
11419 don't try to compute it in the compiler. */
11420 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11423 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11424 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11425 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11426 && host_integerp (TREE_OPERAND (arg0, 1), false)
11427 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11429 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11430 + TREE_INT_CST_LOW (arg1));
11432 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11433 being well defined. */
11434 if (low >= TYPE_PRECISION (type))
11436 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11437 low = low % TYPE_PRECISION (type);
11438 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11439 return build_int_cst (type, 0);
11441 low = TYPE_PRECISION (type) - 1;
11444 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11445 build_int_cst (type, low));
11448 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11449 into x & ((unsigned)-1 >> c) for unsigned types. */
11450 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11451 || (TYPE_UNSIGNED (type)
11452 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11453 && host_integerp (arg1, false)
11454 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11455 && host_integerp (TREE_OPERAND (arg0, 1), false)
11456 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11458 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11459 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11465 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11467 lshift = build_int_cst (type, -1);
11468 lshift = int_const_binop (code, lshift, arg1, 0);
11470 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11474 /* Rewrite an LROTATE_EXPR by a constant into an
11475 RROTATE_EXPR by a new constant. */
11476 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11478 tree tem = build_int_cst (TREE_TYPE (arg1),
11479 TYPE_PRECISION (type));
11480 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11481 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11484 /* If we have a rotate of a bit operation with the rotate count and
11485 the second operand of the bit operation both constant,
11486 permute the two operations. */
11487 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11488 && (TREE_CODE (arg0) == BIT_AND_EXPR
11489 || TREE_CODE (arg0) == BIT_IOR_EXPR
11490 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11491 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11492 return fold_build2 (TREE_CODE (arg0), type,
11493 fold_build2 (code, type,
11494 TREE_OPERAND (arg0, 0), arg1),
11495 fold_build2 (code, type,
11496 TREE_OPERAND (arg0, 1), arg1));
11498 /* Two consecutive rotates adding up to the precision of the
11499 type can be ignored. */
11500 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11501 && TREE_CODE (arg0) == RROTATE_EXPR
11502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11503 && TREE_INT_CST_HIGH (arg1) == 0
11504 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11505 && ((TREE_INT_CST_LOW (arg1)
11506 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11507 == (unsigned int) TYPE_PRECISION (type)))
11508 return TREE_OPERAND (arg0, 0);
11510 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11511 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11512 if the latter can be further optimized. */
11513 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11514 && TREE_CODE (arg0) == BIT_AND_EXPR
11515 && TREE_CODE (arg1) == INTEGER_CST
11516 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11518 tree mask = fold_build2 (code, type,
11519 fold_convert (type, TREE_OPERAND (arg0, 1)),
11521 tree shift = fold_build2 (code, type,
11522 fold_convert (type, TREE_OPERAND (arg0, 0)),
11524 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11532 if (operand_equal_p (arg0, arg1, 0))
11533 return omit_one_operand (type, arg0, arg1);
11534 if (INTEGRAL_TYPE_P (type)
11535 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11536 return omit_one_operand (type, arg1, arg0);
11537 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11543 if (operand_equal_p (arg0, arg1, 0))
11544 return omit_one_operand (type, arg0, arg1);
11545 if (INTEGRAL_TYPE_P (type)
11546 && TYPE_MAX_VALUE (type)
11547 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11548 return omit_one_operand (type, arg1, arg0);
11549 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11554 case TRUTH_ANDIF_EXPR:
11555 /* Note that the operands of this must be ints
11556 and their values must be 0 or 1.
11557 ("true" is a fixed value perhaps depending on the language.) */
11558 /* If first arg is constant zero, return it. */
11559 if (integer_zerop (arg0))
11560 return fold_convert (type, arg0);
11561 case TRUTH_AND_EXPR:
11562 /* If either arg is constant true, drop it. */
11563 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11564 return non_lvalue (fold_convert (type, arg1));
11565 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11566 /* Preserve sequence points. */
11567 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11568 return non_lvalue (fold_convert (type, arg0));
11569 /* If second arg is constant zero, result is zero, but first arg
11570 must be evaluated. */
11571 if (integer_zerop (arg1))
11572 return omit_one_operand (type, arg1, arg0);
11573 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11574 case will be handled here. */
11575 if (integer_zerop (arg0))
11576 return omit_one_operand (type, arg0, arg1);
11578 /* !X && X is always false. */
11579 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11581 return omit_one_operand (type, integer_zero_node, arg1);
11582 /* X && !X is always false. */
11583 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11584 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11585 return omit_one_operand (type, integer_zero_node, arg0);
11587 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11588 means A >= Y && A != MAX, but in this case we know that
11591 if (!TREE_SIDE_EFFECTS (arg0)
11592 && !TREE_SIDE_EFFECTS (arg1))
11594 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11595 if (tem && !operand_equal_p (tem, arg0, 0))
11596 return fold_build2 (code, type, tem, arg1);
11598 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11599 if (tem && !operand_equal_p (tem, arg1, 0))
11600 return fold_build2 (code, type, arg0, tem);
11604 /* We only do these simplifications if we are optimizing. */
11608 /* Check for things like (A || B) && (A || C). We can convert this
11609 to A || (B && C). Note that either operator can be any of the four
11610 truth and/or operations and the transformation will still be
11611 valid. Also note that we only care about order for the
11612 ANDIF and ORIF operators. If B contains side effects, this
11613 might change the truth-value of A. */
11614 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11615 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11616 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11617 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11618 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11619 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11621 tree a00 = TREE_OPERAND (arg0, 0);
11622 tree a01 = TREE_OPERAND (arg0, 1);
11623 tree a10 = TREE_OPERAND (arg1, 0);
11624 tree a11 = TREE_OPERAND (arg1, 1);
11625 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11626 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11627 && (code == TRUTH_AND_EXPR
11628 || code == TRUTH_OR_EXPR));
11630 if (operand_equal_p (a00, a10, 0))
11631 return fold_build2 (TREE_CODE (arg0), type, a00,
11632 fold_build2 (code, type, a01, a11));
11633 else if (commutative && operand_equal_p (a00, a11, 0))
11634 return fold_build2 (TREE_CODE (arg0), type, a00,
11635 fold_build2 (code, type, a01, a10));
11636 else if (commutative && operand_equal_p (a01, a10, 0))
11637 return fold_build2 (TREE_CODE (arg0), type, a01,
11638 fold_build2 (code, type, a00, a11));
11640 /* This case if tricky because we must either have commutative
11641 operators or else A10 must not have side-effects. */
11643 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11644 && operand_equal_p (a01, a11, 0))
11645 return fold_build2 (TREE_CODE (arg0), type,
11646 fold_build2 (code, type, a00, a10),
11650 /* See if we can build a range comparison. */
11651 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11654 /* Check for the possibility of merging component references. If our
11655 lhs is another similar operation, try to merge its rhs with our
11656 rhs. Then try to merge our lhs and rhs. */
11657 if (TREE_CODE (arg0) == code
11658 && 0 != (tem = fold_truthop (code, type,
11659 TREE_OPERAND (arg0, 1), arg1)))
11660 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11662 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11667 case TRUTH_ORIF_EXPR:
11668 /* Note that the operands of this must be ints
11669 and their values must be 0 or true.
11670 ("true" is a fixed value perhaps depending on the language.) */
11671 /* If first arg is constant true, return it. */
11672 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11673 return fold_convert (type, arg0);
11674 case TRUTH_OR_EXPR:
11675 /* If either arg is constant zero, drop it. */
11676 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11677 return non_lvalue (fold_convert (type, arg1));
11678 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11679 /* Preserve sequence points. */
11680 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11681 return non_lvalue (fold_convert (type, arg0));
11682 /* If second arg is constant true, result is true, but we must
11683 evaluate first arg. */
11684 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11685 return omit_one_operand (type, arg1, arg0);
11686 /* Likewise for first arg, but note this only occurs here for
11688 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11689 return omit_one_operand (type, arg0, arg1);
11691 /* !X || X is always true. */
11692 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11693 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11694 return omit_one_operand (type, integer_one_node, arg1);
11695 /* X || !X is always true. */
11696 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11698 return omit_one_operand (type, integer_one_node, arg0);
11702 case TRUTH_XOR_EXPR:
11703 /* If the second arg is constant zero, drop it. */
11704 if (integer_zerop (arg1))
11705 return non_lvalue (fold_convert (type, arg0));
11706 /* If the second arg is constant true, this is a logical inversion. */
11707 if (integer_onep (arg1))
11709 /* Only call invert_truthvalue if operand is a truth value. */
11710 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11711 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11713 tem = invert_truthvalue (arg0);
11714 return non_lvalue (fold_convert (type, tem));
11716 /* Identical arguments cancel to zero. */
11717 if (operand_equal_p (arg0, arg1, 0))
11718 return omit_one_operand (type, integer_zero_node, arg0);
11720 /* !X ^ X is always true. */
11721 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11722 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11723 return omit_one_operand (type, integer_one_node, arg1);
11725 /* X ^ !X is always true. */
11726 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11727 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11728 return omit_one_operand (type, integer_one_node, arg0);
11734 tem = fold_comparison (code, type, op0, op1);
11735 if (tem != NULL_TREE)
11738 /* bool_var != 0 becomes bool_var. */
11739 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11740 && code == NE_EXPR)
11741 return non_lvalue (fold_convert (type, arg0));
11743 /* bool_var == 1 becomes bool_var. */
11744 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11745 && code == EQ_EXPR)
11746 return non_lvalue (fold_convert (type, arg0));
11748 /* bool_var != 1 becomes !bool_var. */
11749 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11750 && code == NE_EXPR)
11751 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11753 /* bool_var == 0 becomes !bool_var. */
11754 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11755 && code == EQ_EXPR)
11756 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11758 /* If this is an equality comparison of the address of two non-weak,
11759 unaliased symbols neither of which are extern (since we do not
11760 have access to attributes for externs), then we know the result. */
11761 if (TREE_CODE (arg0) == ADDR_EXPR
11762 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11763 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11764 && ! lookup_attribute ("alias",
11765 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11766 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11767 && TREE_CODE (arg1) == ADDR_EXPR
11768 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11769 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11770 && ! lookup_attribute ("alias",
11771 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11772 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11774 /* We know that we're looking at the address of two
11775 non-weak, unaliased, static _DECL nodes.
11777 It is both wasteful and incorrect to call operand_equal_p
11778 to compare the two ADDR_EXPR nodes. It is wasteful in that
11779 all we need to do is test pointer equality for the arguments
11780 to the two ADDR_EXPR nodes. It is incorrect to use
11781 operand_equal_p as that function is NOT equivalent to a
11782 C equality test. It can in fact return false for two
11783 objects which would test as equal using the C equality
11785 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11786 return constant_boolean_node (equal
11787 ? code == EQ_EXPR : code != EQ_EXPR,
11791 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11792 a MINUS_EXPR of a constant, we can convert it into a comparison with
11793 a revised constant as long as no overflow occurs. */
11794 if (TREE_CODE (arg1) == INTEGER_CST
11795 && (TREE_CODE (arg0) == PLUS_EXPR
11796 || TREE_CODE (arg0) == MINUS_EXPR)
11797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11798 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11799 ? MINUS_EXPR : PLUS_EXPR,
11800 fold_convert (TREE_TYPE (arg0), arg1),
11801 TREE_OPERAND (arg0, 1), 0))
11802 && !TREE_OVERFLOW (tem))
11803 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11805 /* Similarly for a NEGATE_EXPR. */
11806 if (TREE_CODE (arg0) == NEGATE_EXPR
11807 && TREE_CODE (arg1) == INTEGER_CST
11808 && 0 != (tem = negate_expr (arg1))
11809 && TREE_CODE (tem) == INTEGER_CST
11810 && !TREE_OVERFLOW (tem))
11811 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11813 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11814 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11815 && TREE_CODE (arg1) == INTEGER_CST
11816 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11817 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11818 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11819 fold_convert (TREE_TYPE (arg0), arg1),
11820 TREE_OPERAND (arg0, 1)));
11822 /* Transform comparisons of the form X +- C CMP X. */
11823 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11825 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11826 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11827 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11829 tree cst = TREE_OPERAND (arg0, 1);
11831 if (code == EQ_EXPR
11832 && !integer_zerop (cst))
11833 return omit_two_operands (type, boolean_false_node,
11834 TREE_OPERAND (arg0, 0), arg1);
11836 return omit_two_operands (type, boolean_true_node,
11837 TREE_OPERAND (arg0, 0), arg1);
11840 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11841 for !=. Don't do this for ordered comparisons due to overflow. */
11842 if (TREE_CODE (arg0) == MINUS_EXPR
11843 && integer_zerop (arg1))
11844 return fold_build2 (code, type,
11845 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11847 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11848 if (TREE_CODE (arg0) == ABS_EXPR
11849 && (integer_zerop (arg1) || real_zerop (arg1)))
11850 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11852 /* If this is an EQ or NE comparison with zero and ARG0 is
11853 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11854 two operations, but the latter can be done in one less insn
11855 on machines that have only two-operand insns or on which a
11856 constant cannot be the first operand. */
11857 if (TREE_CODE (arg0) == BIT_AND_EXPR
11858 && integer_zerop (arg1))
11860 tree arg00 = TREE_OPERAND (arg0, 0);
11861 tree arg01 = TREE_OPERAND (arg0, 1);
11862 if (TREE_CODE (arg00) == LSHIFT_EXPR
11863 && integer_onep (TREE_OPERAND (arg00, 0)))
11865 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11866 arg01, TREE_OPERAND (arg00, 1));
11867 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11868 build_int_cst (TREE_TYPE (arg0), 1));
11869 return fold_build2 (code, type,
11870 fold_convert (TREE_TYPE (arg1), tem), arg1);
11872 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11873 && integer_onep (TREE_OPERAND (arg01, 0)))
11875 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11876 arg00, TREE_OPERAND (arg01, 1));
11877 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11878 build_int_cst (TREE_TYPE (arg0), 1));
11879 return fold_build2 (code, type,
11880 fold_convert (TREE_TYPE (arg1), tem), arg1);
11884 /* If this is an NE or EQ comparison of zero against the result of a
11885 signed MOD operation whose second operand is a power of 2, make
11886 the MOD operation unsigned since it is simpler and equivalent. */
11887 if (integer_zerop (arg1)
11888 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11889 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11890 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11891 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11892 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11893 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11895 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11896 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11897 fold_convert (newtype,
11898 TREE_OPERAND (arg0, 0)),
11899 fold_convert (newtype,
11900 TREE_OPERAND (arg0, 1)));
11902 return fold_build2 (code, type, newmod,
11903 fold_convert (newtype, arg1));
11906 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11907 C1 is a valid shift constant, and C2 is a power of two, i.e.
11909 if (TREE_CODE (arg0) == BIT_AND_EXPR
11910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11911 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11913 && integer_pow2p (TREE_OPERAND (arg0, 1))
11914 && integer_zerop (arg1))
11916 tree itype = TREE_TYPE (arg0);
11917 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11918 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11920 /* Check for a valid shift count. */
11921 if (TREE_INT_CST_HIGH (arg001) == 0
11922 && TREE_INT_CST_LOW (arg001) < prec)
11924 tree arg01 = TREE_OPERAND (arg0, 1);
11925 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11926 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11927 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11928 can be rewritten as (X & (C2 << C1)) != 0. */
11929 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11931 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11932 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11933 return fold_build2 (code, type, tem, arg1);
11935 /* Otherwise, for signed (arithmetic) shifts,
11936 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11937 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11938 else if (!TYPE_UNSIGNED (itype))
11939 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11940 arg000, build_int_cst (itype, 0));
11941 /* Otherwise, of unsigned (logical) shifts,
11942 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11943 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11945 return omit_one_operand (type,
11946 code == EQ_EXPR ? integer_one_node
11947 : integer_zero_node,
11952 /* If this is an NE comparison of zero with an AND of one, remove the
11953 comparison since the AND will give the correct value. */
11954 if (code == NE_EXPR
11955 && integer_zerop (arg1)
11956 && TREE_CODE (arg0) == BIT_AND_EXPR
11957 && integer_onep (TREE_OPERAND (arg0, 1)))
11958 return fold_convert (type, arg0);
11960 /* If we have (A & C) == C where C is a power of 2, convert this into
11961 (A & C) != 0. Similarly for NE_EXPR. */
11962 if (TREE_CODE (arg0) == BIT_AND_EXPR
11963 && integer_pow2p (TREE_OPERAND (arg0, 1))
11964 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11965 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11966 arg0, fold_convert (TREE_TYPE (arg0),
11967 integer_zero_node));
11969 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11970 bit, then fold the expression into A < 0 or A >= 0. */
11971 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11975 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11976 Similarly for NE_EXPR. */
11977 if (TREE_CODE (arg0) == BIT_AND_EXPR
11978 && TREE_CODE (arg1) == INTEGER_CST
11979 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11981 tree notc = fold_build1 (BIT_NOT_EXPR,
11982 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11983 TREE_OPERAND (arg0, 1));
11984 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11986 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11987 if (integer_nonzerop (dandnotc))
11988 return omit_one_operand (type, rslt, arg0);
11991 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11992 Similarly for NE_EXPR. */
11993 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11994 && TREE_CODE (arg1) == INTEGER_CST
11995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11997 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11998 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11999 TREE_OPERAND (arg0, 1), notd);
12000 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12001 if (integer_nonzerop (candnotd))
12002 return omit_one_operand (type, rslt, arg0);
12005 /* Optimize comparisons of strlen vs zero to a compare of the
12006 first character of the string vs zero. To wit,
12007 strlen(ptr) == 0 => *ptr == 0
12008 strlen(ptr) != 0 => *ptr != 0
12009 Other cases should reduce to one of these two (or a constant)
12010 due to the return value of strlen being unsigned. */
12011 if (TREE_CODE (arg0) == CALL_EXPR
12012 && integer_zerop (arg1))
12014 tree fndecl = get_callee_fndecl (arg0);
12017 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12018 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12019 && call_expr_nargs (arg0) == 1
12020 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12022 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12023 return fold_build2 (code, type, iref,
12024 build_int_cst (TREE_TYPE (iref), 0));
12028 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12029 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12030 if (TREE_CODE (arg0) == RSHIFT_EXPR
12031 && integer_zerop (arg1)
12032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12034 tree arg00 = TREE_OPERAND (arg0, 0);
12035 tree arg01 = TREE_OPERAND (arg0, 1);
12036 tree itype = TREE_TYPE (arg00);
12037 if (TREE_INT_CST_HIGH (arg01) == 0
12038 && TREE_INT_CST_LOW (arg01)
12039 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12041 if (TYPE_UNSIGNED (itype))
12043 itype = signed_type_for (itype);
12044 arg00 = fold_convert (itype, arg00);
12046 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12047 type, arg00, build_int_cst (itype, 0));
12051 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12052 if (integer_zerop (arg1)
12053 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12054 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12055 TREE_OPERAND (arg0, 1));
12057 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12058 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12059 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12060 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12061 build_int_cst (TREE_TYPE (arg1), 0));
12062 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12063 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12065 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12066 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12067 build_int_cst (TREE_TYPE (arg1), 0));
12069 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12070 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12071 && TREE_CODE (arg1) == INTEGER_CST
12072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12073 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12074 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12075 TREE_OPERAND (arg0, 1), arg1));
12077 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12078 (X & C) == 0 when C is a single bit. */
12079 if (TREE_CODE (arg0) == BIT_AND_EXPR
12080 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12081 && integer_zerop (arg1)
12082 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12084 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12085 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12086 TREE_OPERAND (arg0, 1));
12087 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12091 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12092 constant C is a power of two, i.e. a single bit. */
12093 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12094 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12095 && integer_zerop (arg1)
12096 && integer_pow2p (TREE_OPERAND (arg0, 1))
12097 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12098 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12100 tree arg00 = TREE_OPERAND (arg0, 0);
12101 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12102 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12105 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12106 when is C is a power of two, i.e. a single bit. */
12107 if (TREE_CODE (arg0) == BIT_AND_EXPR
12108 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12109 && integer_zerop (arg1)
12110 && integer_pow2p (TREE_OPERAND (arg0, 1))
12111 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12112 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12114 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12115 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12116 arg000, TREE_OPERAND (arg0, 1));
12117 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12118 tem, build_int_cst (TREE_TYPE (tem), 0));
12121 if (integer_zerop (arg1)
12122 && tree_expr_nonzero_p (arg0))
12124 tree res = constant_boolean_node (code==NE_EXPR, type);
12125 return omit_one_operand (type, res, arg0);
12128 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12129 if (TREE_CODE (arg0) == NEGATE_EXPR
12130 && TREE_CODE (arg1) == NEGATE_EXPR)
12131 return fold_build2 (code, type,
12132 TREE_OPERAND (arg0, 0),
12133 TREE_OPERAND (arg1, 0));
12135 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12136 if (TREE_CODE (arg0) == BIT_AND_EXPR
12137 && TREE_CODE (arg1) == BIT_AND_EXPR)
12139 tree arg00 = TREE_OPERAND (arg0, 0);
12140 tree arg01 = TREE_OPERAND (arg0, 1);
12141 tree arg10 = TREE_OPERAND (arg1, 0);
12142 tree arg11 = TREE_OPERAND (arg1, 1);
12143 tree itype = TREE_TYPE (arg0);
12145 if (operand_equal_p (arg01, arg11, 0))
12146 return fold_build2 (code, type,
12147 fold_build2 (BIT_AND_EXPR, itype,
12148 fold_build2 (BIT_XOR_EXPR, itype,
12151 build_int_cst (itype, 0));
12153 if (operand_equal_p (arg01, arg10, 0))
12154 return fold_build2 (code, type,
12155 fold_build2 (BIT_AND_EXPR, itype,
12156 fold_build2 (BIT_XOR_EXPR, itype,
12159 build_int_cst (itype, 0));
12161 if (operand_equal_p (arg00, arg11, 0))
12162 return fold_build2 (code, type,
12163 fold_build2 (BIT_AND_EXPR, itype,
12164 fold_build2 (BIT_XOR_EXPR, itype,
12167 build_int_cst (itype, 0));
12169 if (operand_equal_p (arg00, arg10, 0))
12170 return fold_build2 (code, type,
12171 fold_build2 (BIT_AND_EXPR, itype,
12172 fold_build2 (BIT_XOR_EXPR, itype,
12175 build_int_cst (itype, 0));
12178 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12179 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12181 tree arg00 = TREE_OPERAND (arg0, 0);
12182 tree arg01 = TREE_OPERAND (arg0, 1);
12183 tree arg10 = TREE_OPERAND (arg1, 0);
12184 tree arg11 = TREE_OPERAND (arg1, 1);
12185 tree itype = TREE_TYPE (arg0);
12187 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12188 operand_equal_p guarantees no side-effects so we don't need
12189 to use omit_one_operand on Z. */
12190 if (operand_equal_p (arg01, arg11, 0))
12191 return fold_build2 (code, type, arg00, arg10);
12192 if (operand_equal_p (arg01, arg10, 0))
12193 return fold_build2 (code, type, arg00, arg11);
12194 if (operand_equal_p (arg00, arg11, 0))
12195 return fold_build2 (code, type, arg01, arg10);
12196 if (operand_equal_p (arg00, arg10, 0))
12197 return fold_build2 (code, type, arg01, arg11);
12199 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12200 if (TREE_CODE (arg01) == INTEGER_CST
12201 && TREE_CODE (arg11) == INTEGER_CST)
12202 return fold_build2 (code, type,
12203 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12204 fold_build2 (BIT_XOR_EXPR, itype,
12209 /* Attempt to simplify equality/inequality comparisons of complex
12210 values. Only lower the comparison if the result is known or
12211 can be simplified to a single scalar comparison. */
12212 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12213 || TREE_CODE (arg0) == COMPLEX_CST)
12214 && (TREE_CODE (arg1) == COMPLEX_EXPR
12215 || TREE_CODE (arg1) == COMPLEX_CST))
12217 tree real0, imag0, real1, imag1;
12220 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12222 real0 = TREE_OPERAND (arg0, 0);
12223 imag0 = TREE_OPERAND (arg0, 1);
12227 real0 = TREE_REALPART (arg0);
12228 imag0 = TREE_IMAGPART (arg0);
12231 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12233 real1 = TREE_OPERAND (arg1, 0);
12234 imag1 = TREE_OPERAND (arg1, 1);
12238 real1 = TREE_REALPART (arg1);
12239 imag1 = TREE_IMAGPART (arg1);
12242 rcond = fold_binary (code, type, real0, real1);
12243 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12245 if (integer_zerop (rcond))
12247 if (code == EQ_EXPR)
12248 return omit_two_operands (type, boolean_false_node,
12250 return fold_build2 (NE_EXPR, type, imag0, imag1);
12254 if (code == NE_EXPR)
12255 return omit_two_operands (type, boolean_true_node,
12257 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12261 icond = fold_binary (code, type, imag0, imag1);
12262 if (icond && TREE_CODE (icond) == INTEGER_CST)
12264 if (integer_zerop (icond))
12266 if (code == EQ_EXPR)
12267 return omit_two_operands (type, boolean_false_node,
12269 return fold_build2 (NE_EXPR, type, real0, real1);
12273 if (code == NE_EXPR)
12274 return omit_two_operands (type, boolean_true_node,
12276 return fold_build2 (EQ_EXPR, type, real0, real1);
12287 tem = fold_comparison (code, type, op0, op1);
12288 if (tem != NULL_TREE)
12291 /* Transform comparisons of the form X +- C CMP X. */
12292 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12293 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12294 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12295 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12296 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12297 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12299 tree arg01 = TREE_OPERAND (arg0, 1);
12300 enum tree_code code0 = TREE_CODE (arg0);
12303 if (TREE_CODE (arg01) == REAL_CST)
12304 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12306 is_positive = tree_int_cst_sgn (arg01);
12308 /* (X - c) > X becomes false. */
12309 if (code == GT_EXPR
12310 && ((code0 == MINUS_EXPR && is_positive >= 0)
12311 || (code0 == PLUS_EXPR && is_positive <= 0)))
12313 if (TREE_CODE (arg01) == INTEGER_CST
12314 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12315 fold_overflow_warning (("assuming signed overflow does not "
12316 "occur when assuming that (X - c) > X "
12317 "is always false"),
12318 WARN_STRICT_OVERFLOW_ALL);
12319 return constant_boolean_node (0, type);
12322 /* Likewise (X + c) < X becomes false. */
12323 if (code == LT_EXPR
12324 && ((code0 == PLUS_EXPR && is_positive >= 0)
12325 || (code0 == MINUS_EXPR && is_positive <= 0)))
12327 if (TREE_CODE (arg01) == INTEGER_CST
12328 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12329 fold_overflow_warning (("assuming signed overflow does not "
12330 "occur when assuming that "
12331 "(X + c) < X is always false"),
12332 WARN_STRICT_OVERFLOW_ALL);
12333 return constant_boolean_node (0, type);
12336 /* Convert (X - c) <= X to true. */
12337 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12339 && ((code0 == MINUS_EXPR && is_positive >= 0)
12340 || (code0 == PLUS_EXPR && is_positive <= 0)))
12342 if (TREE_CODE (arg01) == INTEGER_CST
12343 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12344 fold_overflow_warning (("assuming signed overflow does not "
12345 "occur when assuming that "
12346 "(X - c) <= X is always true"),
12347 WARN_STRICT_OVERFLOW_ALL);
12348 return constant_boolean_node (1, type);
12351 /* Convert (X + c) >= X to true. */
12352 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12354 && ((code0 == PLUS_EXPR && is_positive >= 0)
12355 || (code0 == MINUS_EXPR && is_positive <= 0)))
12357 if (TREE_CODE (arg01) == INTEGER_CST
12358 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12359 fold_overflow_warning (("assuming signed overflow does not "
12360 "occur when assuming that "
12361 "(X + c) >= X is always true"),
12362 WARN_STRICT_OVERFLOW_ALL);
12363 return constant_boolean_node (1, type);
12366 if (TREE_CODE (arg01) == INTEGER_CST)
12368 /* Convert X + c > X and X - c < X to true for integers. */
12369 if (code == GT_EXPR
12370 && ((code0 == PLUS_EXPR && is_positive > 0)
12371 || (code0 == MINUS_EXPR && is_positive < 0)))
12373 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12374 fold_overflow_warning (("assuming signed overflow does "
12375 "not occur when assuming that "
12376 "(X + c) > X is always true"),
12377 WARN_STRICT_OVERFLOW_ALL);
12378 return constant_boolean_node (1, type);
12381 if (code == LT_EXPR
12382 && ((code0 == MINUS_EXPR && is_positive > 0)
12383 || (code0 == PLUS_EXPR && is_positive < 0)))
12385 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12386 fold_overflow_warning (("assuming signed overflow does "
12387 "not occur when assuming that "
12388 "(X - c) < X is always true"),
12389 WARN_STRICT_OVERFLOW_ALL);
12390 return constant_boolean_node (1, type);
12393 /* Convert X + c <= X and X - c >= X to false for integers. */
12394 if (code == LE_EXPR
12395 && ((code0 == PLUS_EXPR && is_positive > 0)
12396 || (code0 == MINUS_EXPR && is_positive < 0)))
12398 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12399 fold_overflow_warning (("assuming signed overflow does "
12400 "not occur when assuming that "
12401 "(X + c) <= X is always false"),
12402 WARN_STRICT_OVERFLOW_ALL);
12403 return constant_boolean_node (0, type);
12406 if (code == GE_EXPR
12407 && ((code0 == MINUS_EXPR && is_positive > 0)
12408 || (code0 == PLUS_EXPR && is_positive < 0)))
12410 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12411 fold_overflow_warning (("assuming signed overflow does "
12412 "not occur when assuming that "
12413 "(X - c) >= X is always false"),
12414 WARN_STRICT_OVERFLOW_ALL);
12415 return constant_boolean_node (0, type);
12420 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12421 This transformation affects the cases which are handled in later
12422 optimizations involving comparisons with non-negative constants. */
12423 if (TREE_CODE (arg1) == INTEGER_CST
12424 && TREE_CODE (arg0) != INTEGER_CST
12425 && tree_int_cst_sgn (arg1) > 0)
12427 if (code == GE_EXPR)
12429 arg1 = const_binop (MINUS_EXPR, arg1,
12430 build_int_cst (TREE_TYPE (arg1), 1), 0);
12431 return fold_build2 (GT_EXPR, type, arg0,
12432 fold_convert (TREE_TYPE (arg0), arg1));
12434 if (code == LT_EXPR)
12436 arg1 = const_binop (MINUS_EXPR, arg1,
12437 build_int_cst (TREE_TYPE (arg1), 1), 0);
12438 return fold_build2 (LE_EXPR, type, arg0,
12439 fold_convert (TREE_TYPE (arg0), arg1));
12443 /* Comparisons with the highest or lowest possible integer of
12444 the specified precision will have known values. */
12446 tree arg1_type = TREE_TYPE (arg1);
12447 unsigned int width = TYPE_PRECISION (arg1_type);
12449 if (TREE_CODE (arg1) == INTEGER_CST
12450 && !TREE_OVERFLOW (arg1)
12451 && width <= 2 * HOST_BITS_PER_WIDE_INT
12452 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12454 HOST_WIDE_INT signed_max_hi;
12455 unsigned HOST_WIDE_INT signed_max_lo;
12456 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12458 if (width <= HOST_BITS_PER_WIDE_INT)
12460 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12465 if (TYPE_UNSIGNED (arg1_type))
12467 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12473 max_lo = signed_max_lo;
12474 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12480 width -= HOST_BITS_PER_WIDE_INT;
12481 signed_max_lo = -1;
12482 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12487 if (TYPE_UNSIGNED (arg1_type))
12489 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12494 max_hi = signed_max_hi;
12495 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12499 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12500 && TREE_INT_CST_LOW (arg1) == max_lo)
12504 return omit_one_operand (type, integer_zero_node, arg0);
12507 return fold_build2 (EQ_EXPR, type, op0, op1);
12510 return omit_one_operand (type, integer_one_node, arg0);
12513 return fold_build2 (NE_EXPR, type, op0, op1);
12515 /* The GE_EXPR and LT_EXPR cases above are not normally
12516 reached because of previous transformations. */
12521 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12523 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12527 arg1 = const_binop (PLUS_EXPR, arg1,
12528 build_int_cst (TREE_TYPE (arg1), 1), 0);
12529 return fold_build2 (EQ_EXPR, type,
12530 fold_convert (TREE_TYPE (arg1), arg0),
12533 arg1 = const_binop (PLUS_EXPR, arg1,
12534 build_int_cst (TREE_TYPE (arg1), 1), 0);
12535 return fold_build2 (NE_EXPR, type,
12536 fold_convert (TREE_TYPE (arg1), arg0),
12541 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12543 && TREE_INT_CST_LOW (arg1) == min_lo)
12547 return omit_one_operand (type, integer_zero_node, arg0);
12550 return fold_build2 (EQ_EXPR, type, op0, op1);
12553 return omit_one_operand (type, integer_one_node, arg0);
12556 return fold_build2 (NE_EXPR, type, op0, op1);
12561 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12563 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12567 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12568 return fold_build2 (NE_EXPR, type,
12569 fold_convert (TREE_TYPE (arg1), arg0),
12572 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12573 return fold_build2 (EQ_EXPR, type,
12574 fold_convert (TREE_TYPE (arg1), arg0),
12580 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12581 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12582 && TYPE_UNSIGNED (arg1_type)
12583 /* We will flip the signedness of the comparison operator
12584 associated with the mode of arg1, so the sign bit is
12585 specified by this mode. Check that arg1 is the signed
12586 max associated with this sign bit. */
12587 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12588 /* signed_type does not work on pointer types. */
12589 && INTEGRAL_TYPE_P (arg1_type))
12591 /* The following case also applies to X < signed_max+1
12592 and X >= signed_max+1 because previous transformations. */
12593 if (code == LE_EXPR || code == GT_EXPR)
12596 st = signed_type_for (TREE_TYPE (arg1));
12597 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12598 type, fold_convert (st, arg0),
12599 build_int_cst (st, 0));
12605 /* If we are comparing an ABS_EXPR with a constant, we can
12606 convert all the cases into explicit comparisons, but they may
12607 well not be faster than doing the ABS and one comparison.
12608 But ABS (X) <= C is a range comparison, which becomes a subtraction
12609 and a comparison, and is probably faster. */
12610 if (code == LE_EXPR
12611 && TREE_CODE (arg1) == INTEGER_CST
12612 && TREE_CODE (arg0) == ABS_EXPR
12613 && ! TREE_SIDE_EFFECTS (arg0)
12614 && (0 != (tem = negate_expr (arg1)))
12615 && TREE_CODE (tem) == INTEGER_CST
12616 && !TREE_OVERFLOW (tem))
12617 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12618 build2 (GE_EXPR, type,
12619 TREE_OPERAND (arg0, 0), tem),
12620 build2 (LE_EXPR, type,
12621 TREE_OPERAND (arg0, 0), arg1));
12623 /* Convert ABS_EXPR<x> >= 0 to true. */
12624 strict_overflow_p = false;
12625 if (code == GE_EXPR
12626 && (integer_zerop (arg1)
12627 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12628 && real_zerop (arg1)))
12629 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12631 if (strict_overflow_p)
12632 fold_overflow_warning (("assuming signed overflow does not occur "
12633 "when simplifying comparison of "
12634 "absolute value and zero"),
12635 WARN_STRICT_OVERFLOW_CONDITIONAL);
12636 return omit_one_operand (type, integer_one_node, arg0);
12639 /* Convert ABS_EXPR<x> < 0 to false. */
12640 strict_overflow_p = false;
12641 if (code == LT_EXPR
12642 && (integer_zerop (arg1) || real_zerop (arg1))
12643 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12645 if (strict_overflow_p)
12646 fold_overflow_warning (("assuming signed overflow does not occur "
12647 "when simplifying comparison of "
12648 "absolute value and zero"),
12649 WARN_STRICT_OVERFLOW_CONDITIONAL);
12650 return omit_one_operand (type, integer_zero_node, arg0);
12653 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12654 and similarly for >= into !=. */
12655 if ((code == LT_EXPR || code == GE_EXPR)
12656 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12657 && TREE_CODE (arg1) == LSHIFT_EXPR
12658 && integer_onep (TREE_OPERAND (arg1, 0)))
12659 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12660 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12661 TREE_OPERAND (arg1, 1)),
12662 build_int_cst (TREE_TYPE (arg0), 0));
12664 if ((code == LT_EXPR || code == GE_EXPR)
12665 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12666 && (TREE_CODE (arg1) == NOP_EXPR
12667 || TREE_CODE (arg1) == CONVERT_EXPR)
12668 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12669 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12671 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12672 fold_convert (TREE_TYPE (arg0),
12673 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12674 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12676 build_int_cst (TREE_TYPE (arg0), 0));
12680 case UNORDERED_EXPR:
12688 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12690 t1 = fold_relational_const (code, type, arg0, arg1);
12691 if (t1 != NULL_TREE)
12695 /* If the first operand is NaN, the result is constant. */
12696 if (TREE_CODE (arg0) == REAL_CST
12697 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12698 && (code != LTGT_EXPR || ! flag_trapping_math))
12700 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12701 ? integer_zero_node
12702 : integer_one_node;
12703 return omit_one_operand (type, t1, arg1);
12706 /* If the second operand is NaN, the result is constant. */
12707 if (TREE_CODE (arg1) == REAL_CST
12708 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12709 && (code != LTGT_EXPR || ! flag_trapping_math))
12711 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12712 ? integer_zero_node
12713 : integer_one_node;
12714 return omit_one_operand (type, t1, arg0);
12717 /* Simplify unordered comparison of something with itself. */
12718 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12719 && operand_equal_p (arg0, arg1, 0))
12720 return constant_boolean_node (1, type);
12722 if (code == LTGT_EXPR
12723 && !flag_trapping_math
12724 && operand_equal_p (arg0, arg1, 0))
12725 return constant_boolean_node (0, type);
12727 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12729 tree targ0 = strip_float_extensions (arg0);
12730 tree targ1 = strip_float_extensions (arg1);
12731 tree newtype = TREE_TYPE (targ0);
12733 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12734 newtype = TREE_TYPE (targ1);
12736 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12737 return fold_build2 (code, type, fold_convert (newtype, targ0),
12738 fold_convert (newtype, targ1));
12743 case COMPOUND_EXPR:
12744 /* When pedantic, a compound expression can be neither an lvalue
12745 nor an integer constant expression. */
12746 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12748 /* Don't let (0, 0) be null pointer constant. */
12749 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12750 : fold_convert (type, arg1);
12751 return pedantic_non_lvalue (tem);
12754 if ((TREE_CODE (arg0) == REAL_CST
12755 && TREE_CODE (arg1) == REAL_CST)
12756 || (TREE_CODE (arg0) == INTEGER_CST
12757 && TREE_CODE (arg1) == INTEGER_CST))
12758 return build_complex (type, arg0, arg1);
12762 /* An ASSERT_EXPR should never be passed to fold_binary. */
12763 gcc_unreachable ();
12767 } /* switch (code) */
12770 /* Callback for walk_tree, looking for LABEL_EXPR.
12771 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12772 Do not check the sub-tree of GOTO_EXPR. */
12775 contains_label_1 (tree *tp,
12776 int *walk_subtrees,
12777 void *data ATTRIBUTE_UNUSED)
12779 switch (TREE_CODE (*tp))
12784 *walk_subtrees = 0;
12791 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12792 accessible from outside the sub-tree. Returns NULL_TREE if no
12793 addressable label is found. */
12796 contains_label_p (tree st)
12798 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12801 /* Fold a ternary expression of code CODE and type TYPE with operands
12802 OP0, OP1, and OP2. Return the folded expression if folding is
12803 successful. Otherwise, return NULL_TREE. */
12806 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12809 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12810 enum tree_code_class kind = TREE_CODE_CLASS (code);
12812 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12813 && TREE_CODE_LENGTH (code) == 3);
12815 /* Strip any conversions that don't change the mode. This is safe
12816 for every expression, except for a comparison expression because
12817 its signedness is derived from its operands. So, in the latter
12818 case, only strip conversions that don't change the signedness.
12820 Note that this is done as an internal manipulation within the
12821 constant folder, in order to find the simplest representation of
12822 the arguments so that their form can be studied. In any cases,
12823 the appropriate type conversions should be put back in the tree
12824 that will get out of the constant folder. */
12839 case COMPONENT_REF:
12840 if (TREE_CODE (arg0) == CONSTRUCTOR
12841 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12843 unsigned HOST_WIDE_INT idx;
12845 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12852 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12853 so all simple results must be passed through pedantic_non_lvalue. */
12854 if (TREE_CODE (arg0) == INTEGER_CST)
12856 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12857 tem = integer_zerop (arg0) ? op2 : op1;
12858 /* Only optimize constant conditions when the selected branch
12859 has the same type as the COND_EXPR. This avoids optimizing
12860 away "c ? x : throw", where the throw has a void type.
12861 Avoid throwing away that operand which contains label. */
12862 if ((!TREE_SIDE_EFFECTS (unused_op)
12863 || !contains_label_p (unused_op))
12864 && (! VOID_TYPE_P (TREE_TYPE (tem))
12865 || VOID_TYPE_P (type)))
12866 return pedantic_non_lvalue (tem);
12869 if (operand_equal_p (arg1, op2, 0))
12870 return pedantic_omit_one_operand (type, arg1, arg0);
12872 /* If we have A op B ? A : C, we may be able to convert this to a
12873 simpler expression, depending on the operation and the values
12874 of B and C. Signed zeros prevent all of these transformations,
12875 for reasons given above each one.
12877 Also try swapping the arguments and inverting the conditional. */
12878 if (COMPARISON_CLASS_P (arg0)
12879 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12880 arg1, TREE_OPERAND (arg0, 1))
12881 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12883 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12888 if (COMPARISON_CLASS_P (arg0)
12889 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12891 TREE_OPERAND (arg0, 1))
12892 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12894 tem = fold_truth_not_expr (arg0);
12895 if (tem && COMPARISON_CLASS_P (tem))
12897 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12903 /* If the second operand is simpler than the third, swap them
12904 since that produces better jump optimization results. */
12905 if (truth_value_p (TREE_CODE (arg0))
12906 && tree_swap_operands_p (op1, op2, false))
12908 /* See if this can be inverted. If it can't, possibly because
12909 it was a floating-point inequality comparison, don't do
12911 tem = fold_truth_not_expr (arg0);
12913 return fold_build3 (code, type, tem, op2, op1);
12916 /* Convert A ? 1 : 0 to simply A. */
12917 if (integer_onep (op1)
12918 && integer_zerop (op2)
12919 /* If we try to convert OP0 to our type, the
12920 call to fold will try to move the conversion inside
12921 a COND, which will recurse. In that case, the COND_EXPR
12922 is probably the best choice, so leave it alone. */
12923 && type == TREE_TYPE (arg0))
12924 return pedantic_non_lvalue (arg0);
12926 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12927 over COND_EXPR in cases such as floating point comparisons. */
12928 if (integer_zerop (op1)
12929 && integer_onep (op2)
12930 && truth_value_p (TREE_CODE (arg0)))
12931 return pedantic_non_lvalue (fold_convert (type,
12932 invert_truthvalue (arg0)));
12934 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12935 if (TREE_CODE (arg0) == LT_EXPR
12936 && integer_zerop (TREE_OPERAND (arg0, 1))
12937 && integer_zerop (op2)
12938 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12940 /* sign_bit_p only checks ARG1 bits within A's precision.
12941 If <sign bit of A> has wider type than A, bits outside
12942 of A's precision in <sign bit of A> need to be checked.
12943 If they are all 0, this optimization needs to be done
12944 in unsigned A's type, if they are all 1 in signed A's type,
12945 otherwise this can't be done. */
12946 if (TYPE_PRECISION (TREE_TYPE (tem))
12947 < TYPE_PRECISION (TREE_TYPE (arg1))
12948 && TYPE_PRECISION (TREE_TYPE (tem))
12949 < TYPE_PRECISION (type))
12951 unsigned HOST_WIDE_INT mask_lo;
12952 HOST_WIDE_INT mask_hi;
12953 int inner_width, outer_width;
12956 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12957 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12958 if (outer_width > TYPE_PRECISION (type))
12959 outer_width = TYPE_PRECISION (type);
12961 if (outer_width > HOST_BITS_PER_WIDE_INT)
12963 mask_hi = ((unsigned HOST_WIDE_INT) -1
12964 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12970 mask_lo = ((unsigned HOST_WIDE_INT) -1
12971 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12973 if (inner_width > HOST_BITS_PER_WIDE_INT)
12975 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12976 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12980 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12981 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12983 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12984 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12986 tem_type = signed_type_for (TREE_TYPE (tem));
12987 tem = fold_convert (tem_type, tem);
12989 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12990 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12992 tem_type = unsigned_type_for (TREE_TYPE (tem));
12993 tem = fold_convert (tem_type, tem);
13000 return fold_convert (type,
13001 fold_build2 (BIT_AND_EXPR,
13002 TREE_TYPE (tem), tem,
13003 fold_convert (TREE_TYPE (tem),
13007 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13008 already handled above. */
13009 if (TREE_CODE (arg0) == BIT_AND_EXPR
13010 && integer_onep (TREE_OPERAND (arg0, 1))
13011 && integer_zerop (op2)
13012 && integer_pow2p (arg1))
13014 tree tem = TREE_OPERAND (arg0, 0);
13016 if (TREE_CODE (tem) == RSHIFT_EXPR
13017 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13018 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13019 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13020 return fold_build2 (BIT_AND_EXPR, type,
13021 TREE_OPERAND (tem, 0), arg1);
13024 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13025 is probably obsolete because the first operand should be a
13026 truth value (that's why we have the two cases above), but let's
13027 leave it in until we can confirm this for all front-ends. */
13028 if (integer_zerop (op2)
13029 && TREE_CODE (arg0) == NE_EXPR
13030 && integer_zerop (TREE_OPERAND (arg0, 1))
13031 && integer_pow2p (arg1)
13032 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13033 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13034 arg1, OEP_ONLY_CONST))
13035 return pedantic_non_lvalue (fold_convert (type,
13036 TREE_OPERAND (arg0, 0)));
13038 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13039 if (integer_zerop (op2)
13040 && truth_value_p (TREE_CODE (arg0))
13041 && truth_value_p (TREE_CODE (arg1)))
13042 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13043 fold_convert (type, arg0),
13046 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13047 if (integer_onep (op2)
13048 && truth_value_p (TREE_CODE (arg0))
13049 && truth_value_p (TREE_CODE (arg1)))
13051 /* Only perform transformation if ARG0 is easily inverted. */
13052 tem = fold_truth_not_expr (arg0);
13054 return fold_build2 (TRUTH_ORIF_EXPR, type,
13055 fold_convert (type, tem),
13059 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13060 if (integer_zerop (arg1)
13061 && truth_value_p (TREE_CODE (arg0))
13062 && truth_value_p (TREE_CODE (op2)))
13064 /* Only perform transformation if ARG0 is easily inverted. */
13065 tem = fold_truth_not_expr (arg0);
13067 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13068 fold_convert (type, tem),
13072 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13073 if (integer_onep (arg1)
13074 && truth_value_p (TREE_CODE (arg0))
13075 && truth_value_p (TREE_CODE (op2)))
13076 return fold_build2 (TRUTH_ORIF_EXPR, type,
13077 fold_convert (type, arg0),
13083 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13084 of fold_ternary on them. */
13085 gcc_unreachable ();
13087 case BIT_FIELD_REF:
13088 if ((TREE_CODE (arg0) == VECTOR_CST
13089 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13090 && type == TREE_TYPE (TREE_TYPE (arg0)))
13092 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13093 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13096 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13097 && (idx % width) == 0
13098 && (idx = idx / width)
13099 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13101 tree elements = NULL_TREE;
13103 if (TREE_CODE (arg0) == VECTOR_CST)
13104 elements = TREE_VECTOR_CST_ELTS (arg0);
13107 unsigned HOST_WIDE_INT idx;
13110 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13111 elements = tree_cons (NULL_TREE, value, elements);
13113 while (idx-- > 0 && elements)
13114 elements = TREE_CHAIN (elements);
13116 return TREE_VALUE (elements);
13118 return fold_convert (type, integer_zero_node);
13125 } /* switch (code) */
13128 /* Perform constant folding and related simplification of EXPR.
13129 The related simplifications include x*1 => x, x*0 => 0, etc.,
13130 and application of the associative law.
13131 NOP_EXPR conversions may be removed freely (as long as we
13132 are careful not to change the type of the overall expression).
13133 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13134 but we can constant-fold them if they have constant operands. */
13136 #ifdef ENABLE_FOLD_CHECKING
13137 # define fold(x) fold_1 (x)
13138 static tree fold_1 (tree);
13144 const tree t = expr;
13145 enum tree_code code = TREE_CODE (t);
13146 enum tree_code_class kind = TREE_CODE_CLASS (code);
13149 /* Return right away if a constant. */
13150 if (kind == tcc_constant)
13153 /* CALL_EXPR-like objects with variable numbers of operands are
13154 treated specially. */
13155 if (kind == tcc_vl_exp)
13157 if (code == CALL_EXPR)
13159 tem = fold_call_expr (expr, false);
13160 return tem ? tem : expr;
13165 if (IS_EXPR_CODE_CLASS (kind)
13166 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13168 tree type = TREE_TYPE (t);
13169 tree op0, op1, op2;
13171 switch (TREE_CODE_LENGTH (code))
13174 op0 = TREE_OPERAND (t, 0);
13175 tem = fold_unary (code, type, op0);
13176 return tem ? tem : expr;
13178 op0 = TREE_OPERAND (t, 0);
13179 op1 = TREE_OPERAND (t, 1);
13180 tem = fold_binary (code, type, op0, op1);
13181 return tem ? tem : expr;
13183 op0 = TREE_OPERAND (t, 0);
13184 op1 = TREE_OPERAND (t, 1);
13185 op2 = TREE_OPERAND (t, 2);
13186 tem = fold_ternary (code, type, op0, op1, op2);
13187 return tem ? tem : expr;
13197 tree op0 = TREE_OPERAND (t, 0);
13198 tree op1 = TREE_OPERAND (t, 1);
13200 if (TREE_CODE (op1) == INTEGER_CST
13201 && TREE_CODE (op0) == CONSTRUCTOR
13202 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13204 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13205 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13206 unsigned HOST_WIDE_INT begin = 0;
13208 /* Find a matching index by means of a binary search. */
13209 while (begin != end)
13211 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13212 tree index = VEC_index (constructor_elt, elts, middle)->index;
13214 if (TREE_CODE (index) == INTEGER_CST
13215 && tree_int_cst_lt (index, op1))
13216 begin = middle + 1;
13217 else if (TREE_CODE (index) == INTEGER_CST
13218 && tree_int_cst_lt (op1, index))
13220 else if (TREE_CODE (index) == RANGE_EXPR
13221 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13222 begin = middle + 1;
13223 else if (TREE_CODE (index) == RANGE_EXPR
13224 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13227 return VEC_index (constructor_elt, elts, middle)->value;
13235 return fold (DECL_INITIAL (t));
13239 } /* switch (code) */
13242 #ifdef ENABLE_FOLD_CHECKING
13245 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13246 static void fold_check_failed (const_tree, const_tree);
13247 void print_fold_checksum (const_tree);
13249 /* When --enable-checking=fold, compute a digest of expr before
13250 and after actual fold call to see if fold did not accidentally
13251 change original expr. */
13257 struct md5_ctx ctx;
13258 unsigned char checksum_before[16], checksum_after[16];
13261 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13262 md5_init_ctx (&ctx);
13263 fold_checksum_tree (expr, &ctx, ht);
13264 md5_finish_ctx (&ctx, checksum_before);
13267 ret = fold_1 (expr);
13269 md5_init_ctx (&ctx);
13270 fold_checksum_tree (expr, &ctx, ht);
13271 md5_finish_ctx (&ctx, checksum_after);
13274 if (memcmp (checksum_before, checksum_after, 16))
13275 fold_check_failed (expr, ret);
13281 print_fold_checksum (const_tree expr)
13283 struct md5_ctx ctx;
13284 unsigned char checksum[16], cnt;
13287 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13288 md5_init_ctx (&ctx);
13289 fold_checksum_tree (expr, &ctx, ht);
13290 md5_finish_ctx (&ctx, checksum);
13292 for (cnt = 0; cnt < 16; ++cnt)
13293 fprintf (stderr, "%02x", checksum[cnt]);
13294 putc ('\n', stderr);
13298 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13300 internal_error ("fold check: original tree changed by fold");
13304 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13307 enum tree_code code;
13308 struct tree_function_decl buf;
13313 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13314 <= sizeof (struct tree_function_decl))
13315 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13318 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13322 code = TREE_CODE (expr);
13323 if (TREE_CODE_CLASS (code) == tcc_declaration
13324 && DECL_ASSEMBLER_NAME_SET_P (expr))
13326 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13327 memcpy ((char *) &buf, expr, tree_size (expr));
13328 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13329 expr = (tree) &buf;
13331 else if (TREE_CODE_CLASS (code) == tcc_type
13332 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13333 || TYPE_CACHED_VALUES_P (expr)
13334 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13336 /* Allow these fields to be modified. */
13338 memcpy ((char *) &buf, expr, tree_size (expr));
13339 expr = tmp = (tree) &buf;
13340 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13341 TYPE_POINTER_TO (tmp) = NULL;
13342 TYPE_REFERENCE_TO (tmp) = NULL;
13343 if (TYPE_CACHED_VALUES_P (tmp))
13345 TYPE_CACHED_VALUES_P (tmp) = 0;
13346 TYPE_CACHED_VALUES (tmp) = NULL;
13349 md5_process_bytes (expr, tree_size (expr), ctx);
13350 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13351 if (TREE_CODE_CLASS (code) != tcc_type
13352 && TREE_CODE_CLASS (code) != tcc_declaration
13353 && code != TREE_LIST
13354 && code != SSA_NAME)
13355 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13356 switch (TREE_CODE_CLASS (code))
13362 md5_process_bytes (TREE_STRING_POINTER (expr),
13363 TREE_STRING_LENGTH (expr), ctx);
13366 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13367 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13370 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13376 case tcc_exceptional:
13380 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13381 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13382 expr = TREE_CHAIN (expr);
13383 goto recursive_label;
13386 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13387 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13393 case tcc_expression:
13394 case tcc_reference:
13395 case tcc_comparison:
13398 case tcc_statement:
13400 len = TREE_OPERAND_LENGTH (expr);
13401 for (i = 0; i < len; ++i)
13402 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13404 case tcc_declaration:
13405 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13406 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13407 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13409 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13410 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13411 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13412 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13413 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13415 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13416 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13418 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13420 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13421 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13422 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13426 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13427 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13428 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13429 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13430 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13431 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13432 if (INTEGRAL_TYPE_P (expr)
13433 || SCALAR_FLOAT_TYPE_P (expr))
13435 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13436 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13438 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13439 if (TREE_CODE (expr) == RECORD_TYPE
13440 || TREE_CODE (expr) == UNION_TYPE
13441 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13442 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13443 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13450 /* Helper function for outputting the checksum of a tree T. When
13451 debugging with gdb, you can "define mynext" to be "next" followed
13452 by "call debug_fold_checksum (op0)", then just trace down till the
13456 debug_fold_checksum (const_tree t)
13459 unsigned char checksum[16];
13460 struct md5_ctx ctx;
13461 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13463 md5_init_ctx (&ctx);
13464 fold_checksum_tree (t, &ctx, ht);
13465 md5_finish_ctx (&ctx, checksum);
13468 for (i = 0; i < 16; i++)
13469 fprintf (stderr, "%d ", checksum[i]);
13471 fprintf (stderr, "\n");
13476 /* Fold a unary tree expression with code CODE of type TYPE with an
13477 operand OP0. Return a folded expression if successful. Otherwise,
13478 return a tree expression with code CODE of type TYPE with an
13482 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13485 #ifdef ENABLE_FOLD_CHECKING
13486 unsigned char checksum_before[16], checksum_after[16];
13487 struct md5_ctx ctx;
13490 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13491 md5_init_ctx (&ctx);
13492 fold_checksum_tree (op0, &ctx, ht);
13493 md5_finish_ctx (&ctx, checksum_before);
13497 tem = fold_unary (code, type, op0);
13499 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13501 #ifdef ENABLE_FOLD_CHECKING
13502 md5_init_ctx (&ctx);
13503 fold_checksum_tree (op0, &ctx, ht);
13504 md5_finish_ctx (&ctx, checksum_after);
13507 if (memcmp (checksum_before, checksum_after, 16))
13508 fold_check_failed (op0, tem);
13513 /* Fold a binary tree expression with code CODE of type TYPE with
13514 operands OP0 and OP1. Return a folded expression if successful.
13515 Otherwise, return a tree expression with code CODE of type TYPE
13516 with operands OP0 and OP1. */
13519 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13523 #ifdef ENABLE_FOLD_CHECKING
13524 unsigned char checksum_before_op0[16],
13525 checksum_before_op1[16],
13526 checksum_after_op0[16],
13527 checksum_after_op1[16];
13528 struct md5_ctx ctx;
13531 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13532 md5_init_ctx (&ctx);
13533 fold_checksum_tree (op0, &ctx, ht);
13534 md5_finish_ctx (&ctx, checksum_before_op0);
13537 md5_init_ctx (&ctx);
13538 fold_checksum_tree (op1, &ctx, ht);
13539 md5_finish_ctx (&ctx, checksum_before_op1);
13543 tem = fold_binary (code, type, op0, op1);
13545 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13547 #ifdef ENABLE_FOLD_CHECKING
13548 md5_init_ctx (&ctx);
13549 fold_checksum_tree (op0, &ctx, ht);
13550 md5_finish_ctx (&ctx, checksum_after_op0);
13553 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13554 fold_check_failed (op0, tem);
13556 md5_init_ctx (&ctx);
13557 fold_checksum_tree (op1, &ctx, ht);
13558 md5_finish_ctx (&ctx, checksum_after_op1);
13561 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13562 fold_check_failed (op1, tem);
13567 /* Fold a ternary tree expression with code CODE of type TYPE with
13568 operands OP0, OP1, and OP2. Return a folded expression if
13569 successful. Otherwise, return a tree expression with code CODE of
13570 type TYPE with operands OP0, OP1, and OP2. */
13573 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13577 #ifdef ENABLE_FOLD_CHECKING
13578 unsigned char checksum_before_op0[16],
13579 checksum_before_op1[16],
13580 checksum_before_op2[16],
13581 checksum_after_op0[16],
13582 checksum_after_op1[16],
13583 checksum_after_op2[16];
13584 struct md5_ctx ctx;
13587 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13588 md5_init_ctx (&ctx);
13589 fold_checksum_tree (op0, &ctx, ht);
13590 md5_finish_ctx (&ctx, checksum_before_op0);
13593 md5_init_ctx (&ctx);
13594 fold_checksum_tree (op1, &ctx, ht);
13595 md5_finish_ctx (&ctx, checksum_before_op1);
13598 md5_init_ctx (&ctx);
13599 fold_checksum_tree (op2, &ctx, ht);
13600 md5_finish_ctx (&ctx, checksum_before_op2);
13604 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13605 tem = fold_ternary (code, type, op0, op1, op2);
13607 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13609 #ifdef ENABLE_FOLD_CHECKING
13610 md5_init_ctx (&ctx);
13611 fold_checksum_tree (op0, &ctx, ht);
13612 md5_finish_ctx (&ctx, checksum_after_op0);
13615 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13616 fold_check_failed (op0, tem);
13618 md5_init_ctx (&ctx);
13619 fold_checksum_tree (op1, &ctx, ht);
13620 md5_finish_ctx (&ctx, checksum_after_op1);
13623 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13624 fold_check_failed (op1, tem);
13626 md5_init_ctx (&ctx);
13627 fold_checksum_tree (op2, &ctx, ht);
13628 md5_finish_ctx (&ctx, checksum_after_op2);
13631 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13632 fold_check_failed (op2, tem);
13637 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13638 arguments in ARGARRAY, and a null static chain.
13639 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13640 of type TYPE from the given operands as constructed by build_call_array. */
13643 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13646 #ifdef ENABLE_FOLD_CHECKING
13647 unsigned char checksum_before_fn[16],
13648 checksum_before_arglist[16],
13649 checksum_after_fn[16],
13650 checksum_after_arglist[16];
13651 struct md5_ctx ctx;
13655 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13656 md5_init_ctx (&ctx);
13657 fold_checksum_tree (fn, &ctx, ht);
13658 md5_finish_ctx (&ctx, checksum_before_fn);
13661 md5_init_ctx (&ctx);
13662 for (i = 0; i < nargs; i++)
13663 fold_checksum_tree (argarray[i], &ctx, ht);
13664 md5_finish_ctx (&ctx, checksum_before_arglist);
13668 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13670 #ifdef ENABLE_FOLD_CHECKING
13671 md5_init_ctx (&ctx);
13672 fold_checksum_tree (fn, &ctx, ht);
13673 md5_finish_ctx (&ctx, checksum_after_fn);
13676 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13677 fold_check_failed (fn, tem);
13679 md5_init_ctx (&ctx);
13680 for (i = 0; i < nargs; i++)
13681 fold_checksum_tree (argarray[i], &ctx, ht);
13682 md5_finish_ctx (&ctx, checksum_after_arglist);
13685 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13686 fold_check_failed (NULL_TREE, tem);
13691 /* Perform constant folding and related simplification of initializer
13692 expression EXPR. These behave identically to "fold_buildN" but ignore
13693 potential run-time traps and exceptions that fold must preserve. */
13695 #define START_FOLD_INIT \
13696 int saved_signaling_nans = flag_signaling_nans;\
13697 int saved_trapping_math = flag_trapping_math;\
13698 int saved_rounding_math = flag_rounding_math;\
13699 int saved_trapv = flag_trapv;\
13700 int saved_folding_initializer = folding_initializer;\
13701 flag_signaling_nans = 0;\
13702 flag_trapping_math = 0;\
13703 flag_rounding_math = 0;\
13705 folding_initializer = 1;
13707 #define END_FOLD_INIT \
13708 flag_signaling_nans = saved_signaling_nans;\
13709 flag_trapping_math = saved_trapping_math;\
13710 flag_rounding_math = saved_rounding_math;\
13711 flag_trapv = saved_trapv;\
13712 folding_initializer = saved_folding_initializer;
13715 fold_build1_initializer (enum tree_code code, tree type, tree op)
13720 result = fold_build1 (code, type, op);
13727 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13732 result = fold_build2 (code, type, op0, op1);
13739 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13745 result = fold_build3 (code, type, op0, op1, op2);
13752 fold_build_call_array_initializer (tree type, tree fn,
13753 int nargs, tree *argarray)
13758 result = fold_build_call_array (type, fn, nargs, argarray);
13764 #undef START_FOLD_INIT
13765 #undef END_FOLD_INIT
13767 /* Determine if first argument is a multiple of second argument. Return 0 if
13768 it is not, or we cannot easily determined it to be.
13770 An example of the sort of thing we care about (at this point; this routine
13771 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13772 fold cases do now) is discovering that
13774 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13780 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13782 This code also handles discovering that
13784 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13786 is a multiple of 8 so we don't have to worry about dealing with a
13787 possible remainder.
13789 Note that we *look* inside a SAVE_EXPR only to determine how it was
13790 calculated; it is not safe for fold to do much of anything else with the
13791 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13792 at run time. For example, the latter example above *cannot* be implemented
13793 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13794 evaluation time of the original SAVE_EXPR is not necessarily the same at
13795 the time the new expression is evaluated. The only optimization of this
13796 sort that would be valid is changing
13798 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13802 SAVE_EXPR (I) * SAVE_EXPR (J)
13804 (where the same SAVE_EXPR (J) is used in the original and the
13805 transformed version). */
13808 multiple_of_p (tree type, const_tree top, const_tree bottom)
13810 if (operand_equal_p (top, bottom, 0))
13813 if (TREE_CODE (type) != INTEGER_TYPE)
13816 switch (TREE_CODE (top))
13819 /* Bitwise and provides a power of two multiple. If the mask is
13820 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13821 if (!integer_pow2p (bottom))
13826 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13827 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13831 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13832 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13835 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13839 op1 = TREE_OPERAND (top, 1);
13840 /* const_binop may not detect overflow correctly,
13841 so check for it explicitly here. */
13842 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13843 > TREE_INT_CST_LOW (op1)
13844 && TREE_INT_CST_HIGH (op1) == 0
13845 && 0 != (t1 = fold_convert (type,
13846 const_binop (LSHIFT_EXPR,
13849 && !TREE_OVERFLOW (t1))
13850 return multiple_of_p (type, t1, bottom);
13855 /* Can't handle conversions from non-integral or wider integral type. */
13856 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13857 || (TYPE_PRECISION (type)
13858 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13861 /* .. fall through ... */
13864 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13867 if (TREE_CODE (bottom) != INTEGER_CST
13868 || integer_zerop (bottom)
13869 || (TYPE_UNSIGNED (type)
13870 && (tree_int_cst_sgn (top) < 0
13871 || tree_int_cst_sgn (bottom) < 0)))
13873 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13881 /* Return true if CODE or TYPE is known to be non-negative. */
13884 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13886 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13887 && truth_value_p (code))
13888 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13889 have a signed:1 type (where the value is -1 and 0). */
13894 /* Return true if (CODE OP0) is known to be non-negative. If the return
13895 value is based on the assumption that signed overflow is undefined,
13896 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13897 *STRICT_OVERFLOW_P. */
13900 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13901 bool *strict_overflow_p)
13903 if (TYPE_UNSIGNED (type))
13909 /* We can't return 1 if flag_wrapv is set because
13910 ABS_EXPR<INT_MIN> = INT_MIN. */
13911 if (!INTEGRAL_TYPE_P (type))
13913 if (TYPE_OVERFLOW_UNDEFINED (type))
13915 *strict_overflow_p = true;
13920 case NON_LVALUE_EXPR:
13922 case FIX_TRUNC_EXPR:
13923 return tree_expr_nonnegative_warnv_p (op0,
13924 strict_overflow_p);
13928 tree inner_type = TREE_TYPE (op0);
13929 tree outer_type = type;
13931 if (TREE_CODE (outer_type) == REAL_TYPE)
13933 if (TREE_CODE (inner_type) == REAL_TYPE)
13934 return tree_expr_nonnegative_warnv_p (op0,
13935 strict_overflow_p);
13936 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13938 if (TYPE_UNSIGNED (inner_type))
13940 return tree_expr_nonnegative_warnv_p (op0,
13941 strict_overflow_p);
13944 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13946 if (TREE_CODE (inner_type) == REAL_TYPE)
13947 return tree_expr_nonnegative_warnv_p (op0,
13948 strict_overflow_p);
13949 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13950 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13951 && TYPE_UNSIGNED (inner_type);
13957 return tree_simple_nonnegative_warnv_p (code, type);
13960 /* We don't know sign of `t', so be conservative and return false. */
13964 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13965 value is based on the assumption that signed overflow is undefined,
13966 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13967 *STRICT_OVERFLOW_P. */
13970 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13971 tree op1, bool *strict_overflow_p)
13973 if (TYPE_UNSIGNED (type))
13978 case POINTER_PLUS_EXPR:
13980 if (FLOAT_TYPE_P (type))
13981 return (tree_expr_nonnegative_warnv_p (op0,
13983 && tree_expr_nonnegative_warnv_p (op1,
13984 strict_overflow_p));
13986 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13987 both unsigned and at least 2 bits shorter than the result. */
13988 if (TREE_CODE (type) == INTEGER_TYPE
13989 && TREE_CODE (op0) == NOP_EXPR
13990 && TREE_CODE (op1) == NOP_EXPR)
13992 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13993 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13994 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13995 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13997 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13998 TYPE_PRECISION (inner2)) + 1;
13999 return prec < TYPE_PRECISION (type);
14005 if (FLOAT_TYPE_P (type))
14007 /* x * x for floating point x is always non-negative. */
14008 if (operand_equal_p (op0, op1, 0))
14010 return (tree_expr_nonnegative_warnv_p (op0,
14012 && tree_expr_nonnegative_warnv_p (op1,
14013 strict_overflow_p));
14016 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14017 both unsigned and their total bits is shorter than the result. */
14018 if (TREE_CODE (type) == INTEGER_TYPE
14019 && TREE_CODE (op0) == NOP_EXPR
14020 && TREE_CODE (op1) == NOP_EXPR)
14022 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14023 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14024 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14025 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14026 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14027 < TYPE_PRECISION (type);
14033 return (tree_expr_nonnegative_warnv_p (op0,
14035 || tree_expr_nonnegative_warnv_p (op1,
14036 strict_overflow_p));
14042 case TRUNC_DIV_EXPR:
14043 case CEIL_DIV_EXPR:
14044 case FLOOR_DIV_EXPR:
14045 case ROUND_DIV_EXPR:
14046 return (tree_expr_nonnegative_warnv_p (op0,
14048 && tree_expr_nonnegative_warnv_p (op1,
14049 strict_overflow_p));
14051 case TRUNC_MOD_EXPR:
14052 case CEIL_MOD_EXPR:
14053 case FLOOR_MOD_EXPR:
14054 case ROUND_MOD_EXPR:
14055 return tree_expr_nonnegative_warnv_p (op0,
14056 strict_overflow_p);
14058 return tree_simple_nonnegative_warnv_p (code, type);
14061 /* We don't know sign of `t', so be conservative and return false. */
14065 /* Return true if T is known to be non-negative. If the return
14066 value is based on the assumption that signed overflow is undefined,
14067 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14068 *STRICT_OVERFLOW_P. */
14071 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14073 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14076 switch (TREE_CODE (t))
14079 /* Query VRP to see if it has recorded any information about
14080 the range of this object. */
14081 return ssa_name_nonnegative_p (t);
14084 return tree_int_cst_sgn (t) >= 0;
14087 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14090 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14093 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14095 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14096 strict_overflow_p));
14098 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14101 /* We don't know sign of `t', so be conservative and return false. */
14105 /* Return true if T is known to be non-negative. If the return
14106 value is based on the assumption that signed overflow is undefined,
14107 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14108 *STRICT_OVERFLOW_P. */
14111 tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
14112 tree arg0, tree arg1, bool *strict_overflow_p)
14114 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14115 switch (DECL_FUNCTION_CODE (fndecl))
14117 CASE_FLT_FN (BUILT_IN_ACOS):
14118 CASE_FLT_FN (BUILT_IN_ACOSH):
14119 CASE_FLT_FN (BUILT_IN_CABS):
14120 CASE_FLT_FN (BUILT_IN_COSH):
14121 CASE_FLT_FN (BUILT_IN_ERFC):
14122 CASE_FLT_FN (BUILT_IN_EXP):
14123 CASE_FLT_FN (BUILT_IN_EXP10):
14124 CASE_FLT_FN (BUILT_IN_EXP2):
14125 CASE_FLT_FN (BUILT_IN_FABS):
14126 CASE_FLT_FN (BUILT_IN_FDIM):
14127 CASE_FLT_FN (BUILT_IN_HYPOT):
14128 CASE_FLT_FN (BUILT_IN_POW10):
14129 CASE_INT_FN (BUILT_IN_FFS):
14130 CASE_INT_FN (BUILT_IN_PARITY):
14131 CASE_INT_FN (BUILT_IN_POPCOUNT):
14132 case BUILT_IN_BSWAP32:
14133 case BUILT_IN_BSWAP64:
14137 CASE_FLT_FN (BUILT_IN_SQRT):
14138 /* sqrt(-0.0) is -0.0. */
14139 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14141 return tree_expr_nonnegative_warnv_p (arg0,
14142 strict_overflow_p);
14144 CASE_FLT_FN (BUILT_IN_ASINH):
14145 CASE_FLT_FN (BUILT_IN_ATAN):
14146 CASE_FLT_FN (BUILT_IN_ATANH):
14147 CASE_FLT_FN (BUILT_IN_CBRT):
14148 CASE_FLT_FN (BUILT_IN_CEIL):
14149 CASE_FLT_FN (BUILT_IN_ERF):
14150 CASE_FLT_FN (BUILT_IN_EXPM1):
14151 CASE_FLT_FN (BUILT_IN_FLOOR):
14152 CASE_FLT_FN (BUILT_IN_FMOD):
14153 CASE_FLT_FN (BUILT_IN_FREXP):
14154 CASE_FLT_FN (BUILT_IN_LCEIL):
14155 CASE_FLT_FN (BUILT_IN_LDEXP):
14156 CASE_FLT_FN (BUILT_IN_LFLOOR):
14157 CASE_FLT_FN (BUILT_IN_LLCEIL):
14158 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14159 CASE_FLT_FN (BUILT_IN_LLRINT):
14160 CASE_FLT_FN (BUILT_IN_LLROUND):
14161 CASE_FLT_FN (BUILT_IN_LRINT):
14162 CASE_FLT_FN (BUILT_IN_LROUND):
14163 CASE_FLT_FN (BUILT_IN_MODF):
14164 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14165 CASE_FLT_FN (BUILT_IN_RINT):
14166 CASE_FLT_FN (BUILT_IN_ROUND):
14167 CASE_FLT_FN (BUILT_IN_SCALB):
14168 CASE_FLT_FN (BUILT_IN_SCALBLN):
14169 CASE_FLT_FN (BUILT_IN_SCALBN):
14170 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14171 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14172 CASE_FLT_FN (BUILT_IN_SINH):
14173 CASE_FLT_FN (BUILT_IN_TANH):
14174 CASE_FLT_FN (BUILT_IN_TRUNC):
14175 /* True if the 1st argument is nonnegative. */
14176 return tree_expr_nonnegative_warnv_p (arg0,
14177 strict_overflow_p);
14179 CASE_FLT_FN (BUILT_IN_FMAX):
14180 /* True if the 1st OR 2nd arguments are nonnegative. */
14181 return (tree_expr_nonnegative_warnv_p (arg0,
14183 || (tree_expr_nonnegative_warnv_p (arg1,
14184 strict_overflow_p)));
14186 CASE_FLT_FN (BUILT_IN_FMIN):
14187 /* True if the 1st AND 2nd arguments are nonnegative. */
14188 return (tree_expr_nonnegative_warnv_p (arg0,
14190 && (tree_expr_nonnegative_warnv_p (arg1,
14191 strict_overflow_p)));
14193 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14194 /* True if the 2nd argument is nonnegative. */
14195 return tree_expr_nonnegative_warnv_p (arg1,
14196 strict_overflow_p);
14198 CASE_FLT_FN (BUILT_IN_POWI):
14199 /* True if the 1st argument is nonnegative or the second
14200 argument is an even integer. */
14201 if (TREE_CODE (arg1) == INTEGER_CST
14202 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14204 return tree_expr_nonnegative_warnv_p (arg0,
14205 strict_overflow_p);
14207 CASE_FLT_FN (BUILT_IN_POW):
14208 /* True if the 1st argument is nonnegative or the second
14209 argument is an even integer valued real. */
14210 if (TREE_CODE (arg1) == REAL_CST)
14215 c = TREE_REAL_CST (arg1);
14216 n = real_to_integer (&c);
14219 REAL_VALUE_TYPE cint;
14220 real_from_integer (&cint, VOIDmode, n,
14221 n < 0 ? -1 : 0, 0);
14222 if (real_identical (&c, &cint))
14226 return tree_expr_nonnegative_warnv_p (arg0,
14227 strict_overflow_p);
14232 return tree_simple_nonnegative_warnv_p (code,
14236 /* Return true if T is known to be non-negative. If the return
14237 value is based on the assumption that signed overflow is undefined,
14238 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14239 *STRICT_OVERFLOW_P. */
14242 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14244 enum tree_code code = TREE_CODE (t);
14245 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14252 tree temp = TARGET_EXPR_SLOT (t);
14253 t = TARGET_EXPR_INITIAL (t);
14255 /* If the initializer is non-void, then it's a normal expression
14256 that will be assigned to the slot. */
14257 if (!VOID_TYPE_P (t))
14258 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14260 /* Otherwise, the initializer sets the slot in some way. One common
14261 way is an assignment statement at the end of the initializer. */
14264 if (TREE_CODE (t) == BIND_EXPR)
14265 t = expr_last (BIND_EXPR_BODY (t));
14266 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14267 || TREE_CODE (t) == TRY_CATCH_EXPR)
14268 t = expr_last (TREE_OPERAND (t, 0));
14269 else if (TREE_CODE (t) == STATEMENT_LIST)
14274 if ((TREE_CODE (t) == MODIFY_EXPR
14275 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14276 && GENERIC_TREE_OPERAND (t, 0) == temp)
14277 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14278 strict_overflow_p);
14285 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14286 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14288 return tree_call_nonnegative_warnv_p (TREE_CODE (t),
14290 get_callee_fndecl (t),
14293 strict_overflow_p);
14295 case COMPOUND_EXPR:
14297 case GIMPLE_MODIFY_STMT:
14298 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14299 strict_overflow_p);
14301 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14302 strict_overflow_p);
14304 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14305 strict_overflow_p);
14308 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14312 /* We don't know sign of `t', so be conservative and return false. */
14316 /* Return true if T is known to be non-negative. If the return
14317 value is based on the assumption that signed overflow is undefined,
14318 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14319 *STRICT_OVERFLOW_P. */
14322 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14324 enum tree_code code;
14325 if (t == error_mark_node)
14328 code = TREE_CODE (t);
14329 switch (TREE_CODE_CLASS (code))
14332 case tcc_comparison:
14333 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14335 TREE_OPERAND (t, 0),
14336 TREE_OPERAND (t, 1),
14337 strict_overflow_p);
14340 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14342 TREE_OPERAND (t, 0),
14343 strict_overflow_p);
14346 case tcc_declaration:
14347 case tcc_reference:
14348 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14356 case TRUTH_AND_EXPR:
14357 case TRUTH_OR_EXPR:
14358 case TRUTH_XOR_EXPR:
14359 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14361 TREE_OPERAND (t, 0),
14362 TREE_OPERAND (t, 1),
14363 strict_overflow_p);
14364 case TRUTH_NOT_EXPR:
14365 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14367 TREE_OPERAND (t, 0),
14368 strict_overflow_p);
14375 case WITH_SIZE_EXPR:
14379 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14382 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14386 /* Return true if `t' is known to be non-negative. Handle warnings
14387 about undefined signed overflow. */
14390 tree_expr_nonnegative_p (tree t)
14392 bool ret, strict_overflow_p;
14394 strict_overflow_p = false;
14395 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14396 if (strict_overflow_p)
14397 fold_overflow_warning (("assuming signed overflow does not occur when "
14398 "determining that expression is always "
14400 WARN_STRICT_OVERFLOW_MISC);
14405 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14406 For floating point we further ensure that T is not denormal.
14407 Similar logic is present in nonzero_address in rtlanal.h.
14409 If the return value is based on the assumption that signed overflow
14410 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14411 change *STRICT_OVERFLOW_P. */
14414 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14415 bool *strict_overflow_p)
14420 return tree_expr_nonzero_warnv_p (op0,
14421 strict_overflow_p);
14425 tree inner_type = TREE_TYPE (op0);
14426 tree outer_type = type;
14428 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14429 && tree_expr_nonzero_warnv_p (op0,
14430 strict_overflow_p));
14434 case NON_LVALUE_EXPR:
14435 return tree_expr_nonzero_warnv_p (op0,
14436 strict_overflow_p);
14445 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14446 For floating point we further ensure that T is not denormal.
14447 Similar logic is present in nonzero_address in rtlanal.h.
14449 If the return value is based on the assumption that signed overflow
14450 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14451 change *STRICT_OVERFLOW_P. */
14454 tree_binary_nonzero_warnv_p (enum tree_code code,
14457 tree op1, bool *strict_overflow_p)
14459 bool sub_strict_overflow_p;
14462 case POINTER_PLUS_EXPR:
14464 if (TYPE_OVERFLOW_UNDEFINED (type))
14466 /* With the presence of negative values it is hard
14467 to say something. */
14468 sub_strict_overflow_p = false;
14469 if (!tree_expr_nonnegative_warnv_p (op0,
14470 &sub_strict_overflow_p)
14471 || !tree_expr_nonnegative_warnv_p (op1,
14472 &sub_strict_overflow_p))
14474 /* One of operands must be positive and the other non-negative. */
14475 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14476 overflows, on a twos-complement machine the sum of two
14477 nonnegative numbers can never be zero. */
14478 return (tree_expr_nonzero_warnv_p (op0,
14480 || tree_expr_nonzero_warnv_p (op1,
14481 strict_overflow_p));
14486 if (TYPE_OVERFLOW_UNDEFINED (type))
14488 if (tree_expr_nonzero_warnv_p (op0,
14490 && tree_expr_nonzero_warnv_p (op1,
14491 strict_overflow_p))
14493 *strict_overflow_p = true;
14500 sub_strict_overflow_p = false;
14501 if (tree_expr_nonzero_warnv_p (op0,
14502 &sub_strict_overflow_p)
14503 && tree_expr_nonzero_warnv_p (op1,
14504 &sub_strict_overflow_p))
14506 if (sub_strict_overflow_p)
14507 *strict_overflow_p = true;
14512 sub_strict_overflow_p = false;
14513 if (tree_expr_nonzero_warnv_p (op0,
14514 &sub_strict_overflow_p))
14516 if (sub_strict_overflow_p)
14517 *strict_overflow_p = true;
14519 /* When both operands are nonzero, then MAX must be too. */
14520 if (tree_expr_nonzero_warnv_p (op1,
14521 strict_overflow_p))
14524 /* MAX where operand 0 is positive is positive. */
14525 return tree_expr_nonnegative_warnv_p (op0,
14526 strict_overflow_p);
14528 /* MAX where operand 1 is positive is positive. */
14529 else if (tree_expr_nonzero_warnv_p (op1,
14530 &sub_strict_overflow_p)
14531 && tree_expr_nonnegative_warnv_p (op1,
14532 &sub_strict_overflow_p))
14534 if (sub_strict_overflow_p)
14535 *strict_overflow_p = true;
14541 return (tree_expr_nonzero_warnv_p (op1,
14543 || tree_expr_nonzero_warnv_p (op0,
14544 strict_overflow_p));
14553 /* Return true when T is an address and is known to be nonzero.
14554 For floating point we further ensure that T is not denormal.
14555 Similar logic is present in nonzero_address in rtlanal.h.
14557 If the return value is based on the assumption that signed overflow
14558 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14559 change *STRICT_OVERFLOW_P. */
14562 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14564 bool sub_strict_overflow_p;
14565 switch (TREE_CODE (t))
14568 /* Query VRP to see if it has recorded any information about
14569 the range of this object. */
14570 return ssa_name_nonzero_p (t);
14573 return !integer_zerop (t);
14577 tree base = get_base_address (TREE_OPERAND (t, 0));
14582 /* Weak declarations may link to NULL. */
14583 if (VAR_OR_FUNCTION_DECL_P (base))
14584 return !DECL_WEAK (base);
14586 /* Constants are never weak. */
14587 if (CONSTANT_CLASS_P (base))
14594 sub_strict_overflow_p = false;
14595 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14596 &sub_strict_overflow_p)
14597 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14598 &sub_strict_overflow_p))
14600 if (sub_strict_overflow_p)
14601 *strict_overflow_p = true;
14612 /* Return true when T is an address and is known to be nonzero.
14613 For floating point we further ensure that T is not denormal.
14614 Similar logic is present in nonzero_address in rtlanal.h.
14616 If the return value is based on the assumption that signed overflow
14617 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14618 change *STRICT_OVERFLOW_P. */
14621 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14623 tree type = TREE_TYPE (t);
14624 enum tree_code code;
14626 /* Doing something useful for floating point would need more work. */
14627 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14630 code = TREE_CODE (t);
14631 switch (TREE_CODE_CLASS (code))
14634 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14635 strict_overflow_p);
14637 case tcc_comparison:
14638 return tree_binary_nonzero_warnv_p (code, type,
14639 TREE_OPERAND (t, 0),
14640 TREE_OPERAND (t, 1),
14641 strict_overflow_p);
14643 case tcc_declaration:
14644 case tcc_reference:
14645 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14653 case TRUTH_NOT_EXPR:
14654 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14655 strict_overflow_p);
14657 case TRUTH_AND_EXPR:
14658 case TRUTH_OR_EXPR:
14659 case TRUTH_XOR_EXPR:
14660 return tree_binary_nonzero_warnv_p (code, type,
14661 TREE_OPERAND (t, 0),
14662 TREE_OPERAND (t, 1),
14663 strict_overflow_p);
14670 case WITH_SIZE_EXPR:
14674 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14676 case COMPOUND_EXPR:
14678 case GIMPLE_MODIFY_STMT:
14680 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14681 strict_overflow_p);
14684 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14685 strict_overflow_p);
14688 return alloca_call_p (t);
14696 /* Return true when T is an address and is known to be nonzero.
14697 Handle warnings about undefined signed overflow. */
14700 tree_expr_nonzero_p (tree t)
14702 bool ret, strict_overflow_p;
14704 strict_overflow_p = false;
14705 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14706 if (strict_overflow_p)
14707 fold_overflow_warning (("assuming signed overflow does not occur when "
14708 "determining that expression is always "
14710 WARN_STRICT_OVERFLOW_MISC);
14714 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14715 attempt to fold the expression to a constant without modifying TYPE,
14718 If the expression could be simplified to a constant, then return
14719 the constant. If the expression would not be simplified to a
14720 constant, then return NULL_TREE. */
14723 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14725 tree tem = fold_binary (code, type, op0, op1);
14726 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14729 /* Given the components of a unary expression CODE, TYPE and OP0,
14730 attempt to fold the expression to a constant without modifying
14733 If the expression could be simplified to a constant, then return
14734 the constant. If the expression would not be simplified to a
14735 constant, then return NULL_TREE. */
14738 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14740 tree tem = fold_unary (code, type, op0);
14741 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14744 /* If EXP represents referencing an element in a constant string
14745 (either via pointer arithmetic or array indexing), return the
14746 tree representing the value accessed, otherwise return NULL. */
14749 fold_read_from_constant_string (tree exp)
14751 if ((TREE_CODE (exp) == INDIRECT_REF
14752 || TREE_CODE (exp) == ARRAY_REF)
14753 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14755 tree exp1 = TREE_OPERAND (exp, 0);
14759 if (TREE_CODE (exp) == INDIRECT_REF)
14760 string = string_constant (exp1, &index);
14763 tree low_bound = array_ref_low_bound (exp);
14764 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14766 /* Optimize the special-case of a zero lower bound.
14768 We convert the low_bound to sizetype to avoid some problems
14769 with constant folding. (E.g. suppose the lower bound is 1,
14770 and its mode is QI. Without the conversion,l (ARRAY
14771 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14772 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
14773 if (! integer_zerop (low_bound))
14774 index = size_diffop (index, fold_convert (sizetype, low_bound));
14780 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14781 && TREE_CODE (string) == STRING_CST
14782 && TREE_CODE (index) == INTEGER_CST
14783 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14784 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14786 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14787 return build_int_cst_type (TREE_TYPE (exp),
14788 (TREE_STRING_POINTER (string)
14789 [TREE_INT_CST_LOW (index)]));
14794 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14795 an integer constant, real, or fixed-point constant.
14797 TYPE is the type of the result. */
14800 fold_negate_const (tree arg0, tree type)
14802 tree t = NULL_TREE;
14804 switch (TREE_CODE (arg0))
14808 unsigned HOST_WIDE_INT low;
14809 HOST_WIDE_INT high;
14810 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14811 TREE_INT_CST_HIGH (arg0),
14813 t = force_fit_type_double (type, low, high, 1,
14814 (overflow | TREE_OVERFLOW (arg0))
14815 && !TYPE_UNSIGNED (type));
14820 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14825 FIXED_VALUE_TYPE f;
14826 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14827 &(TREE_FIXED_CST (arg0)), NULL,
14828 TYPE_SATURATING (type));
14829 t = build_fixed (type, f);
14830 /* Propagate overflow flags. */
14831 if (overflow_p | TREE_OVERFLOW (arg0))
14833 TREE_OVERFLOW (t) = 1;
14834 TREE_CONSTANT_OVERFLOW (t) = 1;
14836 else if (TREE_CONSTANT_OVERFLOW (arg0))
14837 TREE_CONSTANT_OVERFLOW (t) = 1;
14842 gcc_unreachable ();
14848 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14849 an integer constant or real constant.
14851 TYPE is the type of the result. */
14854 fold_abs_const (tree arg0, tree type)
14856 tree t = NULL_TREE;
14858 switch (TREE_CODE (arg0))
14861 /* If the value is unsigned, then the absolute value is
14862 the same as the ordinary value. */
14863 if (TYPE_UNSIGNED (type))
14865 /* Similarly, if the value is non-negative. */
14866 else if (INT_CST_LT (integer_minus_one_node, arg0))
14868 /* If the value is negative, then the absolute value is
14872 unsigned HOST_WIDE_INT low;
14873 HOST_WIDE_INT high;
14874 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14875 TREE_INT_CST_HIGH (arg0),
14877 t = force_fit_type_double (type, low, high, -1,
14878 overflow | TREE_OVERFLOW (arg0));
14883 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14884 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14890 gcc_unreachable ();
14896 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14897 constant. TYPE is the type of the result. */
14900 fold_not_const (tree arg0, tree type)
14902 tree t = NULL_TREE;
14904 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14906 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14907 ~TREE_INT_CST_HIGH (arg0), 0,
14908 TREE_OVERFLOW (arg0));
14913 /* Given CODE, a relational operator, the target type, TYPE and two
14914 constant operands OP0 and OP1, return the result of the
14915 relational operation. If the result is not a compile time
14916 constant, then return NULL_TREE. */
14919 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14921 int result, invert;
14923 /* From here on, the only cases we handle are when the result is
14924 known to be a constant. */
14926 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14928 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14929 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14931 /* Handle the cases where either operand is a NaN. */
14932 if (real_isnan (c0) || real_isnan (c1))
14942 case UNORDERED_EXPR:
14956 if (flag_trapping_math)
14962 gcc_unreachable ();
14965 return constant_boolean_node (result, type);
14968 return constant_boolean_node (real_compare (code, c0, c1), type);
14971 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14973 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14974 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14975 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14978 /* Handle equality/inequality of complex constants. */
14979 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14981 tree rcond = fold_relational_const (code, type,
14982 TREE_REALPART (op0),
14983 TREE_REALPART (op1));
14984 tree icond = fold_relational_const (code, type,
14985 TREE_IMAGPART (op0),
14986 TREE_IMAGPART (op1));
14987 if (code == EQ_EXPR)
14988 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14989 else if (code == NE_EXPR)
14990 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14995 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14997 To compute GT, swap the arguments and do LT.
14998 To compute GE, do LT and invert the result.
14999 To compute LE, swap the arguments, do LT and invert the result.
15000 To compute NE, do EQ and invert the result.
15002 Therefore, the code below must handle only EQ and LT. */
15004 if (code == LE_EXPR || code == GT_EXPR)
15009 code = swap_tree_comparison (code);
15012 /* Note that it is safe to invert for real values here because we
15013 have already handled the one case that it matters. */
15016 if (code == NE_EXPR || code == GE_EXPR)
15019 code = invert_tree_comparison (code, false);
15022 /* Compute a result for LT or EQ if args permit;
15023 Otherwise return T. */
15024 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15026 if (code == EQ_EXPR)
15027 result = tree_int_cst_equal (op0, op1);
15028 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15029 result = INT_CST_LT_UNSIGNED (op0, op1);
15031 result = INT_CST_LT (op0, op1);
15038 return constant_boolean_node (result, type);
15041 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15042 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15046 fold_build_cleanup_point_expr (tree type, tree expr)
15048 /* If the expression does not have side effects then we don't have to wrap
15049 it with a cleanup point expression. */
15050 if (!TREE_SIDE_EFFECTS (expr))
15053 /* If the expression is a return, check to see if the expression inside the
15054 return has no side effects or the right hand side of the modify expression
15055 inside the return. If either don't have side effects set we don't need to
15056 wrap the expression in a cleanup point expression. Note we don't check the
15057 left hand side of the modify because it should always be a return decl. */
15058 if (TREE_CODE (expr) == RETURN_EXPR)
15060 tree op = TREE_OPERAND (expr, 0);
15061 if (!op || !TREE_SIDE_EFFECTS (op))
15063 op = TREE_OPERAND (op, 1);
15064 if (!TREE_SIDE_EFFECTS (op))
15068 return build1 (CLEANUP_POINT_EXPR, type, expr);
15071 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15072 of an indirection through OP0, or NULL_TREE if no simplification is
15076 fold_indirect_ref_1 (tree type, tree op0)
15082 subtype = TREE_TYPE (sub);
15083 if (!POINTER_TYPE_P (subtype))
15086 if (TREE_CODE (sub) == ADDR_EXPR)
15088 tree op = TREE_OPERAND (sub, 0);
15089 tree optype = TREE_TYPE (op);
15090 /* *&CONST_DECL -> to the value of the const decl. */
15091 if (TREE_CODE (op) == CONST_DECL)
15092 return DECL_INITIAL (op);
15093 /* *&p => p; make sure to handle *&"str"[cst] here. */
15094 if (type == optype)
15096 tree fop = fold_read_from_constant_string (op);
15102 /* *(foo *)&fooarray => fooarray[0] */
15103 else if (TREE_CODE (optype) == ARRAY_TYPE
15104 && type == TREE_TYPE (optype))
15106 tree type_domain = TYPE_DOMAIN (optype);
15107 tree min_val = size_zero_node;
15108 if (type_domain && TYPE_MIN_VALUE (type_domain))
15109 min_val = TYPE_MIN_VALUE (type_domain);
15110 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15112 /* *(foo *)&complexfoo => __real__ complexfoo */
15113 else if (TREE_CODE (optype) == COMPLEX_TYPE
15114 && type == TREE_TYPE (optype))
15115 return fold_build1 (REALPART_EXPR, type, op);
15116 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15117 else if (TREE_CODE (optype) == VECTOR_TYPE
15118 && type == TREE_TYPE (optype))
15120 tree part_width = TYPE_SIZE (type);
15121 tree index = bitsize_int (0);
15122 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15126 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15127 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15128 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15130 tree op00 = TREE_OPERAND (sub, 0);
15131 tree op01 = TREE_OPERAND (sub, 1);
15135 op00type = TREE_TYPE (op00);
15136 if (TREE_CODE (op00) == ADDR_EXPR
15137 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15138 && type == TREE_TYPE (TREE_TYPE (op00type)))
15140 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15141 tree part_width = TYPE_SIZE (type);
15142 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15143 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15144 tree index = bitsize_int (indexi);
15146 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15147 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15148 part_width, index);
15154 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15155 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15156 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15158 tree op00 = TREE_OPERAND (sub, 0);
15159 tree op01 = TREE_OPERAND (sub, 1);
15163 op00type = TREE_TYPE (op00);
15164 if (TREE_CODE (op00) == ADDR_EXPR
15165 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15166 && type == TREE_TYPE (TREE_TYPE (op00type)))
15168 tree size = TYPE_SIZE_UNIT (type);
15169 if (tree_int_cst_equal (size, op01))
15170 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15174 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15175 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15176 && type == TREE_TYPE (TREE_TYPE (subtype)))
15179 tree min_val = size_zero_node;
15180 sub = build_fold_indirect_ref (sub);
15181 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15182 if (type_domain && TYPE_MIN_VALUE (type_domain))
15183 min_val = TYPE_MIN_VALUE (type_domain);
15184 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15190 /* Builds an expression for an indirection through T, simplifying some
15194 build_fold_indirect_ref (tree t)
15196 tree type = TREE_TYPE (TREE_TYPE (t));
15197 tree sub = fold_indirect_ref_1 (type, t);
15202 return build1 (INDIRECT_REF, type, t);
15205 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15208 fold_indirect_ref (tree t)
15210 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15218 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15219 whose result is ignored. The type of the returned tree need not be
15220 the same as the original expression. */
15223 fold_ignored_result (tree t)
15225 if (!TREE_SIDE_EFFECTS (t))
15226 return integer_zero_node;
15229 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15232 t = TREE_OPERAND (t, 0);
15236 case tcc_comparison:
15237 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15238 t = TREE_OPERAND (t, 0);
15239 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15240 t = TREE_OPERAND (t, 1);
15245 case tcc_expression:
15246 switch (TREE_CODE (t))
15248 case COMPOUND_EXPR:
15249 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15251 t = TREE_OPERAND (t, 0);
15255 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15256 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15258 t = TREE_OPERAND (t, 0);
15271 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15272 This can only be applied to objects of a sizetype. */
15275 round_up (tree value, int divisor)
15277 tree div = NULL_TREE;
15279 gcc_assert (divisor > 0);
15283 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15284 have to do anything. Only do this when we are not given a const,
15285 because in that case, this check is more expensive than just
15287 if (TREE_CODE (value) != INTEGER_CST)
15289 div = build_int_cst (TREE_TYPE (value), divisor);
15291 if (multiple_of_p (TREE_TYPE (value), value, div))
15295 /* If divisor is a power of two, simplify this to bit manipulation. */
15296 if (divisor == (divisor & -divisor))
15298 if (TREE_CODE (value) == INTEGER_CST)
15300 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15301 unsigned HOST_WIDE_INT high;
15304 if ((low & (divisor - 1)) == 0)
15307 overflow_p = TREE_OVERFLOW (value);
15308 high = TREE_INT_CST_HIGH (value);
15309 low &= ~(divisor - 1);
15318 return force_fit_type_double (TREE_TYPE (value), low, high,
15325 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15326 value = size_binop (PLUS_EXPR, value, t);
15327 t = build_int_cst (TREE_TYPE (value), -divisor);
15328 value = size_binop (BIT_AND_EXPR, value, t);
15334 div = build_int_cst (TREE_TYPE (value), divisor);
15335 value = size_binop (CEIL_DIV_EXPR, value, div);
15336 value = size_binop (MULT_EXPR, value, div);
15342 /* Likewise, but round down. */
15345 round_down (tree value, int divisor)
15347 tree div = NULL_TREE;
15349 gcc_assert (divisor > 0);
15353 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15354 have to do anything. Only do this when we are not given a const,
15355 because in that case, this check is more expensive than just
15357 if (TREE_CODE (value) != INTEGER_CST)
15359 div = build_int_cst (TREE_TYPE (value), divisor);
15361 if (multiple_of_p (TREE_TYPE (value), value, div))
15365 /* If divisor is a power of two, simplify this to bit manipulation. */
15366 if (divisor == (divisor & -divisor))
15370 t = build_int_cst (TREE_TYPE (value), -divisor);
15371 value = size_binop (BIT_AND_EXPR, value, t);
15376 div = build_int_cst (TREE_TYPE (value), divisor);
15377 value = size_binop (FLOOR_DIV_EXPR, value, div);
15378 value = size_binop (MULT_EXPR, value, div);
15384 /* Returns the pointer to the base of the object addressed by EXP and
15385 extracts the information about the offset of the access, storing it
15386 to PBITPOS and POFFSET. */
15389 split_address_to_core_and_offset (tree exp,
15390 HOST_WIDE_INT *pbitpos, tree *poffset)
15393 enum machine_mode mode;
15394 int unsignedp, volatilep;
15395 HOST_WIDE_INT bitsize;
15397 if (TREE_CODE (exp) == ADDR_EXPR)
15399 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15400 poffset, &mode, &unsignedp, &volatilep,
15402 core = fold_addr_expr (core);
15408 *poffset = NULL_TREE;
15414 /* Returns true if addresses of E1 and E2 differ by a constant, false
15415 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15418 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15421 HOST_WIDE_INT bitpos1, bitpos2;
15422 tree toffset1, toffset2, tdiff, type;
15424 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15425 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15427 if (bitpos1 % BITS_PER_UNIT != 0
15428 || bitpos2 % BITS_PER_UNIT != 0
15429 || !operand_equal_p (core1, core2, 0))
15432 if (toffset1 && toffset2)
15434 type = TREE_TYPE (toffset1);
15435 if (type != TREE_TYPE (toffset2))
15436 toffset2 = fold_convert (type, toffset2);
15438 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15439 if (!cst_and_fits_in_hwi (tdiff))
15442 *diff = int_cst_value (tdiff);
15444 else if (toffset1 || toffset2)
15446 /* If only one of the offsets is non-constant, the difference cannot
15453 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15457 /* Simplify the floating point expression EXP when the sign of the
15458 result is not significant. Return NULL_TREE if no simplification
15462 fold_strip_sign_ops (tree exp)
15466 switch (TREE_CODE (exp))
15470 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15471 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15475 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15477 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15478 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15479 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15480 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15481 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15482 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15485 case COMPOUND_EXPR:
15486 arg0 = TREE_OPERAND (exp, 0);
15487 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15489 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15493 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15494 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15496 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15497 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15498 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15503 const enum built_in_function fcode = builtin_mathfn_code (exp);
15506 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15507 /* Strip copysign function call, return the 1st argument. */
15508 arg0 = CALL_EXPR_ARG (exp, 0);
15509 arg1 = CALL_EXPR_ARG (exp, 1);
15510 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15513 /* Strip sign ops from the argument of "odd" math functions. */
15514 if (negate_mathfn_p (fcode))
15516 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15518 return build_call_expr (get_callee_fndecl (exp), 1, arg0);