/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW accordingly.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "fixed-value.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree, tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
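
/* Illustrative sketch (not part of the original source, guarded out of
   the build): adding two large positive values wraps into the sign bit,
   which OVERFLOW_SUM_SIGN detects; operands of opposite sign can never
   overflow, so it yields zero for them.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT sum = a + a;	/* wraps into the sign bit */
  gcc_assert (OVERFLOW_SUM_SIGN (a, a, sum));
  gcc_assert (!OVERFLOW_SUM_SIGN (a, -a, (HOST_WIDE_INT) 0));
}
#endif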
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
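
/* Illustrative sketch (not part of the original source, guarded out of
   the build): encode followed by decode reproduces the original two-word
   value, since each half-word holds HOST_BITS_PER_WIDE_INT / 2 bits and
   each pair is recombined as LOWPART + HIGHPART * BASE.  */
#if 0
static void
encode_decode_roundtrip_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x12345678, low2;
  HOST_WIDE_INT hi = -42, hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  gcc_assert (low2 == low && hi2 == hi);
}
#endif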
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
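
/* Illustrative sketch (not part of the original source, guarded out of
   the build): truncating 0x1ff to an 8-bit signed type keeps only the
   low 8 bits (0xff), sign extends them to -1, and reports overflow
   because argument and result differ.  */
#if 0
static void
fit_double_type_example (void)
{
  unsigned HOST_WIDE_INT lo = 0x1ff;
  HOST_WIDE_INT hi;
  int overflowed = fit_double_type (lo, 0, &lo, &hi, signed_char_type_node);
  /* Now lo == (unsigned HOST_WIDE_INT) -1, hi == -1, overflowed != 0.  */
  gcc_assert (overflowed && hi == -1);
}
#endif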
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
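
/* Illustrative sketch (not part of the original source, guarded out of
   the build): adding 1 to the doubleword value -1 wraps to zero.  Signed
   overflow is not flagged (-1 + 1 == 0 is exact), while the same
   addition viewed as unsigned carries out of the top word and is.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf_s = add_double_with_sign (-1, -1, 1, 0, &lv, &hv, false);
  int ovf_u = add_double_with_sign (-1, -1, 1, 0, &lv, &hv, true);
  gcc_assert (lv == 0 && hv == 0 && !ovf_s && ovf_u);
}
#endif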
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
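
/* Illustrative sketch (not part of the original source, guarded out of
   the build): a rotate is the OR of a shift and the complementary
   logical shift, so rotating the 8-bit pattern 0x81 left by 1 within
   prec == 8 wraps bit 7 around to bit 0, giving 0x03.  */
#if 0
static void
lrotate_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  lrotate_double (0x81, 0, 1, 8, &lv, &hv);
  gcc_assert (lv == 0x03);
}
#endif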
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
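
/* Illustrative sketch (not part of the original source, guarded out of
   the build): dividing -7 by 2 in signed mode rounds the quotient toward
   zero for TRUNC_DIV_EXPR (-3, remainder -1) but toward negative
   infinity for FLOOR_DIV_EXPR (-4, remainder 1).  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;
  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0, &lq, &hq, &lr, &hr);
  /* lq == (unsigned HOST_WIDE_INT) -3, hq == -1.  */
  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0, &lq, &hq, &lr, &hr);
  /* lq == (unsigned HOST_WIDE_INT) -4, hq == -1.  */
}
#endif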
/* If ARG2 divides ARG1 with zero remainder, carry out the division
   of type CODE and return the quotient.
   Otherwise return NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
		       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
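
/* Illustrative sketch (not part of the original source, guarded out of
   the build): a typical caller brackets a folding attempt with
   defer/undefer, issuing the deferred warning only when the folded
   result is actually used.  The variables code, type, op0, op1 and stmt
   are hypothetical.  */
#if 0
fold_defer_overflow_warnings ();
tem = fold_binary (code, type, op0, op1);	/* may record a warning */
fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);
#endif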
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
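
/* Illustrative sketch (not part of the original source, guarded out of
   the build): the most negative value of a signed type is the one value
   whose negation overflows, so it is rejected while the most positive
   value passes.  */
#if 0
static void
may_negate_example (void)
{
  gcc_assert (!may_negate_without_overflow_p
	        (TYPE_MIN_VALUE (integer_type_node)));
  gcc_assert (may_negate_without_overflow_p
	        (TYPE_MAX_VALUE (integer_type_node)));
}
#endif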
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
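
/* Illustrative sketch (not part of the original source, guarded out of
   the build): splitting the tree for "x + 3" with CODE == PLUS_EXPR
   yields the literal 3, no constant part, and "x" as the returned
   variable part.  The variable expr is hypothetical.  */
#if 0
tree con, lit, minus_lit;
tree var = split_tree (expr /* x + 3 */, PLUS_EXPR,
		       &con, &lit, &minus_lit, /*negate_p=*/0);
/* var == x, lit == 3, con == NULL_TREE, minus_lit == NULL_TREE.  */
#endif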
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
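
/* Illustrative sketch (not part of the original source, guarded out of
   the build): folding 7 + 35 at compile time produces a shared
   INTEGER_CST node for 42 with no overflow flag set.  */
#if 0
static void
int_const_binop_example (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 35);
  tree sum = int_const_binop (PLUS_EXPR, a, b, 0);
  gcc_assert (TREE_INT_CST_LOW (sum) == 42 && !TREE_OVERFLOW (sum));
}
#endif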
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
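
/* Illustrative sketch (not part of the original source, guarded out of
   the build): size_binop on two size_int constants folds immediately,
   e.g. computing the byte size of a 4-element array of 8-byte elements.  */
#if 0
tree bytes = size_binop (MULT_EXPR, size_int (4), size_int (8));
/* bytes is the sizetype constant 32.  */
#endif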
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				??? sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
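
/* Illustrative sketch (not part of the original source, guarded out of
   the build): converting the REAL_CST 1e30 to a 32-bit signed type
   saturates at TYPE_MAX_VALUE and marks the result with TREE_OVERFLOW,
   while a NaN would convert to zero.  */
#if 0
static void
fp_to_int_saturation_example (void)
{
  REAL_VALUE_TYPE big;
  real_from_string (&big, "1e30");
  tree r = build_real (double_type_node, big);
  tree i = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
					     integer_type_node, r);
  /* i holds INT_MAX and TREE_OVERFLOW (i) is set.  */
  gcc_assert (TREE_OVERFLOW (i));
}
#endif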
2224 /* A subroutine of fold_convert_const handling conversions of a
2225 FIXED_CST to an integer type. */
2228 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2231 double_int temp, temp_trunc;
2234 /* Right shift FIXED_CST to temp by fbit. */
2235 temp = TREE_FIXED_CST (arg1).data;
2236 mode = TREE_FIXED_CST (arg1).mode;
2237 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2239 lshift_double (temp.low, temp.high,
2240 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2241 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2243 /* Left shift temp to temp_trunc by fbit. */
2244 lshift_double (temp.low, temp.high,
2245 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2246 &temp_trunc.low, &temp_trunc.high,
2247 SIGNED_FIXED_POINT_MODE_P (mode));
2254 temp_trunc.high = 0;
2257 /* If FIXED_CST is negative, we need to round the value toward 0:
2258 if the fractional bits are not all zero, add 1 to temp. */
2259 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2260 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2265 temp = double_int_add (temp, one);
2268 /* Given a fixed-point constant, make new constant with new type,
2269 appropriately sign-extended or truncated. */
2270 t = force_fit_type_double (type, temp.low, temp.high, -1,
2272 && (TYPE_UNSIGNED (type)
2273 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2274 | TREE_OVERFLOW (arg1));
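/* A standalone sketch of the round-toward-zero fixup above, assuming a
   signed fixed-point value kept in a 32-bit int with FBITS fractional
   bits (0 < FBITS < 31) and an arithmetic right shift; the names are
   illustrative, not GCC's.  */
static int
fixed_to_int_trunc (int fx, int fbits)
{
  int q = fx >> fbits;		/* Shifting rounds toward -infinity.  */
  if (fx < 0 && (fx & ((1 << fbits) - 1)) != 0)
    q += 1;			/* Negative with a nonzero fraction:
				   bump the quotient back toward zero.  */
  return q;
}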
2279 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2280 to another floating point type. */
2283 fold_convert_const_real_from_real (tree type, const_tree arg1)
2285 REAL_VALUE_TYPE value;
2288 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2289 t = build_real (type, value);
2291 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2295 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2296 to a floating point type. */
2299 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2301 REAL_VALUE_TYPE value;
2304 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2305 t = build_real (type, value);
2307 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2308 TREE_CONSTANT_OVERFLOW (t)
2309 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2313 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2314 to another fixed-point type. */
2317 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2319 FIXED_VALUE_TYPE value;
2323 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2324 TYPE_SATURATING (type));
2325 t = build_fixed (type, value);
2327 /* Propagate overflow flags. */
2328 if (overflow_p | TREE_OVERFLOW (arg1))
2330 TREE_OVERFLOW (t) = 1;
2331 TREE_CONSTANT_OVERFLOW (t) = 1;
2333 else if (TREE_CONSTANT_OVERFLOW (arg1))
2334 TREE_CONSTANT_OVERFLOW (t) = 1;
2338 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2339 to a fixed-point type. */
2342 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2344 FIXED_VALUE_TYPE value;
2348 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2349 TREE_INT_CST (arg1),
2350 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2351 TYPE_SATURATING (type));
2352 t = build_fixed (type, value);
2354 /* Propagate overflow flags. */
2355 if (overflow_p | TREE_OVERFLOW (arg1))
2357 TREE_OVERFLOW (t) = 1;
2358 TREE_CONSTANT_OVERFLOW (t) = 1;
2360 else if (TREE_CONSTANT_OVERFLOW (arg1))
2361 TREE_CONSTANT_OVERFLOW (t) = 1;
2365 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2366 to a fixed-point type. */
2369 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2371 FIXED_VALUE_TYPE value;
2375 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2376 &TREE_REAL_CST (arg1),
2377 TYPE_SATURATING (type));
2378 t = build_fixed (type, value);
2380 /* Propagate overflow flags. */
2381 if (overflow_p | TREE_OVERFLOW (arg1))
2383 TREE_OVERFLOW (t) = 1;
2384 TREE_CONSTANT_OVERFLOW (t) = 1;
2386 else if (TREE_CONSTANT_OVERFLOW (arg1))
2387 TREE_CONSTANT_OVERFLOW (t) = 1;
2391 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2392 type TYPE. If no simplification can be done return NULL_TREE. */
2395 fold_convert_const (enum tree_code code, tree type, tree arg1)
2397 if (TREE_TYPE (arg1) == type)
2400 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2401 || TREE_CODE (type) == OFFSET_TYPE)
2403 if (TREE_CODE (arg1) == INTEGER_CST)
2404 return fold_convert_const_int_from_int (type, arg1);
2405 else if (TREE_CODE (arg1) == REAL_CST)
2406 return fold_convert_const_int_from_real (code, type, arg1);
2407 else if (TREE_CODE (arg1) == FIXED_CST)
2408 return fold_convert_const_int_from_fixed (type, arg1);
2410 else if (TREE_CODE (type) == REAL_TYPE)
2412 if (TREE_CODE (arg1) == INTEGER_CST)
2413 return build_real_from_int_cst (type, arg1);
2414 else if (TREE_CODE (arg1) == REAL_CST)
2415 return fold_convert_const_real_from_real (type, arg1);
2416 else if (TREE_CODE (arg1) == FIXED_CST)
2417 return fold_convert_const_real_from_fixed (type, arg1);
2419 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2421 if (TREE_CODE (arg1) == FIXED_CST)
2422 return fold_convert_const_fixed_from_fixed (type, arg1);
2423 else if (TREE_CODE (arg1) == INTEGER_CST)
2424 return fold_convert_const_fixed_from_int (type, arg1);
2425 else if (TREE_CODE (arg1) == REAL_CST)
2426 return fold_convert_const_fixed_from_real (type, arg1);
2431 /* Construct a vector of zero elements of vector type TYPE. */
2434 build_zero_vector (tree type)
2439 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2440 units = TYPE_VECTOR_SUBPARTS (type);
2443 for (i = 0; i < units; i++)
2444 list = tree_cons (NULL_TREE, elem, list);
2445 return build_vector (type, list);
2448 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2451 fold_convertible_p (const_tree type, const_tree arg)
2453 tree orig = TREE_TYPE (arg);
2458 if (TREE_CODE (arg) == ERROR_MARK
2459 || TREE_CODE (type) == ERROR_MARK
2460 || TREE_CODE (orig) == ERROR_MARK)
2463 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2466 switch (TREE_CODE (type))
2468 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2469 case POINTER_TYPE: case REFERENCE_TYPE:
2471 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2472 || TREE_CODE (orig) == OFFSET_TYPE)
2474 return (TREE_CODE (orig) == VECTOR_TYPE
2475 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2478 case FIXED_POINT_TYPE:
2482 return TREE_CODE (type) == TREE_CODE (orig);
2489 /* Convert expression ARG to type TYPE. Used by the middle-end for
2490 simple conversions in preference to calling the front-end's convert. */
2493 fold_convert (tree type, tree arg)
2495 tree orig = TREE_TYPE (arg);
2501 if (TREE_CODE (arg) == ERROR_MARK
2502 || TREE_CODE (type) == ERROR_MARK
2503 || TREE_CODE (orig) == ERROR_MARK)
2504 return error_mark_node;
2506 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2507 return fold_build1 (NOP_EXPR, type, arg);
2509 switch (TREE_CODE (type))
2511 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2512 case POINTER_TYPE: case REFERENCE_TYPE:
2514 if (TREE_CODE (arg) == INTEGER_CST)
2516 tem = fold_convert_const (NOP_EXPR, type, arg);
2517 if (tem != NULL_TREE)
2520 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2521 || TREE_CODE (orig) == OFFSET_TYPE)
2522 return fold_build1 (NOP_EXPR, type, arg);
2523 if (TREE_CODE (orig) == COMPLEX_TYPE)
2525 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2526 return fold_convert (type, tem);
2528 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2529 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2530 return fold_build1 (NOP_EXPR, type, arg);
2533 if (TREE_CODE (arg) == INTEGER_CST)
2535 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2536 if (tem != NULL_TREE)
2539 else if (TREE_CODE (arg) == REAL_CST)
2541 tem = fold_convert_const (NOP_EXPR, type, arg);
2542 if (tem != NULL_TREE)
2545 else if (TREE_CODE (arg) == FIXED_CST)
2547 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2548 if (tem != NULL_TREE)
2552 switch (TREE_CODE (orig))
2555 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2556 case POINTER_TYPE: case REFERENCE_TYPE:
2557 return fold_build1 (FLOAT_EXPR, type, arg);
2560 return fold_build1 (NOP_EXPR, type, arg);
2562 case FIXED_POINT_TYPE:
2563 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2566 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2567 return fold_convert (type, tem);
2573 case FIXED_POINT_TYPE:
2574 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2575 || TREE_CODE (arg) == REAL_CST)
2577 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2578 if (tem != NULL_TREE)
2582 switch (TREE_CODE (orig))
2584 case FIXED_POINT_TYPE:
2589 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2592 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2593 return fold_convert (type, tem);
2600 switch (TREE_CODE (orig))
2603 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2604 case POINTER_TYPE: case REFERENCE_TYPE:
2606 case FIXED_POINT_TYPE:
2607 return build2 (COMPLEX_EXPR, type,
2608 fold_convert (TREE_TYPE (type), arg),
2609 fold_convert (TREE_TYPE (type), integer_zero_node));
2614 if (TREE_CODE (arg) == COMPLEX_EXPR)
2616 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2617 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2618 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2621 arg = save_expr (arg);
2622 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2623 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2624 rpart = fold_convert (TREE_TYPE (type), rpart);
2625 ipart = fold_convert (TREE_TYPE (type), ipart);
2626 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2634 if (integer_zerop (arg))
2635 return build_zero_vector (type);
2636 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2637 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2638 || TREE_CODE (orig) == VECTOR_TYPE);
2639 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2642 tem = fold_ignored_result (arg);
2643 if (TREE_CODE (tem) == MODIFY_EXPR)
2645 return fold_build1 (NOP_EXPR, type, tem);
2652 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2656 maybe_lvalue_p (const_tree x)
2658 /* We only need to wrap lvalue tree codes. */
2659 switch (TREE_CODE (x))
2670 case ALIGN_INDIRECT_REF:
2671 case MISALIGNED_INDIRECT_REF:
2673 case ARRAY_RANGE_REF:
2679 case PREINCREMENT_EXPR:
2680 case PREDECREMENT_EXPR:
2682 case TRY_CATCH_EXPR:
2683 case WITH_CLEANUP_EXPR:
2694 /* Assume the worst for front-end tree codes. */
2695 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2703 /* Return an expr equal to X but certainly not valid as an lvalue. */
2708 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */
2713 if (! maybe_lvalue_p (x))
2715 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2718 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2719 Zero means allow extended lvalues. */
2721 int pedantic_lvalues;
2723 /* When pedantic, return an expr equal to X but certainly not valid as a
2724 pedantic lvalue. Otherwise, return X. */
2727 pedantic_non_lvalue (tree x)
2729 if (pedantic_lvalues)
2730 return non_lvalue (x);
2735 /* Given a tree comparison code, return the code that is the logical inverse
2736 of the given code. It is not safe to do this for floating-point
2737 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2738 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2741 invert_tree_comparison (enum tree_code code, bool honor_nans)
2743 if (honor_nans && flag_trapping_math)
2753 return honor_nans ? UNLE_EXPR : LE_EXPR;
2755 return honor_nans ? UNLT_EXPR : LT_EXPR;
2757 return honor_nans ? UNGE_EXPR : GE_EXPR;
2759 return honor_nans ? UNGT_EXPR : GT_EXPR;
2773 return UNORDERED_EXPR;
2774 case UNORDERED_EXPR:
2775 return ORDERED_EXPR;
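/* Why reversing needs the HONOR_NANS flag, as a host-C check (assuming
   IEEE semantics; the name is illustrative): with a NaN operand both
   x < y and x >= y are false, so the inverse of LT_EXPR must be
   UNGE_EXPR, not GE_EXPR.  */
static int
invert_lt_demo (double x, double y)
{
  return (!(x < y)) == (x >= y);   /* 0 exactly when x or y is NaN.  */
}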
2781 /* Similar, but return the comparison that results if the operands are
2782 swapped. This is safe for floating-point. */
2785 swap_tree_comparison (enum tree_code code)
2792 case UNORDERED_EXPR:
2818 /* Convert a comparison tree code from an enum tree_code representation
2819 into a compcode bit-based encoding. This function is the inverse of
2820 compcode_to_comparison. */
2822 static enum comparison_code
2823 comparison_to_compcode (enum tree_code code)
2840 return COMPCODE_ORD;
2841 case UNORDERED_EXPR:
2842 return COMPCODE_UNORD;
2844 return COMPCODE_UNLT;
2846 return COMPCODE_UNEQ;
2848 return COMPCODE_UNLE;
2850 return COMPCODE_UNGT;
2852 return COMPCODE_LTGT;
2854 return COMPCODE_UNGE;
2860 /* Convert a compcode bit-based encoding of a comparison operator back
2861 to GCC's enum tree_code representation. This function is the
2862 inverse of comparison_to_compcode. */
2864 static enum tree_code
2865 compcode_to_comparison (enum comparison_code code)
2882 return ORDERED_EXPR;
2883 case COMPCODE_UNORD:
2884 return UNORDERED_EXPR;
2902 /* Return a tree for the comparison which is the combination of
2903 doing the AND or OR (depending on CODE) of the two operations LCODE
2904 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2905 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2906 if this makes the transformation invalid. */
2909 combine_comparisons (enum tree_code code, enum tree_code lcode,
2910 enum tree_code rcode, tree truth_type,
2911 tree ll_arg, tree lr_arg)
2913 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2914 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2915 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2916 enum comparison_code compcode;
2920 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2921 compcode = lcompcode & rcompcode;
2924 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2925 compcode = lcompcode | rcompcode;
2934 /* Eliminate unordered comparisons, as well as LTGT and ORD
2935 which are not used unless the mode has NaNs. */
2936 compcode &= ~COMPCODE_UNORD;
2937 if (compcode == COMPCODE_LTGT)
2938 compcode = COMPCODE_NE;
2939 else if (compcode == COMPCODE_ORD)
2940 compcode = COMPCODE_TRUE;
2942 else if (flag_trapping_math)
2944 /* Check that the original operation and the optimized ones will trap
2945 under the same condition. */
2946 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2947 && (lcompcode != COMPCODE_EQ)
2948 && (lcompcode != COMPCODE_ORD);
2949 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2950 && (rcompcode != COMPCODE_EQ)
2951 && (rcompcode != COMPCODE_ORD);
2952 bool trap = (compcode & COMPCODE_UNORD) == 0
2953 && (compcode != COMPCODE_EQ)
2954 && (compcode != COMPCODE_ORD);
2956 /* In a short-circuited boolean expression the LHS might be
2957 such that the RHS, if evaluated, will never trap. For
2958 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2959 if neither x nor y is NaN. (This is a mixed blessing: for
2960 example, the expression above will never trap, hence
2961 optimizing it to x < y would be invalid). */
2962 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2963 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2966 /* If the comparison was short-circuited, and only the RHS
2967 trapped, we may now generate a spurious trap. */
2969 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2972 /* If we changed the conditions that cause a trap, we lose. */
2973 if ((ltrap || rtrap) != trap)
2977 if (compcode == COMPCODE_TRUE)
2978 return constant_boolean_node (true, truth_type);
2979 else if (compcode == COMPCODE_FALSE)
2980 return constant_boolean_node (false, truth_type);
2982 return fold_build2 (compcode_to_comparison (compcode),
2983 truth_type, ll_arg, lr_arg);
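/* A concrete instance of the bitwise combination, assuming the bit
   encoding LT = 1, EQ = 2, GT = 4 behind enum comparison_code:
   OR-ing LT with EQ yields LE, so "x < y || x == y" folds to
   "x <= y"; AND-ing LE with GE yields EQ.  The names below are local
   to this sketch.  */
static int
compcode_demo (void)
{
  enum { CLT = 1, CEQ = 2, CGT = 4, CLE = CLT | CEQ, CGE = CGT | CEQ };
  return (CLT | CEQ) == CLE	/* (x<y || x==y)  ->  x<=y  */
	 && (CLE & CGE) == CEQ;	/* (x<=y && x>=y) ->  x==y  */
}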
2986 /* Return nonzero if CODE is a tree code that represents a truth value. */
2989 truth_value_p (enum tree_code code)
2991 return (TREE_CODE_CLASS (code) == tcc_comparison
2992 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2993 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2994 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2997 /* Return nonzero if two operands (typically of the same tree node)
2998 are necessarily equal. If either argument has side-effects this
2999 function returns zero. FLAGS modifies behavior as follows:
3001 If OEP_ONLY_CONST is set, only return nonzero for constants.
3002 This function tests whether the operands are indistinguishable;
3003 it does not test whether they are equal using C's == operation.
3004 The distinction is important for IEEE floating point, because
3005 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3006 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3008 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3009 even though it may hold multiple values during a function.
3010 This is because a GCC tree node guarantees that nothing else is
3011 executed between the evaluation of its "operands" (which may often
3012 be evaluated in arbitrary order). Hence if the operands themselves
3013 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3014 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3015 unset means assuming isochronic (or instantaneous) tree equivalence.
3016 Unless comparing arbitrary expression trees, such as from different
3017 statements, this flag can usually be left unset.
3019 If OEP_PURE_SAME is set, then pure functions with identical arguments
3020 are considered the same. It is used when the caller has other ways
3021 to ensure that global memory is unchanged in between. */
3024 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3026 /* If either is ERROR_MARK, they aren't equal. */
3027 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3030 /* Check equality of integer constants before bailing out due to
3031 precision differences. */
3032 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3033 return tree_int_cst_equal (arg0, arg1);
3035 /* If the two types don't have the same signedness, then we can't consider
3036 them equal. We must check this before the STRIP_NOPS calls
3037 because they may change the signedness of the arguments. As pointers
3038 strictly don't have a signedness, require either two pointers or
3039 two non-pointers as well. */
3040 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3041 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3044 /* If the two types don't have the same precision, then it is not safe to strip NOPs. */
3046 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3052 /* In case both args are comparisons but with different comparison
3053 code, try to swap the comparison operands of one arg to produce
3054 a match and compare that variant. */
3055 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3056 && COMPARISON_CLASS_P (arg0)
3057 && COMPARISON_CLASS_P (arg1))
3059 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3061 if (TREE_CODE (arg0) == swap_code)
3062 return operand_equal_p (TREE_OPERAND (arg0, 0),
3063 TREE_OPERAND (arg1, 1), flags)
3064 && operand_equal_p (TREE_OPERAND (arg0, 1),
3065 TREE_OPERAND (arg1, 0), flags);
3068 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3069 /* This is needed for conversions and for COMPONENT_REF.
3070 Might as well play it safe and always test this. */
3071 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3072 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3073 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3076 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3077 We don't care about side effects in that case because the SAVE_EXPR
3078 takes care of that for us. In all other cases, two expressions are
3079 equal if they have no side effects. If we have two identical
3080 expressions with side effects that should be treated the same due
3081 to the only side effects being identical SAVE_EXPR's, that will
3082 be detected in the recursive calls below. */
3083 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3084 && (TREE_CODE (arg0) == SAVE_EXPR
3085 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3088 /* Next handle constant cases, those for which we can return 1 even
3089 if ONLY_CONST is set. */
3090 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3091 switch (TREE_CODE (arg0))
3094 return tree_int_cst_equal (arg0, arg1);
3097 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3098 TREE_FIXED_CST (arg1));
3101 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3102 TREE_REAL_CST (arg1)))
3106 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3108 /* If we do not distinguish between signed and unsigned zero,
3109 consider them equal. */
3110 if (real_zerop (arg0) && real_zerop (arg1))
3119 v1 = TREE_VECTOR_CST_ELTS (arg0);
3120 v2 = TREE_VECTOR_CST_ELTS (arg1);
3123 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3126 v1 = TREE_CHAIN (v1);
3127 v2 = TREE_CHAIN (v2);
3134 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3136 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3140 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3141 && ! memcmp (TREE_STRING_POINTER (arg0),
3142 TREE_STRING_POINTER (arg1),
3143 TREE_STRING_LENGTH (arg0)));
3146 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3152 if (flags & OEP_ONLY_CONST)
3155 /* Define macros to test an operand from arg0 and arg1 for equality and a
3156 variant that allows null and views null as being different from any
3157 non-null value. In the latter case, if either is null, then both
3158 must be; otherwise, do the normal comparison. */
3159 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3160 TREE_OPERAND (arg1, N), flags)
3162 #define OP_SAME_WITH_NULL(N) \
3163 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3164 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3166 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3169 /* Two conversions are equal only if signedness and modes match. */
3170 switch (TREE_CODE (arg0))
3173 case FIX_TRUNC_EXPR:
3174 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3175 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3185 case tcc_comparison:
3187 if (OP_SAME (0) && OP_SAME (1))
3190 /* For commutative ops, allow the other order. */
3191 return (commutative_tree_code (TREE_CODE (arg0))
3192 && operand_equal_p (TREE_OPERAND (arg0, 0),
3193 TREE_OPERAND (arg1, 1), flags)
3194 && operand_equal_p (TREE_OPERAND (arg0, 1),
3195 TREE_OPERAND (arg1, 0), flags));
3198 /* If either of the pointer (or reference) expressions we are
3199 dereferencing contains a side effect, these cannot be equal. */
3200 if (TREE_SIDE_EFFECTS (arg0)
3201 || TREE_SIDE_EFFECTS (arg1))
3204 switch (TREE_CODE (arg0))
3207 case ALIGN_INDIRECT_REF:
3208 case MISALIGNED_INDIRECT_REF:
3214 case ARRAY_RANGE_REF:
3215 /* Operands 2 and 3 may be null.
3216 Compare the array index by value first if it is constant, as we
3217 may have different types but the same value here. */
3219 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3220 TREE_OPERAND (arg1, 1))
3222 && OP_SAME_WITH_NULL (2)
3223 && OP_SAME_WITH_NULL (3));
3226 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3227 may be NULL when we're called to compare MEM_EXPRs. */
3228 return OP_SAME_WITH_NULL (0)
3230 && OP_SAME_WITH_NULL (2);
3233 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3239 case tcc_expression:
3240 switch (TREE_CODE (arg0))
3243 case TRUTH_NOT_EXPR:
3246 case TRUTH_ANDIF_EXPR:
3247 case TRUTH_ORIF_EXPR:
3248 return OP_SAME (0) && OP_SAME (1);
3250 case TRUTH_AND_EXPR:
3252 case TRUTH_XOR_EXPR:
3253 if (OP_SAME (0) && OP_SAME (1))
3256 /* Otherwise take into account that this is a commutative operation. */
3257 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3258 TREE_OPERAND (arg1, 1), flags)
3259 && operand_equal_p (TREE_OPERAND (arg0, 1),
3260 TREE_OPERAND (arg1, 0), flags));
3263 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3270 switch (TREE_CODE (arg0))
3273 /* If the CALL_EXPRs call different functions, then they
3274 clearly cannot be equal. */
3275 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3280 unsigned int cef = call_expr_flags (arg0);
3281 if (flags & OEP_PURE_SAME)
3282 cef &= ECF_CONST | ECF_PURE;
3289 /* Now see if all the arguments are the same. */
3291 const_call_expr_arg_iterator iter0, iter1;
3293 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3294 a1 = first_const_call_expr_arg (arg1, &iter1);
3296 a0 = next_const_call_expr_arg (&iter0),
3297 a1 = next_const_call_expr_arg (&iter1))
3298 if (! operand_equal_p (a0, a1, flags))
3301 /* If we get here and both argument lists are exhausted
3302 then the CALL_EXPRs are equal. */
3303 return ! (a0 || a1);
3309 case tcc_declaration:
3310 /* Consider __builtin_sqrt equal to sqrt. */
3311 return (TREE_CODE (arg0) == FUNCTION_DECL
3312 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3313 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3314 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3321 #undef OP_SAME_WITH_NULL
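/* The signed-zero point documented above, in host C (assuming IEEE
   arithmetic): -0.0 and 0.0 compare equal with ==, yet they are
   distinguishable, so operand_equal_p must keep them apart whenever
   signed zeros are honored.  */
static int
signed_zero_demo (void)
{
  double nz = -0.0, pz = 0.0;
  return nz == pz && 1.0 / nz != 1.0 / pz;   /* Yields 1.  */
}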
3324 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3325 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3327 When in doubt, return 0. */
3330 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3332 int unsignedp1, unsignedpo;
3333 tree primarg0, primarg1, primother;
3334 unsigned int correct_width;
3336 if (operand_equal_p (arg0, arg1, 0))
3339 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3340 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3343 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3344 and see if the inner values are the same. This removes any
3345 signedness comparison, which doesn't matter here. */
3346 primarg0 = arg0, primarg1 = arg1;
3347 STRIP_NOPS (primarg0);
3348 STRIP_NOPS (primarg1);
3349 if (operand_equal_p (primarg0, primarg1, 0))
3352 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3353 actual comparison operand, ARG0.
3355 First throw away any conversions to wider types
3356 already present in the operands. */
3358 primarg1 = get_narrower (arg1, &unsignedp1);
3359 primother = get_narrower (other, &unsignedpo);
3361 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3362 if (unsignedp1 == unsignedpo
3363 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3364 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3366 tree type = TREE_TYPE (arg0);
3368 /* Make sure the shorter operand is extended the right way
3369 to match the longer operand. */
3370 primarg1 = fold_convert (signed_or_unsigned_type_for
3371 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3373 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3380 /* See if ARG is an expression that is either a comparison or is performing
3381 arithmetic on comparisons. The comparisons must only be comparing
3382 two different values, which will be stored in *CVAL1 and *CVAL2; if
3383 they are nonzero it means that some operands have already been found.
3384 No variables may be used anywhere else in the expression except in the
3385 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3386 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3388 If this is true, return 1. Otherwise, return zero. */
3391 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3393 enum tree_code code = TREE_CODE (arg);
3394 enum tree_code_class class = TREE_CODE_CLASS (code);
3396 /* We can handle some of the tcc_expression cases here. */
3397 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3399 else if (class == tcc_expression
3400 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3401 || code == COMPOUND_EXPR))
3404 else if (class == tcc_expression && code == SAVE_EXPR
3405 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3407 /* If we've already found a CVAL1 or CVAL2, this expression is
3408 too complex to handle. */
3409 if (*cval1 || *cval2)
3419 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3422 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3423 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3424 cval1, cval2, save_p));
3429 case tcc_expression:
3430 if (code == COND_EXPR)
3431 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3432 cval1, cval2, save_p)
3433 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3434 cval1, cval2, save_p)
3435 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3436 cval1, cval2, save_p));
3439 case tcc_comparison:
3440 /* First see if we can handle the first operand, then the second. For
3441 the second operand, we know *CVAL1 can't be zero. It must be that
3442 one side of the comparison is each of the values; test for the
3443 case where this isn't true by failing if the two operands are the same. */
3446 if (operand_equal_p (TREE_OPERAND (arg, 0),
3447 TREE_OPERAND (arg, 1), 0))
3451 *cval1 = TREE_OPERAND (arg, 0);
3452 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3454 else if (*cval2 == 0)
3455 *cval2 = TREE_OPERAND (arg, 0);
3456 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3461 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3463 else if (*cval2 == 0)
3464 *cval2 = TREE_OPERAND (arg, 1);
3465 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3477 /* ARG is a tree that is known to contain just arithmetic operations and
3478 comparisons. Evaluate the operations in the tree substituting NEW0 for
3479 any occurrence of OLD0 as an operand of a comparison, and likewise NEW1 for OLD1. */
3483 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3485 tree type = TREE_TYPE (arg);
3486 enum tree_code code = TREE_CODE (arg);
3487 enum tree_code_class class = TREE_CODE_CLASS (code);
3489 /* We can handle some of the tcc_expression cases here. */
3490 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3492 else if (class == tcc_expression
3493 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3499 return fold_build1 (code, type,
3500 eval_subst (TREE_OPERAND (arg, 0),
3501 old0, new0, old1, new1));
3504 return fold_build2 (code, type,
3505 eval_subst (TREE_OPERAND (arg, 0),
3506 old0, new0, old1, new1),
3507 eval_subst (TREE_OPERAND (arg, 1),
3508 old0, new0, old1, new1));
3510 case tcc_expression:
3514 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3517 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3520 return fold_build3 (code, type,
3521 eval_subst (TREE_OPERAND (arg, 0),
3522 old0, new0, old1, new1),
3523 eval_subst (TREE_OPERAND (arg, 1),
3524 old0, new0, old1, new1),
3525 eval_subst (TREE_OPERAND (arg, 2),
3526 old0, new0, old1, new1));
3530 /* Fall through - ??? */
3532 case tcc_comparison:
3534 tree arg0 = TREE_OPERAND (arg, 0);
3535 tree arg1 = TREE_OPERAND (arg, 1);
3537 /* We need to check both for exact equality and tree equality. The
3538 former will be true if the operand has a side-effect. In that
3539 case, we know the operand occurred exactly once. */
3541 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3543 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3546 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3548 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3551 return fold_build2 (code, type, arg0, arg1);
3559 /* Return a tree for the case when the result of an expression is RESULT
3560 converted to TYPE and OMITTED was previously an operand of the expression
3561 but is now not needed (e.g., we folded OMITTED * 0).
3563 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3564 the conversion of RESULT to TYPE. */
3567 omit_one_operand (tree type, tree result, tree omitted)
3569 tree t = fold_convert (type, result);
3571 /* If the resulting operand is an empty statement, just return the omitted
3572 statement cast to void. */
3573 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3574 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3576 if (TREE_SIDE_EFFECTS (omitted))
3577 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3579 return non_lvalue (t);
3582 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3585 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3587 tree t = fold_convert (type, result);
3589 /* If the resulting operand is an empty statement, just return the omitted
3590 statement cast to void. */
3591 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3592 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3594 if (TREE_SIDE_EFFECTS (omitted))
3595 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3597 return pedantic_non_lvalue (t);
3600 /* Return a tree for the case when the result of an expression is RESULT
3601 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3602 of the expression but are now not needed.
3604 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3605 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3606 evaluated before OMITTED2. Otherwise, if neither has side effects,
3607 just do the conversion of RESULT to TYPE. */
3610 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3612 tree t = fold_convert (type, result);
3614 if (TREE_SIDE_EFFECTS (omitted2))
3615 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3616 if (TREE_SIDE_EFFECTS (omitted1))
3617 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3619 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
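/* What the COMPOUND_EXPRs built above mean at the source level, for a
   fold such as "g () * 0" where g is a hypothetical function with a
   side effect: the call must still happen, only its value is dropped.  */
static int effect_count;
static int g (void) { return ++effect_count; }
static int
omit_operand_demo (void)
{
  return (g (), 0);		/* Evaluate g for its effect, yield 0.  */
}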
3623 /* Return a simplified tree node for the truth-negation of ARG. This
3624 never alters ARG itself. We assume that ARG is an operation that
3625 returns a truth value (0 or 1).
3627 FIXME: one would think we would fold the result, but it causes
3628 problems with the dominator optimizer. */
3631 fold_truth_not_expr (tree arg)
3633 tree type = TREE_TYPE (arg);
3634 enum tree_code code = TREE_CODE (arg);
3636 /* If this is a comparison, we can simply invert it, except for
3637 floating-point non-equality comparisons, in which case we just
3638 enclose a TRUTH_NOT_EXPR around what we have. */
3640 if (TREE_CODE_CLASS (code) == tcc_comparison)
3642 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3643 if (FLOAT_TYPE_P (op_type)
3644 && flag_trapping_math
3645 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3646 && code != NE_EXPR && code != EQ_EXPR)
3650 code = invert_tree_comparison (code,
3651 HONOR_NANS (TYPE_MODE (op_type)));
3652 if (code == ERROR_MARK)
3655 return build2 (code, type,
3656 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3663 return constant_boolean_node (integer_zerop (arg), type);
3665 case TRUTH_AND_EXPR:
3666 return build2 (TRUTH_OR_EXPR, type,
3667 invert_truthvalue (TREE_OPERAND (arg, 0)),
3668 invert_truthvalue (TREE_OPERAND (arg, 1)));
3671 return build2 (TRUTH_AND_EXPR, type,
3672 invert_truthvalue (TREE_OPERAND (arg, 0)),
3673 invert_truthvalue (TREE_OPERAND (arg, 1)));
3675 case TRUTH_XOR_EXPR:
3676 /* Here we can invert either operand. We invert the first operand
3677 unless the second operand is a TRUTH_NOT_EXPR in which case our
3678 result is the XOR of the first operand with the inside of the
3679 negation of the second operand. */
3681 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3682 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3683 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3685 return build2 (TRUTH_XOR_EXPR, type,
3686 invert_truthvalue (TREE_OPERAND (arg, 0)),
3687 TREE_OPERAND (arg, 1));
3689 case TRUTH_ANDIF_EXPR:
3690 return build2 (TRUTH_ORIF_EXPR, type,
3691 invert_truthvalue (TREE_OPERAND (arg, 0)),
3692 invert_truthvalue (TREE_OPERAND (arg, 1)));
3694 case TRUTH_ORIF_EXPR:
3695 return build2 (TRUTH_ANDIF_EXPR, type,
3696 invert_truthvalue (TREE_OPERAND (arg, 0)),
3697 invert_truthvalue (TREE_OPERAND (arg, 1)));
3699 case TRUTH_NOT_EXPR:
3700 return TREE_OPERAND (arg, 0);
3704 tree arg1 = TREE_OPERAND (arg, 1);
3705 tree arg2 = TREE_OPERAND (arg, 2);
3706 /* A COND_EXPR may have a throw as one operand, which
3707 then has void type. Just leave void operands alone. */
3709 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3710 VOID_TYPE_P (TREE_TYPE (arg1))
3711 ? arg1 : invert_truthvalue (arg1),
3712 VOID_TYPE_P (TREE_TYPE (arg2))
3713 ? arg2 : invert_truthvalue (arg2));
3717 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3718 invert_truthvalue (TREE_OPERAND (arg, 1)));
3720 case NON_LVALUE_EXPR:
3721 return invert_truthvalue (TREE_OPERAND (arg, 0));
3724 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3725 return build1 (TRUTH_NOT_EXPR, type, arg);
3729 return build1 (TREE_CODE (arg), type,
3730 invert_truthvalue (TREE_OPERAND (arg, 0)));
3733 if (!integer_onep (TREE_OPERAND (arg, 1)))
3735 return build2 (EQ_EXPR, type, arg,
3736 build_int_cst (type, 0));
3739 return build1 (TRUTH_NOT_EXPR, type, arg);
3741 case CLEANUP_POINT_EXPR:
3742 return build1 (CLEANUP_POINT_EXPR, type,
3743 invert_truthvalue (TREE_OPERAND (arg, 0)));
3752 /* Return a simplified tree node for the truth-negation of ARG. This
3753 never alters ARG itself. We assume that ARG is an operation that
3754 returns a truth value (0 or 1).
3756 FIXME: one would think we would fold the result, but it causes
3757 problems with the dominator optimizer. */
3760 invert_truthvalue (tree arg)
3764 if (TREE_CODE (arg) == ERROR_MARK)
3767 tem = fold_truth_not_expr (arg);
3769 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
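/* The AND/OR inversions performed above are De Morgan's laws; a
   host-C identity check (always returns 1):  */
static int
demorgan_demo (int a, int b)
{
  return (!(a && b)) == (!a || !b)	/* TRUTH_ANDIF -> TRUTH_ORIF  */
	 && (!(a || b)) == (!a && !b);	/* TRUTH_ORIF -> TRUTH_ANDIF  */
}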
3774 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3775 operands are another bit-wise operation with a common input. If so,
3776 distribute the bit operations to save an operation and possibly two if
3777 constants are involved. For example, convert
3778 (A | B) & (A | C) into A | (B & C).
3779 Further simplification will occur if B and C are constants.
3781 If this optimization cannot be done, 0 will be returned. */
3784 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3789 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3790 || TREE_CODE (arg0) == code
3791 || (TREE_CODE (arg0) != BIT_AND_EXPR
3792 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3795 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3797 common = TREE_OPERAND (arg0, 0);
3798 left = TREE_OPERAND (arg0, 1);
3799 right = TREE_OPERAND (arg1, 1);
3801 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3803 common = TREE_OPERAND (arg0, 0);
3804 left = TREE_OPERAND (arg0, 1);
3805 right = TREE_OPERAND (arg1, 0);
3807 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3809 common = TREE_OPERAND (arg0, 1);
3810 left = TREE_OPERAND (arg0, 0);
3811 right = TREE_OPERAND (arg1, 1);
3813 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3815 common = TREE_OPERAND (arg0, 1);
3816 left = TREE_OPERAND (arg0, 0);
3817 right = TREE_OPERAND (arg1, 0);
3822 return fold_build2 (TREE_CODE (arg0), type, common,
3823 fold_build2 (code, type, left, right));
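/* A quick host-C check of the identity that licenses the rewrite; it
   holds for every bit pattern, which is why the fold needs no guard.  */
static int
distribute_demo (unsigned int a, unsigned int b, unsigned int c)
{
  return ((a | b) & (a | c)) == (a | (b & c));	/* Always 1.  */
}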
3826 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3827 with code CODE. This optimization is unsafe. */
3829 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3831 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3832 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3834 /* (A / C) +- (B / C) -> (A +- B) / C. */
3836 && operand_equal_p (TREE_OPERAND (arg0, 1),
3837 TREE_OPERAND (arg1, 1), 0))
3838 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3839 fold_build2 (code, type,
3840 TREE_OPERAND (arg0, 0),
3841 TREE_OPERAND (arg1, 0)),
3842 TREE_OPERAND (arg0, 1));
3844 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3845 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3846 TREE_OPERAND (arg1, 0), 0)
3847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3848 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3850 REAL_VALUE_TYPE r0, r1;
3851 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3852 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3854 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3856 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3857 real_arithmetic (&r0, code, &r0, &r1);
3858 return fold_build2 (MULT_EXPR, type,
3859 TREE_OPERAND (arg0, 0),
3860 build_real (type, r0));
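/* The first rewrite above at the source level; rounding of the two
   forms can differ, which is why the comment calls the transformation
   unsafe and callers only apply it under unsafe-math semantics.  */
static double
rdiv_distribute_demo (double a, double b, double c)
{
  return (a + b) / c;		/* Stands in for a / c + b / c.  */
}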
3866 /* Subroutine for fold_truthop: decode a field reference.
3868 If EXP is a comparison reference, we return the innermost reference.
3870 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3871 set to the starting bit number.
3873 If the innermost field can be completely contained in a mode-sized
3874 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3876 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3877 otherwise it is not changed.
3879 *PUNSIGNEDP is set to the signedness of the field.
3881 *PMASK is set to the mask used. This is either contained in a
3882 BIT_AND_EXPR or derived from the width of the field.
3884 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3886 Return 0 if this is not a component reference or is one that we can't
3887 do anything with. */
3890 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3891 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3892 int *punsignedp, int *pvolatilep,
3893 tree *pmask, tree *pand_mask)
3895 tree outer_type = 0;
3897 tree mask, inner, offset;
3899 unsigned int precision;
3901 /* All the optimizations using this function assume integer fields.
3902 There are problems with FP fields since the type_for_size call
3903 below can fail for, e.g., XFmode. */
3904 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3907 /* We are interested in the bare arrangement of bits, so strip everything
3908 that doesn't affect the machine mode. However, record the type of the
3909 outermost expression if it may matter below. */
3910 if (CONVERT_EXPR_P (exp)
3911 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3912 outer_type = TREE_TYPE (exp);
3915 if (TREE_CODE (exp) == BIT_AND_EXPR)
3917 and_mask = TREE_OPERAND (exp, 1);
3918 exp = TREE_OPERAND (exp, 0);
3919 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3920 if (TREE_CODE (and_mask) != INTEGER_CST)
3924 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3925 punsignedp, pvolatilep, false);
3926 if ((inner == exp && and_mask == 0)
3927 || *pbitsize < 0 || offset != 0
3928 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3931 /* If the number of bits in the reference is the same as the bitsize of
3932 the outer type, then the outer type gives the signedness. Otherwise
3933 (in case of a small bitfield) the signedness is unchanged. */
3934 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3935 *punsignedp = TYPE_UNSIGNED (outer_type);
3937 /* Compute the mask to access the bitfield. */
3938 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3939 precision = TYPE_PRECISION (unsigned_type);
3941 mask = build_int_cst_type (unsigned_type, -1);
3943 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3944 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3946 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3948 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3949 fold_convert (unsigned_type, and_mask), mask);
3952 *pand_mask = and_mask;
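/* The mask construction above, in host C under the assumption that the
   field's unsigned type is a 32-bit unsigned int: shifting all-ones
   left and then right by (precision - bitsize) leaves exactly the low
   BITSIZE bits set.  */
static unsigned int
field_mask (unsigned int precision, unsigned int bitsize)
{
  unsigned int mask = ~0u;		/* The all-ones constant.  */
  mask <<= precision - bitsize;		/* The LSHIFT_EXPR step.  */
  mask >>= precision - bitsize;		/* The RSHIFT_EXPR step.  */
  return mask;				/* field_mask (32, 5) == 0x1f.  */
}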
3956 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3957 represents the sign bit of EXP's type. If EXP represents a sign
3958 or zero extension, also test VAL against the unextended type.
3959 The return value is the (sub)expression whose sign bit is VAL,
3960 or NULL_TREE otherwise. */
3963 sign_bit_p (tree exp, const_tree val)
3965 unsigned HOST_WIDE_INT mask_lo, lo;
3966 HOST_WIDE_INT mask_hi, hi;
3970 /* Tree EXP must have an integral type. */
3971 t = TREE_TYPE (exp);
3972 if (! INTEGRAL_TYPE_P (t))
3975 /* Tree VAL must be an integer constant. */
3976 if (TREE_CODE (val) != INTEGER_CST
3977 || TREE_OVERFLOW (val))
3980 width = TYPE_PRECISION (t);
3981 if (width > HOST_BITS_PER_WIDE_INT)
3983 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3986 mask_hi = ((unsigned HOST_WIDE_INT) -1
3987 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3993 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3996 mask_lo = ((unsigned HOST_WIDE_INT) -1
3997 >> (HOST_BITS_PER_WIDE_INT - width));
4000 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4001 treat VAL as if it were unsigned. */
4002 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4003 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4006 /* Handle extension from a narrower type. */
4007 if (TREE_CODE (exp) == NOP_EXPR
4008 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4009 return sign_bit_p (TREE_OPERAND (exp, 0), val);
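/* The constant being matched above, for widths up to the host word:
   the sign bit of a WIDTH-bit type is 1 << (WIDTH - 1), e.g. 0x80 for
   signed char.  A sketch with illustrative names:  */
static int
is_sign_bit (unsigned long long val, unsigned int width)
{
  return val == (1ULL << (width - 1));
}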
4014 /* Subroutine for fold_truthop: determine if an operand is simple enough
4015 to be evaluated unconditionally. */
4018 simple_operand_p (const_tree exp)
4020 /* Strip any conversions that don't change the machine mode. */
4023 return (CONSTANT_CLASS_P (exp)
4024 || TREE_CODE (exp) == SSA_NAME
4026 && ! TREE_ADDRESSABLE (exp)
4027 && ! TREE_THIS_VOLATILE (exp)
4028 && ! DECL_NONLOCAL (exp)
4029 /* Don't regard global variables as simple. They may be
4030 allocated in ways unknown to the compiler (shared memory,
4031 #pragma weak, etc). */
4032 && ! TREE_PUBLIC (exp)
4033 && ! DECL_EXTERNAL (exp)
4034 /* Loading a static variable is unduly expensive, but global
4035 registers aren't expensive. */
4036 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4039 /* The following functions are subroutines to fold_range_test and allow it to
4040 try to change a logical combination of comparisons into a range test.
4043 For example, X == 2 || X == 3 || X == 4 || X == 5
4047 is converted to (unsigned) (X - 2) <= 3.
4049 We describe each set of comparisons as being either inside or outside
4050 a range, using a variable named like IN_P, and then describe the
4051 range with a lower and upper bound. If one of the bounds is omitted,
4052 it represents either the highest or lowest value of the type.
4054 In the comments below, we represent a range by two numbers in brackets
4055 preceded by a "+" to designate being inside that range, or a "-" to
4056 designate being outside that range, so the condition can be inverted by
4057 flipping the prefix. An omitted bound is represented by a "-". For
4058 example, "- [-, 10]" means being outside the range starting at the lowest
4059 possible value and ending at 10, in other words, being greater than 10.
4060 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4063 We set up things so that the missing bounds are handled in a consistent
4064 manner so neither a missing bound nor "true" and "false" need to be
4065 handled using a special case. */
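/* The example above written out in C; absent signed overflow in the
   subtraction, the biased unsigned comparison is equivalent to the
   four-way disjunction:  */
static int
in_2_to_5 (int x)
{
  return (unsigned int) (x - 2) <= 3;	/* x == 2 || ... || x == 5  */
}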
4067 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4068 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4069 and UPPER1_P are nonzero if the respective argument is an upper bound
4070 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4071 must be specified for a comparison. ARG1 will be converted to ARG0's
4072 type if both are specified. */
4075 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4076 tree arg1, int upper1_p)
4082 /* If neither arg represents infinity, do the normal operation.
4083 Else, if not a comparison, return infinity. Else handle the special
4084 comparison rules. Note that most of the cases below won't occur, but
4085 are handled for consistency. */
4087 if (arg0 != 0 && arg1 != 0)
4089 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4090 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4092 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4095 if (TREE_CODE_CLASS (code) != tcc_comparison)
4098 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4099 for neither. In real maths, we cannot assume open ended ranges are
4100 the same. But, this is computer arithmetic, where numbers are finite.
4101 We can therefore make the transformation of any unbounded range with
4102 the value Z, Z being greater than any representable number. This permits
4103 us to treat unbounded ranges as equal. */
4104 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4105 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4109 result = sgn0 == sgn1;
4112 result = sgn0 != sgn1;
4115 result = sgn0 < sgn1;
4118 result = sgn0 <= sgn1;
4121 result = sgn0 > sgn1;
4124 result = sgn0 >= sgn1;
4130 return constant_boolean_node (result, type);
4133 /* Given EXP, a logical expression, set the range it is testing into
4134 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4135 actually being tested. *PLOW and *PHIGH will be made of the same
4136 type as the returned expression. If EXP is not a comparison, we
4137 will most likely not be returning a useful value and range. Set
4138 *STRICT_OVERFLOW_P to true if the return value is only valid
4139 because signed overflow is undefined; otherwise, do not change
4140 *STRICT_OVERFLOW_P. */
4143 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4144 bool *strict_overflow_p)
4146 enum tree_code code;
4147 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4148 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4150 tree low, high, n_low, n_high;
4152 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4153 and see if we can refine the range. Some of the cases below may not
4154 happen, but it doesn't seem worth worrying about this. We "continue"
4155 the outer loop when we've changed something; otherwise we "break"
4156 the switch, which will "break" the while. */
4159 low = high = build_int_cst (TREE_TYPE (exp), 0);
4163 code = TREE_CODE (exp);
4164 exp_type = TREE_TYPE (exp);
4166 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4168 if (TREE_OPERAND_LENGTH (exp) > 0)
4169 arg0 = TREE_OPERAND (exp, 0);
4170 if (TREE_CODE_CLASS (code) == tcc_comparison
4171 || TREE_CODE_CLASS (code) == tcc_unary
4172 || TREE_CODE_CLASS (code) == tcc_binary)
4173 arg0_type = TREE_TYPE (arg0);
4174 if (TREE_CODE_CLASS (code) == tcc_binary
4175 || TREE_CODE_CLASS (code) == tcc_comparison
4176 || (TREE_CODE_CLASS (code) == tcc_expression
4177 && TREE_OPERAND_LENGTH (exp) > 1))
4178 arg1 = TREE_OPERAND (exp, 1);
4183 case TRUTH_NOT_EXPR:
4184 in_p = ! in_p, exp = arg0;
4187 case EQ_EXPR: case NE_EXPR:
4188 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4189 /* We can only do something if the range is testing for zero
4190 and if the second operand is an integer constant. Note that
4191 saying something is "in" the range we make is done by
4192 complementing IN_P, since IN_P is set in the initial case of
4193 being not equal to zero; "out" is leaving it alone. */
4194 if (low == 0 || high == 0
4195 || ! integer_zerop (low) || ! integer_zerop (high)
4196 || TREE_CODE (arg1) != INTEGER_CST)
4201 case NE_EXPR: /* - [c, c] */
4204 case EQ_EXPR: /* + [c, c] */
4205 in_p = ! in_p, low = high = arg1;
4207 case GT_EXPR: /* - [-, c] */
4208 low = 0, high = arg1;
4210 case GE_EXPR: /* + [c, -] */
4211 in_p = ! in_p, low = arg1, high = 0;
4213 case LT_EXPR: /* - [c, -] */
4214 low = arg1, high = 0;
4216 case LE_EXPR: /* + [-, c] */
4217 in_p = ! in_p, low = 0, high = arg1;
4223 /* If this is an unsigned comparison, we also know that EXP is
4224 greater than or equal to zero. We base the range tests we make
4225 on that fact, so we record it here so we can parse existing
4226 range tests. We test arg0_type since often the return type
4227 of, e.g. EQ_EXPR, is boolean. */
4228 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4230 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4232 build_int_cst (arg0_type, 0),
4236 in_p = n_in_p, low = n_low, high = n_high;
4238 /* If the high bound is missing, but we have a nonzero low
4239 bound, reverse the range so it goes from zero to the low bound minus 1. */
4241 if (high == 0 && low && ! integer_zerop (low))
4244 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4245 integer_one_node, 0);
4246 low = build_int_cst (arg0_type, 0);
4254 /* (-x) IN [a,b] -> x in [-b, -a] */
4255 n_low = range_binop (MINUS_EXPR, exp_type,
4256 build_int_cst (exp_type, 0),
4258 n_high = range_binop (MINUS_EXPR, exp_type,
4259 build_int_cst (exp_type, 0),
4261 low = n_low, high = n_high;
4267 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4268 build_int_cst (exp_type, 1));
4271 case PLUS_EXPR: case MINUS_EXPR:
4272 if (TREE_CODE (arg1) != INTEGER_CST)
4275 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4276 move a constant to the other side. */
4277 if (!TYPE_UNSIGNED (arg0_type)
4278 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4281 /* If EXP is signed, any overflow in the computation is undefined,
4282 so we don't worry about it so long as our computations on
4283 the bounds don't overflow. For unsigned, overflow is defined
4284 and this is exactly the right thing. */
4285 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4286 arg0_type, low, 0, arg1, 0);
4287 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4288 arg0_type, high, 1, arg1, 0);
4289 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4290 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4293 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4294 *strict_overflow_p = true;
4296 /* Check for an unsigned range which has wrapped around the maximum
4297 value thus making n_high < n_low, and normalize it. */
4298 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4300 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4301 integer_one_node, 0);
4302 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4303 integer_one_node, 0);
4305 /* If the range is of the form +/- [ x+1, x ], we won't
4306 be able to normalize it. But then, it represents the
4307 whole range or the empty set, so make it +/- [ -, - ]. */
4309 if (tree_int_cst_equal (n_low, low)
4310 && tree_int_cst_equal (n_high, high))
4316 low = n_low, high = n_high;
4321 CASE_CONVERT: case NON_LVALUE_EXPR:
4322 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4325 if (! INTEGRAL_TYPE_P (arg0_type)
4326 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4327 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4330 n_low = low, n_high = high;
4333 n_low = fold_convert (arg0_type, n_low);
4336 n_high = fold_convert (arg0_type, n_high);
4339 /* If we're converting arg0 from an unsigned type to exp,
4340 a signed type, we will be doing the comparison as unsigned.
4341 The tests above have already verified that LOW and HIGH are both positive.
4344 So we have to ensure that we will handle large unsigned
4345 values the same way that the current signed bounds treat negative values. */
4348 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4352 /* For fixed-point modes, we need to pass the saturating flag
4353 as the 2nd parameter. */
4354 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4355 equiv_type = lang_hooks.types.type_for_mode
4356 (TYPE_MODE (arg0_type),
4357 TYPE_SATURATING (arg0_type));
4359 equiv_type = lang_hooks.types.type_for_mode
4360 (TYPE_MODE (arg0_type), 1);
4362 /* A range without an upper bound is, naturally, unbounded.
4363 Since convert would have cropped a very large value, use
4364 the max value for the destination type. */
4366 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4367 : TYPE_MAX_VALUE (arg0_type);
4369 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4370 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4371 fold_convert (arg0_type,
4373 build_int_cst (arg0_type, 1));
4375 /* If the low bound is specified, "and" the range with the
4376 range for which the original unsigned value will be positive. */
4380 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4381 1, n_low, n_high, 1,
4382 fold_convert (arg0_type,
4387 in_p = (n_in_p == in_p);
4391 /* Otherwise, "or" the range with the range of the input
4392 that will be interpreted as negative. */
4393 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4394 0, n_low, n_high, 1,
4395 fold_convert (arg0_type,
4400 in_p = (in_p != n_in_p);
4405 low = n_low, high = n_high;
4415 /* If EXP is a constant, we can evaluate whether this is true or false. */
4416 if (TREE_CODE (exp) == INTEGER_CST)
4418 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4420 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4426 *pin_p = in_p, *plow = low, *phigh = high;
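/* A concrete instance of the PLUS_EXPR case above: "x + 10 in [0, 19]"
   has the constant moved onto the bounds, giving "x in [-10, 9]".
   The two tests agree whenever x + 10 does not overflow, which is why
   *STRICT_OVERFLOW_P gets set for signed types.  */
static int
range_shift_demo (int x)
{
  return ((x + 10 >= 0) && (x + 10 <= 19)) == (x >= -10 && x <= 9);
}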
4430 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4431 type, TYPE, return an expression to test if EXP is in (or out of, depending
4432 on IN_P) the range. Return 0 if the test couldn't be created. */
4435 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4437 tree etype = TREE_TYPE (exp);
4440 #ifdef HAVE_canonicalize_funcptr_for_compare
4441 /* Disable this optimization for function pointer expressions
4442 on targets that require function pointer canonicalization. */
4443 if (HAVE_canonicalize_funcptr_for_compare
4444 && TREE_CODE (etype) == POINTER_TYPE
4445 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4451 value = build_range_check (type, exp, 1, low, high);
4453 return invert_truthvalue (value);
4458 if (low == 0 && high == 0)
4459 return build_int_cst (type, 1);
4462 return fold_build2 (LE_EXPR, type, exp,
4463 fold_convert (etype, high));
4466 return fold_build2 (GE_EXPR, type, exp,
4467 fold_convert (etype, low));
4469 if (operand_equal_p (low, high, 0))
4470 return fold_build2 (EQ_EXPR, type, exp,
4471 fold_convert (etype, low));
4473 if (integer_zerop (low))
4475 if (! TYPE_UNSIGNED (etype))
4477 etype = unsigned_type_for (etype);
4478 high = fold_convert (etype, high);
4479 exp = fold_convert (etype, exp);
4481 return build_range_check (type, exp, 1, 0, high);
4484 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4485 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4487 unsigned HOST_WIDE_INT lo;
4491 prec = TYPE_PRECISION (etype);
4492 if (prec <= HOST_BITS_PER_WIDE_INT)
4495 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4499 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4500 lo = (unsigned HOST_WIDE_INT) -1;
4503 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4505 if (TYPE_UNSIGNED (etype))
4507 etype = signed_type_for (etype);
4508 exp = fold_convert (etype, exp);
4510 return fold_build2 (GT_EXPR, type, exp,
4511 build_int_cst (etype, 0));
4515 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4516 This requires wrap-around arithmetic for the type of the expression. */
4517 switch (TREE_CODE (etype))
4520 /* There is no requirement that LOW be within the range of ETYPE
4521 if the latter is a subtype. It must, however, be within the base
4522 type of ETYPE. So be sure we do the subtraction in that type. */
4523 if (TREE_TYPE (etype))
4524 etype = TREE_TYPE (etype);
4529 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4530 TYPE_UNSIGNED (etype));
4537 /* If we don't have wrap-around arithmetic up front, try to force it. */
4538 if (TREE_CODE (etype) == INTEGER_TYPE
4539 && !TYPE_OVERFLOW_WRAPS (etype))
4541 tree utype, minv, maxv;
4543 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4544 for the type in question, as we rely on this here. */
4545 utype = unsigned_type_for (etype);
4546 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4547 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4548 integer_one_node, 1);
4549 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4551 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4558 high = fold_convert (etype, high);
4559 low = fold_convert (etype, low);
4560 exp = fold_convert (etype, exp);
4562 value = const_binop (MINUS_EXPR, high, low, 0);
4565 if (POINTER_TYPE_P (etype))
4567 if (value != 0 && !TREE_OVERFLOW (value))
4569 low = fold_convert (sizetype, low);
4570 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4571 return build_range_check (type,
4572 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4573 1, build_int_cst (etype, 0), value);
4578 if (value != 0 && !TREE_OVERFLOW (value))
4579 return build_range_check (type,
4580 fold_build2 (MINUS_EXPR, etype, exp, low),
4581 1, build_int_cst (etype, 0), value);
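/* A minimal sketch of the range-check transformation built above, assuming
   8-bit chars and wrap-around unsigned arithmetic; the helper is
   hypothetical and for illustration only.  Subtracting the low bound
   collapses the two-sided test into a single unsigned comparison.  */

static int
is_digit_sketch (unsigned char c)
{
  /* (c >= '0' && c <= '9')  becomes  (unsigned char) (c - '0') <= 9.  */
  return (unsigned char) (c - '0') <= 9;
}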
4586 /* Return the predecessor of VAL in its type, handling the infinite case. */
4589 range_predecessor (tree val)
4591 tree type = TREE_TYPE (val);
4593 if (INTEGRAL_TYPE_P (type)
4594 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4597 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4600 /* Return the successor of VAL in its type, handling the infinite case. */
4603 range_successor (tree val)
4605 tree type = TREE_TYPE (val);
4607 if (INTEGRAL_TYPE_P (type)
4608 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4611 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
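/* A sketch of the boundary behavior on a hypothetical unsigned char
   analogue: past the extreme value there is no successor, which the
   routines above report by returning 0 (NULL_TREE).  Illustration only.  */

static int
successor_sketch (unsigned char val, unsigned char *result)
{
  if (val == 255)		/* TYPE_MAX_VALUE, assuming 8-bit chars */
    return 0;			/* no successor: treat the bound as infinite */
  *result = val + 1;
  return 1;
}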
4614 /* Given two ranges, see if we can merge them into one. Return 1 if we
4615 can, 0 if we can't. Set the output range into the specified parameters. */
4618 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4619 tree high0, int in1_p, tree low1, tree high1)
4627 int lowequal = ((low0 == 0 && low1 == 0)
4628 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4629 low0, 0, low1, 0)));
4630 int highequal = ((high0 == 0 && high1 == 0)
4631 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4632 high0, 1, high1, 1)));
4634 /* Make range 0 be the range that starts first, or ends last if they
4635 start at the same value. Swap them if it isn't. */
4636 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4639 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4640 high1, 1, high0, 1))))
4642 temp = in0_p, in0_p = in1_p, in1_p = temp;
4643 tem = low0, low0 = low1, low1 = tem;
4644 tem = high0, high0 = high1, high1 = tem;
4647 /* Now flag two cases, whether the ranges are disjoint or whether the
4648 second range is totally subsumed in the first. Note that the tests
4649 below are simplified by the ones above. */
4650 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4651 high0, 1, low1, 0));
4652 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4653 high1, 1, high0, 1));
4655 /* We now have four cases, depending on whether we are including or
4656 excluding the two ranges. */
4659 /* If they don't overlap, the result is false. If the second range
4660 is a subset it is the result. Otherwise, the range is from the start
4661 of the second to the end of the first. */
4663 in_p = 0, low = high = 0;
4665 in_p = 1, low = low1, high = high1;
4667 in_p = 1, low = low1, high = high0;
4670 else if (in0_p && ! in1_p)
4672 /* If they don't overlap, the result is the first range. If they are
4673 equal, the result is false. If the second range is a subset of the
4674 first, and the ranges begin at the same place, we go from just after
4675 the end of the second range to the end of the first. If the second
4676 range is not a subset of the first, or if it is a subset and both
4677 ranges end at the same place, the range starts at the start of the
4678 first range and ends just before the second range.
4679 Otherwise, we can't describe this as a single range. */
4681 in_p = 1, low = low0, high = high0;
4682 else if (lowequal && highequal)
4683 in_p = 0, low = high = 0;
4684 else if (subset && lowequal)
4686 low = range_successor (high1);
4691 /* We are in the weird situation where high0 > high1 but
4692 high1 has no successor. Punt. */
4696 else if (! subset || highequal)
4699 high = range_predecessor (low1);
4703 /* low0 < low1 but low1 has no predecessor. Punt. */
4711 else if (! in0_p && in1_p)
4713 /* If they don't overlap, the result is the second range. If the second
4714 is a subset of the first, the result is false. Otherwise,
4715 the range starts just after the first range and ends at the
4716 end of the second. */
4718 in_p = 1, low = low1, high = high1;
4719 else if (subset || highequal)
4720 in_p = 0, low = high = 0;
4723 low = range_successor (high0);
4728 /* high1 > high0 but high0 has no successor. Punt. */
4736 /* The case where we are excluding both ranges. Here the complex case
4737 is if they don't overlap. In that case, the only time we have a
4738 range is if they are adjacent. If the second is a subset of the
4739 first, the result is the first. Otherwise, the range to exclude
4740 starts at the beginning of the first range and ends at the end of the
4744 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4745 range_successor (high0),
4747 in_p = 0, low = low0, high = high1;
4750 /* Canonicalize - [min, x] into - [-, x]. */
4751 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4752 switch (TREE_CODE (TREE_TYPE (low0)))
4755 if (TYPE_PRECISION (TREE_TYPE (low0))
4756 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4760 if (tree_int_cst_equal (low0,
4761 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4765 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4766 && integer_zerop (low0))
4773 /* Canonicalize - [x, max] into - [x, -]. */
4774 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4775 switch (TREE_CODE (TREE_TYPE (high1)))
4778 if (TYPE_PRECISION (TREE_TYPE (high1))
4779 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4783 if (tree_int_cst_equal (high1,
4784 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4788 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4789 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4791 integer_one_node, 1)))
4798 /* The ranges might be also adjacent between the maximum and
4799 minimum values of the given type. For
4800 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4801 return + [x + 1, y - 1]. */
4802 if (low0 == 0 && high1 == 0)
4804 low = range_successor (high0);
4805 high = range_predecessor (low1);
4806 if (low == 0 || high == 0)
4816 in_p = 0, low = low0, high = high0;
4818 in_p = 0, low = low0, high = high1;
4821 *pin_p = in_p, *plow = low, *phigh = high;
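/* A reduced sketch of the first case above (both ranges "in"): the AND of
   two range tests is their intersection.  Hypothetical helper over plain
   int bounds, ignoring the unbounded (null) cases.  */

static int
intersect_ranges_sketch (int low0, int high0, int low1, int high1,
			 int *plow, int *phigh)
{
  int low = low0 > low1 ? low0 : low1;
  int high = high0 < high1 ? high0 : high1;
  if (low > high)
    return 0;			/* disjoint: the conjunction is always false */
  *plow = low, *phigh = high;
  return 1;
}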
4826 /* Subroutine of fold, looking inside expressions of the form
4827 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4828 of the COND_EXPR. This function is also used to optimize
4829 A op B ? C : A, by reversing the comparison first.
4831 Return a folded expression whose code is not a COND_EXPR
4832 anymore, or NULL_TREE if no folding opportunity is found. */
4835 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4837 enum tree_code comp_code = TREE_CODE (arg0);
4838 tree arg00 = TREE_OPERAND (arg0, 0);
4839 tree arg01 = TREE_OPERAND (arg0, 1);
4840 tree arg1_type = TREE_TYPE (arg1);
4846 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4849 A == 0? A : -A same as -A
4850 A != 0? A : -A same as A
4851 A >= 0? A : -A same as abs (A)
4852 A > 0? A : -A same as abs (A)
4853 A <= 0? A : -A same as -abs (A)
4854 A < 0? A : -A same as -abs (A)
4856 None of these transformations work for modes with signed
4857 zeros. If A is +/-0, the first two transformations will
4858 change the sign of the result (from +0 to -0, or vice
4859 versa). The last four will fix the sign of the result,
4860 even though the original expressions could be positive or
4861 negative, depending on the sign of A.
4863 Note that all these transformations are correct if A is
4864 NaN, since the two alternatives (A and -A) are also NaNs. */
4865 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4866 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4867 ? real_zerop (arg01)
4868 : integer_zerop (arg01))
4869 && ((TREE_CODE (arg2) == NEGATE_EXPR
4870 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4871 /* In the case that A is of the form X-Y, '-A' (arg2) may
4872 have already been folded to Y-X, check for that. */
4873 || (TREE_CODE (arg1) == MINUS_EXPR
4874 && TREE_CODE (arg2) == MINUS_EXPR
4875 && operand_equal_p (TREE_OPERAND (arg1, 0),
4876 TREE_OPERAND (arg2, 1), 0)
4877 && operand_equal_p (TREE_OPERAND (arg1, 1),
4878 TREE_OPERAND (arg2, 0), 0))))
4883 tem = fold_convert (arg1_type, arg1);
4884 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4887 return pedantic_non_lvalue (fold_convert (type, arg1));
4890 if (flag_trapping_math)
4895 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4896 arg1 = fold_convert (signed_type_for
4897 (TREE_TYPE (arg1)), arg1);
4898 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4899 return pedantic_non_lvalue (fold_convert (type, tem));
4902 if (flag_trapping_math)
4906 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4907 arg1 = fold_convert (signed_type_for
4908 (TREE_TYPE (arg1)), arg1);
4909 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4910 return negate_expr (fold_convert (type, tem));
4912 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4916 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4917 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4918 both transformations are correct when A is NaN: A != 0
4919 is then true, and A == 0 is false. */
4921 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4922 && integer_zerop (arg01) && integer_zerop (arg2))
4924 if (comp_code == NE_EXPR)
4925 return pedantic_non_lvalue (fold_convert (type, arg1));
4926 else if (comp_code == EQ_EXPR)
4927 return build_int_cst (type, 0);
4930 /* Try some transformations of A op B ? A : B.
4932 A == B? A : B same as B
4933 A != B? A : B same as A
4934 A >= B? A : B same as max (A, B)
4935 A > B? A : B same as max (B, A)
4936 A <= B? A : B same as min (A, B)
4937 A < B? A : B same as min (B, A)
4939 As above, these transformations don't work in the presence
4940 of signed zeros. For example, if A and B are zeros of
4941 opposite sign, the first two transformations will change
4942 the sign of the result. In the last four, the original
4943 expressions give different results for (A=+0, B=-0) and
4944 (A=-0, B=+0), but the transformed expressions do not.
4946 The first two transformations are correct if either A or B
4947 is a NaN. In the first transformation, the condition will
4948 be false, and B will indeed be chosen. In the case of the
4949 second transformation, the condition A != B will be true,
4950 and A will be chosen.
4952 The conversions to max() and min() are not correct if B is
4953 a number and A is not. The conditions in the original
4954 expressions will be false, so all four give B. The min()
4955 and max() versions would give a NaN instead. */
4956 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4957 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4958 /* Avoid these transformations if the COND_EXPR may be used
4959 as an lvalue in the C++ front-end. PR c++/19199. */
4961 || (strcmp (lang_hooks.name, "GNU C++") != 0
4962 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4963 || ! maybe_lvalue_p (arg1)
4964 || ! maybe_lvalue_p (arg2)))
4966 tree comp_op0 = arg00;
4967 tree comp_op1 = arg01;
4968 tree comp_type = TREE_TYPE (comp_op0);
4970 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4971 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4981 return pedantic_non_lvalue (fold_convert (type, arg2));
4983 return pedantic_non_lvalue (fold_convert (type, arg1));
4988 /* In C++ a ?: expression can be an lvalue, so put the
4989 operand which will be used if they are equal first
4990 so that we can convert this back to the
4991 corresponding COND_EXPR. */
4992 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4994 comp_op0 = fold_convert (comp_type, comp_op0);
4995 comp_op1 = fold_convert (comp_type, comp_op1);
4996 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4997 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4998 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4999 return pedantic_non_lvalue (fold_convert (type, tem));
5006 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5008 comp_op0 = fold_convert (comp_type, comp_op0);
5009 comp_op1 = fold_convert (comp_type, comp_op1);
5010 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5011 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5012 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5013 return pedantic_non_lvalue (fold_convert (type, tem));
5017 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5018 return pedantic_non_lvalue (fold_convert (type, arg2));
5021 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5022 return pedantic_non_lvalue (fold_convert (type, arg1));
5025 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5030 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5031 we might still be able to simplify this. For example,
5032 if C1 is one less or one more than C2, this might have started
5033 out as a MIN or MAX and been transformed by this function.
5034 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5036 if (INTEGRAL_TYPE_P (type)
5037 && TREE_CODE (arg01) == INTEGER_CST
5038 && TREE_CODE (arg2) == INTEGER_CST)
5042 /* We can replace A with C1 in this case. */
5043 arg1 = fold_convert (type, arg01);
5044 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5047 /* If C1 is C2 + 1, this is min(A, C2). */
5048 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5050 && operand_equal_p (arg01,
5051 const_binop (PLUS_EXPR, arg2,
5052 build_int_cst (type, 1), 0),
5054 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5056 fold_convert (type, arg1),
5061 /* If C1 is C2 - 1, this is min(A, C2). */
5062 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5064 && operand_equal_p (arg01,
5065 const_binop (MINUS_EXPR, arg2,
5066 build_int_cst (type, 1), 0),
5068 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5070 fold_convert (type, arg1),
5075 /* If C1 is C2 - 1, this is max(A, C2). */
5076 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5078 && operand_equal_p (arg01,
5079 const_binop (MINUS_EXPR, arg2,
5080 build_int_cst (type, 1), 0),
5082 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5084 fold_convert (type, arg1),
5089 /* If C1 is C2 + 1, this is max(A, C2). */
5090 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5092 && operand_equal_p (arg01,
5093 const_binop (PLUS_EXPR, arg2,
5094 build_int_cst (type, 1), 0),
5096 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5098 fold_convert (type, arg1),
5112 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5113 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
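/* When LOGICAL_OP_NON_SHORT_CIRCUIT holds, fold_range_test below may
   replace a short-circuit operator over simple operands with its
   unconditional bitwise form.  A sketch of the effect (hypothetical
   function, illustration only):  */

static int
non_short_circuit_sketch (int x, int y)
{
  /* (x == 1) && (y == 2) evaluated without a branch.  */
  return (x == 1) & (y == 2);
}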
5116 /* EXP is some logical combination of boolean tests. See if we can
5117 merge it into some range test. Return the new tree if so. */
5120 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5122 int or_op = (code == TRUTH_ORIF_EXPR
5123 || code == TRUTH_OR_EXPR);
5124 int in0_p, in1_p, in_p;
5125 tree low0, low1, low, high0, high1, high;
5126 bool strict_overflow_p = false;
5127 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5128 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5130 const char * const warnmsg = G_("assuming signed overflow does not occur "
5131 "when simplifying range test");
5133 /* If this is an OR operation, invert both sides; we will invert
5134 again at the end. */
5136 in0_p = ! in0_p, in1_p = ! in1_p;
5138 /* If both expressions are the same, if we can merge the ranges, and we
5139 can build the range test, return it or its inversion. If one of the
5140 ranges is always true or always false, consider it to be the same
5141 expression as the other. */
5142 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5143 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5145 && 0 != (tem = (build_range_check (type,
5147 : rhs != 0 ? rhs : integer_zero_node,
5150 if (strict_overflow_p)
5151 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5152 return or_op ? invert_truthvalue (tem) : tem;
5155 /* On machines where the branch cost is high, if this is a
5156 short-circuited branch and the underlying object on both sides
5157 is the same, make a non-short-circuit operation. */
5158 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5159 && lhs != 0 && rhs != 0
5160 && (code == TRUTH_ANDIF_EXPR
5161 || code == TRUTH_ORIF_EXPR)
5162 && operand_equal_p (lhs, rhs, 0))
5164 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5165 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5166 which case we can't do this. */
5167 if (simple_operand_p (lhs))
5168 return build2 (code == TRUTH_ANDIF_EXPR
5169 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5172 else if (lang_hooks.decls.global_bindings_p () == 0
5173 && ! CONTAINS_PLACEHOLDER_P (lhs))
5175 tree common = save_expr (lhs);
5177 if (0 != (lhs = build_range_check (type, common,
5178 or_op ? ! in0_p : in0_p,
5180 && (0 != (rhs = build_range_check (type, common,
5181 or_op ? ! in1_p : in1_p,
5184 if (strict_overflow_p)
5185 fold_overflow_warning (warnmsg,
5186 WARN_STRICT_OVERFLOW_COMPARISON);
5187 return build2 (code == TRUTH_ANDIF_EXPR
5188 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5197 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5198 bit value. Arrange things so the extra bits will be set to zero if and
5199 only if C is sign-extended to its full width. If MASK is nonzero,
5200 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5203 unextend (tree c, int p, int unsignedp, tree mask)
5205 tree type = TREE_TYPE (c);
5206 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5209 if (p == modesize || unsignedp)
5212 /* We work by getting just the sign bit into the low-order bit, then
5213 into the high-order bit, then sign-extend. We then XOR that value with C. */
5215 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5216 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5218 /* We must use a signed type in order to get an arithmetic right shift.
5219 However, we must also avoid introducing accidental overflows, so that
5220 a subsequent call to integer_zerop will work. Hence we must
5221 do the type conversion here. At this point, the constant is either
5222 zero or one, and the conversion to a signed type can never overflow.
5223 We could get an overflow if this conversion is done anywhere else. */
5224 if (TYPE_UNSIGNED (type))
5225 temp = fold_convert (signed_type_for (type), temp);
5227 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5228 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5230 temp = const_binop (BIT_AND_EXPR, temp,
5231 fold_convert (TREE_TYPE (c), mask), 0);
5232 /* If necessary, convert the type back to match the type of C. */
5233 if (TYPE_UNSIGNED (type))
5234 temp = fold_convert (type, temp);
5236 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
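/* The extension arranged for above has a familiar closed form; a
   hypothetical sketch for a P-bit value held in the low bits of an
   unsigned long (upper bits zero), assuming a two's complement target.
   Illustration only, not used by the folder.  */

static long
sign_extend_sketch (unsigned long c, int p)
{
  unsigned long sign = 1UL << (p - 1);
  /* XOR places the sign bit; the subtraction propagates it upward.  */
  return (long) ((c ^ sign) - sign);
}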
5239 /* Find ways of folding logical expressions of LHS and RHS:
5240 Try to merge two comparisons to the same innermost item.
5241 Look for range tests like "ch >= '0' && ch <= '9'".
5242 Look for combinations of simple terms on machines with expensive branches
5243 and evaluate the RHS unconditionally.
5245 For example, if we have p->a == 2 && p->b == 4 and we can make an
5246 object large enough to span both A and B, we can do this with a comparison
5247 against the object ANDed with a mask.
5249 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5250 operations to do this with one comparison.
5252 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5253 function and the one above.
5255 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5256 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5258 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
5261 We return the simplified tree or 0 if no optimization is possible. */
5264 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5266 /* If this is the "or" of two comparisons, we can do something if
5267 the comparisons are NE_EXPR. If this is the "and", we can do something
5268 if the comparisons are EQ_EXPR. I.e.,
5269 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5271 WANTED_CODE is this operation code. For single bit fields, we can
5272 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5273 comparison for one-bit fields. */
5275 enum tree_code wanted_code;
5276 enum tree_code lcode, rcode;
5277 tree ll_arg, lr_arg, rl_arg, rr_arg;
5278 tree ll_inner, lr_inner, rl_inner, rr_inner;
5279 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5280 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5281 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5282 HOST_WIDE_INT lnbitsize, lnbitpos;
5283 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5284 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5285 enum machine_mode lnmode;
5286 tree ll_mask, lr_mask, rl_mask, rr_mask;
5287 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5288 tree l_const, r_const;
5289 tree lntype, result;
5290 int first_bit, end_bit;
5292 tree orig_lhs = lhs, orig_rhs = rhs;
5293 enum tree_code orig_code = code;
5295 /* Start by getting the comparison codes. Fail if anything is volatile.
5296 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5297 it were surrounded with a NE_EXPR. */
5299 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5302 lcode = TREE_CODE (lhs);
5303 rcode = TREE_CODE (rhs);
5305 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5307 lhs = build2 (NE_EXPR, truth_type, lhs,
5308 build_int_cst (TREE_TYPE (lhs), 0));
5312 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5314 rhs = build2 (NE_EXPR, truth_type, rhs,
5315 build_int_cst (TREE_TYPE (rhs), 0));
5319 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5320 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5323 ll_arg = TREE_OPERAND (lhs, 0);
5324 lr_arg = TREE_OPERAND (lhs, 1);
5325 rl_arg = TREE_OPERAND (rhs, 0);
5326 rr_arg = TREE_OPERAND (rhs, 1);
5328 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5329 if (simple_operand_p (ll_arg)
5330 && simple_operand_p (lr_arg))
5333 if (operand_equal_p (ll_arg, rl_arg, 0)
5334 && operand_equal_p (lr_arg, rr_arg, 0))
5336 result = combine_comparisons (code, lcode, rcode,
5337 truth_type, ll_arg, lr_arg);
5341 else if (operand_equal_p (ll_arg, rr_arg, 0)
5342 && operand_equal_p (lr_arg, rl_arg, 0))
5344 result = combine_comparisons (code, lcode,
5345 swap_tree_comparison (rcode),
5346 truth_type, ll_arg, lr_arg);
5352 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5353 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5355 /* If the RHS can be evaluated unconditionally and its operands are
5356 simple, it wins to evaluate the RHS unconditionally on machines
5357 with expensive branches. In this case, this isn't a comparison
5358 that can be merged. Avoid doing this if the RHS is a floating-point
5359 comparison since those can trap. */
5361 if (BRANCH_COST >= 2
5362 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5363 && simple_operand_p (rl_arg)
5364 && simple_operand_p (rr_arg))
5366 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5367 if (code == TRUTH_OR_EXPR
5368 && lcode == NE_EXPR && integer_zerop (lr_arg)
5369 && rcode == NE_EXPR && integer_zerop (rr_arg)
5370 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5371 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5372 return build2 (NE_EXPR, truth_type,
5373 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5375 build_int_cst (TREE_TYPE (ll_arg), 0));
5377 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5378 if (code == TRUTH_AND_EXPR
5379 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5380 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5381 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5382 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5383 return build2 (EQ_EXPR, truth_type,
5384 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5386 build_int_cst (TREE_TYPE (ll_arg), 0));
5388 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5390 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5391 return build2 (code, truth_type, lhs, rhs);
5396 /* See if the comparisons can be merged. Then get all the parameters for each side. */
5399 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5400 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5404 ll_inner = decode_field_reference (ll_arg,
5405 &ll_bitsize, &ll_bitpos, &ll_mode,
5406 &ll_unsignedp, &volatilep, &ll_mask,
5408 lr_inner = decode_field_reference (lr_arg,
5409 &lr_bitsize, &lr_bitpos, &lr_mode,
5410 &lr_unsignedp, &volatilep, &lr_mask,
5412 rl_inner = decode_field_reference (rl_arg,
5413 &rl_bitsize, &rl_bitpos, &rl_mode,
5414 &rl_unsignedp, &volatilep, &rl_mask,
5416 rr_inner = decode_field_reference (rr_arg,
5417 &rr_bitsize, &rr_bitpos, &rr_mode,
5418 &rr_unsignedp, &volatilep, &rr_mask,
5421 /* The inner operation on the lhs of each comparison must be the
5422 same if we are to be able to do anything.
5423 Then see if we have constants. If not, the same must be true for the rhs's. */
5425 if (volatilep || ll_inner == 0 || rl_inner == 0
5426 || ! operand_equal_p (ll_inner, rl_inner, 0))
5429 if (TREE_CODE (lr_arg) == INTEGER_CST
5430 && TREE_CODE (rr_arg) == INTEGER_CST)
5431 l_const = lr_arg, r_const = rr_arg;
5432 else if (lr_inner == 0 || rr_inner == 0
5433 || ! operand_equal_p (lr_inner, rr_inner, 0))
5436 l_const = r_const = 0;
5438 /* If either comparison code is not correct for our logical operation,
5439 fail. However, we can convert a one-bit comparison against zero into
5440 the opposite comparison against that bit being set in the field. */
5442 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5443 if (lcode != wanted_code)
5445 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5447 /* Make the left operand unsigned, since we are only interested
5448 in the value of one bit. Otherwise we are doing the wrong thing below. */
5457 /* This is analogous to the code for l_const above. */
5458 if (rcode != wanted_code)
5460 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5469 /* See if we can find a mode that contains both fields being compared on
5470 the left. If we can't, fail. Otherwise, update all constants and masks
5471 to be relative to a field of that size. */
5472 first_bit = MIN (ll_bitpos, rl_bitpos);
5473 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5474 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5475 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5477 if (lnmode == VOIDmode)
5480 lnbitsize = GET_MODE_BITSIZE (lnmode);
5481 lnbitpos = first_bit & ~ (lnbitsize - 1);
5482 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5483 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5485 if (BYTES_BIG_ENDIAN)
5487 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5488 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5491 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5492 size_int (xll_bitpos), 0);
5493 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5494 size_int (xrl_bitpos), 0);
5498 l_const = fold_convert (lntype, l_const);
5499 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5500 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5501 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5502 fold_build1 (BIT_NOT_EXPR,
5506 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5508 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5513 r_const = fold_convert (lntype, r_const);
5514 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5515 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5516 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5517 fold_build1 (BIT_NOT_EXPR,
5521 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5523 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5527 /* Handle the case of comparisons with constants. If there is something in
5528 common between the masks, those bits of the constants must be the same.
5529 If not, the condition is always false. Test for this to avoid generating
5530 incorrect code below. */
5531 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5532 if (! integer_zerop (result)
5533 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5534 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5536 if (wanted_code == NE_EXPR)
5538 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5539 return constant_boolean_node (true, truth_type);
5543 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5544 return constant_boolean_node (false, truth_type);
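/* A sketch of the bitwise merges performed earlier in this function,
   written as hypothetical standalone code:  */

static int
ior_merge_sketch (int a, int b)
{
  /* (a == 0) && (b == 0)  ->  (a | b) == 0, and likewise
     (a != 0) || (b != 0)  ->  (a | b) != 0.  */
  return (a | b) == 0;
}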
5551 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5555 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5558 enum tree_code op_code;
5561 int consts_equal, consts_lt;
5564 STRIP_SIGN_NOPS (arg0);
5566 op_code = TREE_CODE (arg0);
5567 minmax_const = TREE_OPERAND (arg0, 1);
5568 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5569 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5570 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5571 inner = TREE_OPERAND (arg0, 0);
5573 /* If something does not permit us to optimize, return the original tree. */
5574 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5575 || TREE_CODE (comp_const) != INTEGER_CST
5576 || TREE_OVERFLOW (comp_const)
5577 || TREE_CODE (minmax_const) != INTEGER_CST
5578 || TREE_OVERFLOW (minmax_const))
5581 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5582 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5586 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5588 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5591 return invert_truthvalue (tem);
5597 fold_build2 (TRUTH_ORIF_EXPR, type,
5598 optimize_minmax_comparison
5599 (EQ_EXPR, type, arg0, comp_const),
5600 optimize_minmax_comparison
5601 (GT_EXPR, type, arg0, comp_const));
5604 if (op_code == MAX_EXPR && consts_equal)
5605 /* MAX (X, 0) == 0 -> X <= 0 */
5606 return fold_build2 (LE_EXPR, type, inner, comp_const);
5608 else if (op_code == MAX_EXPR && consts_lt)
5609 /* MAX (X, 0) == 5 -> X == 5 */
5610 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5612 else if (op_code == MAX_EXPR)
5613 /* MAX (X, 0) == -1 -> false */
5614 return omit_one_operand (type, integer_zero_node, inner);
5616 else if (consts_equal)
5617 /* MIN (X, 0) == 0 -> X >= 0 */
5618 return fold_build2 (GE_EXPR, type, inner, comp_const);
5621 /* MIN (X, 0) == 5 -> false */
5622 return omit_one_operand (type, integer_zero_node, inner);
5625 /* MIN (X, 0) == -1 -> X == -1 */
5626 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5629 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5630 /* MAX (X, 0) > 0 -> X > 0
5631 MAX (X, 0) > 5 -> X > 5 */
5632 return fold_build2 (GT_EXPR, type, inner, comp_const);
5634 else if (op_code == MAX_EXPR)
5635 /* MAX (X, 0) > -1 -> true */
5636 return omit_one_operand (type, integer_one_node, inner);
5638 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5639 /* MIN (X, 0) > 0 -> false
5640 MIN (X, 0) > 5 -> false */
5641 return omit_one_operand (type, integer_zero_node, inner);
5644 /* MIN (X, 0) > -1 -> X > -1 */
5645 return fold_build2 (GT_EXPR, type, inner, comp_const);
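/* A sketch of the identities exploited above, on plain ints; the helper
   is hypothetical and always returns 1.  */

static int
minmax_compare_sketch (int x)
{
  int max = x > 0 ? x : 0;	/* MAX (X, 0) */
  /* MAX (X, 0) > 5 iff X > 5, and MAX (X, 0) == -1 never holds.  */
  return ((max > 5) == (x > 5)) && max != -1;
}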
5652 /* T is an integer expression that is being multiplied or divided by, or
5653 taken modulo (CODE says which, and what kind of divide or modulus), a
5654 constant C. See if we can eliminate that operation by folding it with
5655 other operations already in T. WIDE_TYPE, if non-null, is a type that
5656 should be used for the computation if wider than our type.
5658 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5659 (X * 2) + (Y * 4). We must, however, be assured that either the original
5660 expression would not overflow or that overflow is undefined for the type
5661 in the language in question.
5663 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5664 the machine has a multiply-accumulate insn or that this is part of an
5665 addressing calculation.
5667 If we return a non-null expression, it is an equivalent form of the
5668 original computation, but need not be in the original type.
5670 We set *STRICT_OVERFLOW_P to true if the return value depends on
5671 signed overflow being undefined. Otherwise we do not change
5672 *STRICT_OVERFLOW_P. */
5675 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5676 bool *strict_overflow_p)
5678 /* To avoid exponential search depth, refuse to allow recursion past
5679 three levels. Beyond that (1) it's highly unlikely that we'll find
5680 something interesting and (2) we've probably processed it before
5681 when we built the inner expression. */
5690 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5697 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5698 bool *strict_overflow_p)
5700 tree type = TREE_TYPE (t);
5701 enum tree_code tcode = TREE_CODE (t);
5702 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5703 > GET_MODE_SIZE (TYPE_MODE (type)))
5704 ? wide_type : type);
5706 int same_p = tcode == code;
5707 tree op0 = NULL_TREE, op1 = NULL_TREE;
5708 bool sub_strict_overflow_p;
5710 /* Don't deal with constants of zero here; they confuse the code below. */
5711 if (integer_zerop (c))
5714 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5715 op0 = TREE_OPERAND (t, 0);
5717 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5718 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5720 /* Note that we need not handle conditional operations here since fold
5721 already handles those cases. So just do arithmetic here. */
5725 /* For a constant, we can always simplify if we are a multiply
5726 or (for divide and modulus) if it is a multiple of our constant. */
5727 if (code == MULT_EXPR
5728 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5729 return const_binop (code, fold_convert (ctype, t),
5730 fold_convert (ctype, c), 0);
5733 CASE_CONVERT: case NON_LVALUE_EXPR:
5734 /* If op0 is an expression ... */
5735 if ((COMPARISON_CLASS_P (op0)
5736 || UNARY_CLASS_P (op0)
5737 || BINARY_CLASS_P (op0)
5738 || VL_EXP_CLASS_P (op0)
5739 || EXPRESSION_CLASS_P (op0))
5740 /* ... and has wrapping overflow, and its type is smaller
5741 than ctype, then we cannot pass through as widening. */
5742 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5743 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5744 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5745 && (TYPE_PRECISION (ctype)
5746 > TYPE_PRECISION (TREE_TYPE (op0))))
5747 /* ... or this is a truncation (t is narrower than op0),
5748 then we cannot pass through this narrowing. */
5749 || (TYPE_PRECISION (type)
5750 < TYPE_PRECISION (TREE_TYPE (op0)))
5751 /* ... or signedness changes for division or modulus,
5752 then we cannot pass through this conversion. */
5753 || (code != MULT_EXPR
5754 && (TYPE_UNSIGNED (ctype)
5755 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5756 /* ... or has undefined overflow while the type converted
5757 to has not, we cannot do the operation in the inner type
5758 as that would introduce undefined overflow. */
5759 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5760 && !TYPE_OVERFLOW_UNDEFINED (type))))
5763 /* Pass the constant down and see if we can make a simplification. If
5764 we can, replace this expression with the inner simplification for
5765 possible later conversion to our or some other type. */
5766 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5767 && TREE_CODE (t2) == INTEGER_CST
5768 && !TREE_OVERFLOW (t2)
5769 && (0 != (t1 = extract_muldiv (op0, t2, code,
5771 ? ctype : NULL_TREE,
5772 strict_overflow_p))))
5777 /* If widening the type changes it from signed to unsigned, then we
5778 must avoid building ABS_EXPR itself as unsigned. */
5779 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5781 tree cstype = (*signed_type_for) (ctype);
5782 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5785 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5786 return fold_convert (ctype, t1);
5790 /* If the constant is negative, we cannot simplify this. */
5791 if (tree_int_cst_sgn (c) == -1)
5795 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5797 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5800 case MIN_EXPR: case MAX_EXPR:
5801 /* If widening the type changes the signedness, then we can't perform
5802 this optimization as that changes the result. */
5803 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5806 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5807 sub_strict_overflow_p = false;
5808 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5809 &sub_strict_overflow_p)) != 0
5810 && (t2 = extract_muldiv (op1, c, code, wide_type,
5811 &sub_strict_overflow_p)) != 0)
5813 if (tree_int_cst_sgn (c) < 0)
5814 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5815 if (sub_strict_overflow_p)
5816 *strict_overflow_p = true;
5817 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5818 fold_convert (ctype, t2));
5822 case LSHIFT_EXPR: case RSHIFT_EXPR:
5823 /* If the second operand is constant, this is a multiplication
5824 or floor division by a power of two, so we can treat it that
5825 way unless the multiplier or divisor overflows. Signed
5826 left-shift overflow is implementation-defined rather than
5827 undefined in C90, so do not convert signed left shift into multiplication. */
5829 if (TREE_CODE (op1) == INTEGER_CST
5830 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5831 /* const_binop may not detect overflow correctly,
5832 so check for it explicitly here. */
5833 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5834 && TREE_INT_CST_HIGH (op1) == 0
5835 && 0 != (t1 = fold_convert (ctype,
5836 const_binop (LSHIFT_EXPR,
5839 && !TREE_OVERFLOW (t1))
5840 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5841 ? MULT_EXPR : FLOOR_DIV_EXPR,
5842 ctype, fold_convert (ctype, op0), t1),
5843 c, code, wide_type, strict_overflow_p);
5846 case PLUS_EXPR: case MINUS_EXPR:
5847 /* See if we can eliminate the operation on both sides. If we can, we
5848 can return a new PLUS or MINUS. If we can't, the only remaining
5849 cases where we can do anything are if the second operand is a constant. */
5851 sub_strict_overflow_p = false;
5852 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5853 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5854 if (t1 != 0 && t2 != 0
5855 && (code == MULT_EXPR
5856 /* If not multiplication, we can only do this if both operands
5857 are divisible by c. */
5858 || (multiple_of_p (ctype, op0, c)
5859 && multiple_of_p (ctype, op1, c))))
5861 if (sub_strict_overflow_p)
5862 *strict_overflow_p = true;
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5864 fold_convert (ctype, t2));
5867 /* If this was a subtraction, negate OP1 and set it to be an addition.
5868 This simplifies the logic below. */
5869 if (tcode == MINUS_EXPR)
5870 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5872 if (TREE_CODE (op1) != INTEGER_CST)
5875 /* If either OP1 or C is negative, this optimization is not safe for
5876 some of the division and remainder types, while for others we need
5877 to change the code. */
5878 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5880 if (code == CEIL_DIV_EXPR)
5881 code = FLOOR_DIV_EXPR;
5882 else if (code == FLOOR_DIV_EXPR)
5883 code = CEIL_DIV_EXPR;
5884 else if (code != MULT_EXPR
5885 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5889 /* If it's a multiply or a division/modulus operation of a multiple
5890 of our constant, do the operation and verify it doesn't overflow. */
5891 if (code == MULT_EXPR
5892 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5894 op1 = const_binop (code, fold_convert (ctype, op1),
5895 fold_convert (ctype, c), 0);
5896 /* We allow the constant to overflow with wrapping semantics. */
5898 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5904 /* If we have an unsigned type that is not a sizetype, we cannot widen
5905 the operation since it will change the result if the original
5906 computation overflowed. */
5907 if (TYPE_UNSIGNED (ctype)
5908 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5912 /* If we were able to eliminate our operation from the first side,
5913 apply our operation to the second side and reform the PLUS. */
5914 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5915 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5917 /* The last case is if we are a multiply. In that case, we can
5918 apply the distributive law to commute the multiply and addition
5919 if the multiplication of the constants doesn't overflow. */
5920 if (code == MULT_EXPR)
5921 return fold_build2 (tcode, ctype,
5922 fold_build2 (code, ctype,
5923 fold_convert (ctype, op0),
5924 fold_convert (ctype, c)),
5930 /* We have a special case here if we are doing something like
5931 (C * 8) % 4 since we know that's zero. */
5932 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5933 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5934 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5935 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5936 return omit_one_operand (type, integer_zero_node, op0);
5938 /* ... fall through ... */
5940 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5941 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5942 /* If we can extract our operation from the LHS, do so and return a
5943 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5944 do something only if the second operand is a constant. */
5946 && (t1 = extract_muldiv (op0, c, code, wide_type,
5947 strict_overflow_p)) != 0)
5948 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5949 fold_convert (ctype, op1));
5950 else if (tcode == MULT_EXPR && code == MULT_EXPR
5951 && (t1 = extract_muldiv (op1, c, code, wide_type,
5952 strict_overflow_p)) != 0)
5953 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5954 fold_convert (ctype, t1));
5955 else if (TREE_CODE (op1) != INTEGER_CST)
5958 /* If these are the same operation types, we can associate them
5959 assuming no overflow. */
5961 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5962 fold_convert (ctype, c), 1))
5963 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5964 TREE_INT_CST_HIGH (t1),
5965 (TYPE_UNSIGNED (ctype)
5966 && tcode != MULT_EXPR) ? -1 : 1,
5967 TREE_OVERFLOW (t1)))
5968 && !TREE_OVERFLOW (t1))
5969 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5971 /* If these operations "cancel" each other, we have the main
5972 optimizations of this pass, which occur when either constant is a
5973 multiple of the other, in which case we replace this with an
5974 operation of either CODE or TCODE.
5976 If we have an unsigned type that is not a sizetype, we cannot do
5977 this since it will change the result if the original computation overflowed. */
5979 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5980 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5981 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5982 || (tcode == MULT_EXPR
5983 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5984 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5985 && code != MULT_EXPR)))
5987 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
5996 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5999 *strict_overflow_p = true;
6000 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6001 fold_convert (ctype,
6002 const_binop (TRUNC_DIV_EXPR,
6015 /* Return a node which has the indicated constant VALUE (either 0 or
6016 1), and is of the indicated TYPE. */
6019 constant_boolean_node (int value, tree type)
6021 if (type == integer_type_node)
6022 return value ? integer_one_node : integer_zero_node;
6023 else if (type == boolean_type_node)
6024 return value ? boolean_true_node : boolean_false_node;
6026 return build_int_cst (type, value);
6030 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6031 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6032 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6033 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6034 COND is the first argument to CODE; otherwise (as in the example
6035 given here), it is the second argument. TYPE is the type of the
6036 original expression. Return NULL_TREE if no simplification is possible. */
6040 fold_binary_op_with_conditional_arg (enum tree_code code,
6041 tree type, tree op0, tree op1,
6042 tree cond, tree arg, int cond_first_p)
6044 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6045 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6046 tree test, true_value, false_value;
6047 tree lhs = NULL_TREE;
6048 tree rhs = NULL_TREE;
6050 /* This transformation is only worthwhile if we don't have to wrap
6051 arg in a SAVE_EXPR, and the operation can be simplified on at least
6052 one of the branches once it's pushed inside the COND_EXPR. */
6053 if (!TREE_CONSTANT (arg))
6056 if (TREE_CODE (cond) == COND_EXPR)
6058 test = TREE_OPERAND (cond, 0);
6059 true_value = TREE_OPERAND (cond, 1);
6060 false_value = TREE_OPERAND (cond, 2);
6061 /* If this operand throws an exception, then it does not make
6062 sense to try to perform a logical or arithmetic operation involving it. */
6064 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6066 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6071 tree testtype = TREE_TYPE (cond);
6073 true_value = constant_boolean_node (true, testtype);
6074 false_value = constant_boolean_node (false, testtype);
6077 arg = fold_convert (arg_type, arg);
6080 true_value = fold_convert (cond_type, true_value);
6082 lhs = fold_build2 (code, type, true_value, arg);
6084 lhs = fold_build2 (code, type, arg, true_value);
6088 false_value = fold_convert (cond_type, false_value);
6090 rhs = fold_build2 (code, type, false_value, arg);
6092 rhs = fold_build2 (code, type, arg, false_value);
6095 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6096 return fold_convert (type, test);
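/* A sketch of the distribution performed above, as hypothetical
   source-level code:  */

static int
distribute_cond_sketch (int b, int a)
{
  /* a + (b ? 3 : 5)  becomes  b ? (a + 3) : (a + 5).  */
  return b ? (a + 3) : (a + 5);
}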
6100 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6102 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6103 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6104 ADDEND is the same as X.
6106 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6107 and finite. The problematic cases are when X is zero, and its mode
6108 has signed zeros. In the case of rounding towards -infinity,
6109 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6110 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6113 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6115 if (!real_zerop (addend))
6118 /* Don't allow the fold with -fsignaling-nans. */
6119 if (HONOR_SNANS (TYPE_MODE (type)))
6122 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6123 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6126 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6127 if (TREE_CODE (addend) == REAL_CST
6128 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6131 /* The mode has signed zeros, and we have to honor their sign.
6132 In this situation, there is only one case we can return true for.
6133 X - 0 is the same as X unless rounding towards -infinity is in effect. */
6135 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
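/* A sketch of the signed-zero hazard described above, assuming IEEE
   arithmetic in round-to-nearest mode (illustration only):  */

static double
signed_zero_sketch (void)
{
  double x = -0.0;
  /* -0.0 + 0.0 is +0.0, so folding X + 0.0 to X would flip the sign
     here; X - 0.0 is safe except when rounding towards -infinity.  */
  return x + 0.0;
}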
6138 /* Subroutine of fold() that checks comparisons of built-in math
6139 functions against real constants.
6141 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6142 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6143 is the type of the result and ARG0 and ARG1 are the operands of the
6144 comparison. ARG1 must be a TREE_REAL_CST.
6146 The function returns the constant folded tree if a simplification
6147 can be made, and NULL_TREE otherwise. */
6150 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6151 tree type, tree arg0, tree arg1)
6155 if (BUILTIN_SQRT_P (fcode))
6157 tree arg = CALL_EXPR_ARG (arg0, 0);
6158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6160 c = TREE_REAL_CST (arg1);
6161 if (REAL_VALUE_NEGATIVE (c))
6163 /* sqrt(x) < y is always false, if y is negative. */
6164 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6165 return omit_one_operand (type, integer_zero_node, arg);
6167 /* sqrt(x) > y is always true, if y is negative and we
6168 don't care about NaNs, i.e. negative values of x. */
6169 if (code == NE_EXPR || !HONOR_NANS (mode))
6170 return omit_one_operand (type, integer_one_node, arg);
6172 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6173 return fold_build2 (GE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg), dconst0));
6176 else if (code == GT_EXPR || code == GE_EXPR)
6180 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6181 real_convert (&c2, mode, &c2);
6183 if (REAL_VALUE_ISINF (c2))
6185 /* sqrt(x) > y is x == +Inf, when y is very large. */
6186 if (HONOR_INFINITIES (mode))
6187 return fold_build2 (EQ_EXPR, type, arg,
6188 build_real (TREE_TYPE (arg), c2));
6190 /* sqrt(x) > y is always false, when y is very large
6191 and we don't care about infinities. */
6192 return omit_one_operand (type, integer_zero_node, arg);
6195 /* sqrt(x) > c is the same as x > c*c. */
6196 return fold_build2 (code, type, arg,
6197 build_real (TREE_TYPE (arg), c2));
6199 else if (code == LT_EXPR || code == LE_EXPR)
6203 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6204 real_convert (&c2, mode, &c2);
6206 if (REAL_VALUE_ISINF (c2))
6208 /* sqrt(x) < y is always true, when y is a very large
6209 value and we don't care about NaNs or Infinities. */
6210 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6211 return omit_one_operand (type, integer_one_node, arg);
6213 /* sqrt(x) < y is x != +Inf when y is very large and we
6214 don't care about NaNs. */
6215 if (! HONOR_NANS (mode))
6216 return fold_build2 (NE_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6219 /* sqrt(x) < y is x >= 0 when y is very large and we
6220 don't care about Infinities. */
6221 if (! HONOR_INFINITIES (mode))
6222 return fold_build2 (GE_EXPR, type, arg,
6223 build_real (TREE_TYPE (arg), dconst0));
6225 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6226 if (lang_hooks.decls.global_bindings_p () != 0
6227 || CONTAINS_PLACEHOLDER_P (arg))
6230 arg = save_expr (arg);
6231 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6232 fold_build2 (GE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg),
6235 fold_build2 (NE_EXPR, type, arg,
6236 build_real (TREE_TYPE (arg),
6240 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return fold_build2 (code, type, arg,
6243 build_real (TREE_TYPE (arg), c2));
6245 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6246 if (lang_hooks.decls.global_bindings_p () == 0
6247 && ! CONTAINS_PLACEHOLDER_P (arg))
6249 arg = save_expr (arg);
6250 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6251 fold_build2 (GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg),
6254 fold_build2 (code, type, arg,
6255 build_real (TREE_TYPE (arg),
6264 /* Subroutine of fold() that optimizes comparisons against Infinities,
6265 either +Inf or -Inf.
6267 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6268 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6269 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6271 The function returns the constant folded tree if a simplification
6272 can be made, and NULL_TREE otherwise. */
6275 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6277 enum machine_mode mode;
6278 REAL_VALUE_TYPE max;
6282 mode = TYPE_MODE (TREE_TYPE (arg0));
6284 /* For negative infinity swap the sense of the comparison. */
6285 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6287 code = swap_tree_comparison (code);
6292 /* x > +Inf is always false, if we ignore sNaNs. */
6293 if (HONOR_SNANS (mode))
6295 return omit_one_operand (type, integer_zero_node, arg0);
6298 /* x <= +Inf is always true, if we don't care about NaNs. */
6299 if (! HONOR_NANS (mode))
6300 return omit_one_operand (type, integer_one_node, arg0);
6302 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6303 if (lang_hooks.decls.global_bindings_p () == 0
6304 && ! CONTAINS_PLACEHOLDER_P (arg0))
6306 arg0 = save_expr (arg0);
6307 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6313 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6314 real_maxval (&max, neg, mode);
6315 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6316 arg0, build_real (TREE_TYPE (arg0), max));
6319 /* x < +Inf is always equal to x <= DBL_MAX. */
6320 real_maxval (&max, neg, mode);
6321 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6322 arg0, build_real (TREE_TYPE (arg0), max));
6325 /* x != +Inf is always equal to !(x > DBL_MAX). */
6326 real_maxval (&max, neg, mode);
6327 if (! HONOR_NANS (mode))
6328 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6329 arg0, build_real (TREE_TYPE (arg0), max));
6331 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6333 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
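/* A sketch of the DBL_MAX rewrite above, assuming IEEE doubles; the
   helper is hypothetical and the literal is DBL_MAX.  */

static int
less_than_inf_sketch (double x)
{
  /* x < +Inf  becomes  x <= DBL_MAX  (both are false for NaN).  */
  return x <= 1.7976931348623157e308;
}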
6342 /* Subroutine of fold() that optimizes comparisons of a division by
6343 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6346 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6347 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6348 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6350 The function returns the constant folded tree if a simplification
6351 can be made, and NULL_TREE otherwise. */
6354 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6356 tree prod, tmp, hi, lo;
6357 tree arg00 = TREE_OPERAND (arg0, 0);
6358 tree arg01 = TREE_OPERAND (arg0, 1);
6359 unsigned HOST_WIDE_INT lpart;
6360 HOST_WIDE_INT hpart;
6361 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6365 /* We have to do this the hard way to detect unsigned overflow.
6366 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6367 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6368 TREE_INT_CST_HIGH (arg01),
6369 TREE_INT_CST_LOW (arg1),
6370 TREE_INT_CST_HIGH (arg1),
6371 &lpart, &hpart, unsigned_p);
6372 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6374 neg_overflow = false;
6378 tmp = int_const_binop (MINUS_EXPR, arg01,
6379 build_int_cst (TREE_TYPE (arg01), 1), 0);
6382 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6383 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6384 TREE_INT_CST_HIGH (prod),
6385 TREE_INT_CST_LOW (tmp),
6386 TREE_INT_CST_HIGH (tmp),
6387 &lpart, &hpart, unsigned_p);
6388 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6389 -1, overflow | TREE_OVERFLOW (prod));
6391 else if (tree_int_cst_sgn (arg01) >= 0)
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1), 0);
6395 switch (tree_int_cst_sgn (arg1))
6398 neg_overflow = true;
6399 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6404 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6409 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6419 /* A negative divisor reverses the relational operators. */
6420 code = swap_tree_comparison (code);
6422 tmp = int_const_binop (PLUS_EXPR, arg01,
6423 build_int_cst (TREE_TYPE (arg01), 1), 0);
6424 switch (tree_int_cst_sgn (arg1))
6427 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6432 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6437 neg_overflow = true;
6438 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6450 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6451 return omit_one_operand (type, integer_zero_node, arg00);
6452 if (TREE_OVERFLOW (hi))
6453 return fold_build2 (GE_EXPR, type, arg00, lo);
6454 if (TREE_OVERFLOW (lo))
6455 return fold_build2 (LE_EXPR, type, arg00, hi);
6456 return build_range_check (type, arg00, 1, lo, hi);
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand (type, integer_one_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2 (LT_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2 (GT_EXPR, type, arg00, hi);
6465 return build_range_check (type, arg00, 0, lo, hi);
6468 if (TREE_OVERFLOW (lo))
6470 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6471 return omit_one_operand (type, tmp, arg00);
6473 return fold_build2 (LT_EXPR, type, arg00, lo);
6476 if (TREE_OVERFLOW (hi))
6478 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6479 return omit_one_operand (type, tmp, arg00);
6481 return fold_build2 (LE_EXPR, type, arg00, hi);
6484 if (TREE_OVERFLOW (hi))
6486 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6487 return omit_one_operand (type, tmp, arg00);
6489 return fold_build2 (GT_EXPR, type, arg00, hi);
6492 if (TREE_OVERFLOW (lo))
6494 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6495 return omit_one_operand (type, tmp, arg00);
6497 return fold_build2 (GE_EXPR, type, arg00, lo);
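/* For example, for signed x, fold_div_compare rewrites

     x / 4 == 2

   into the range check 8 <= x && x <= 11, since integer division
   truncates towards zero; the TREE_OVERFLOW checks above handle
   bounds that fall outside the range of the type.  */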
6507 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6508 equality/inequality test, then return a simplified form of the test
6509 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6513 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6516 /* If this is testing a single bit, we can optimize the test. */
6517 if ((code == NE_EXPR || code == EQ_EXPR)
6518 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6519 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6521 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6522 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6523 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6525 if (arg00 != NULL_TREE
6526 /* This is only a win if casting to a signed type is cheap,
6527 i.e. when arg00's type is not a partial mode. */
6528 && TYPE_PRECISION (TREE_TYPE (arg00))
6529 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6531 tree stype = signed_type_for (TREE_TYPE (arg00));
6532 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6533 result_type, fold_convert (stype, arg00),
6534 build_int_cst (stype, 0));
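/* For example, if x has a 32-bit unsigned type, the sign test fold
   rewrites

     (x & 0x80000000) != 0   into   (int) x < 0
     (x & 0x80000000) == 0   into   (int) x >= 0

   because the mask is exactly the sign bit of x.  */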
6541 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of
6543 the test using shifts and logical operations. Otherwise return
6544 NULL. TYPE is the desired result type. */
6547 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 tree inner = TREE_OPERAND (arg0, 0);
6556 tree type = TREE_TYPE (arg0);
6557 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6558 enum machine_mode operand_mode = TYPE_MODE (type);
6560 tree signed_type, unsigned_type, intermediate_type;
6563 /* First, see if we can fold the single bit test into a sign-bit test. */
6565 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6570 /* Otherwise we have (A & C) != 0 where C is a single bit,
6571 convert that into ((A >> C2) & 1), where C2 = log2(C).
6572 Similarly for (A & C) == 0. */
6574 /* If INNER is a right shift of a constant and it plus BITNUM does
6575 not overflow, adjust BITNUM and INNER. */
6576 if (TREE_CODE (inner) == RSHIFT_EXPR
6577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6578 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6579 && bitnum < TYPE_PRECISION (type)
6580 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6581 bitnum - TYPE_PRECISION (type)))
6583 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
6590 #ifdef LOAD_EXTEND_OP
6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6592 && !flag_syntax_only) ? 0 : 1;
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6600 inner = fold_convert (intermediate_type, inner);
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
6606 one = build_int_cst (intermediate_type, 1);
6608 if (code == EQ_EXPR)
6609 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6611 /* Put the AND last so it can combine with more things. */
6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614 /* Make sure to return the proper type. */
6615 inner = fold_convert (result_type, inner);
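/* For example, when the sign test fold does not apply,

     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1

   with the AND placed last so it can combine with surrounding
   expressions.  */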
6622 /* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6626 reorder_operands_p (const_tree arg0, const_tree arg1)
6628 if (! flag_evaluation_order)
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6636 /* Test whether it is preferable to swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
6642 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6652 if (TREE_CODE (arg1) == REAL_CST)
6654 if (TREE_CODE (arg0) == REAL_CST)
6657 if (TREE_CODE (arg1) == FIXED_CST)
6659 if (TREE_CODE (arg0) == FIXED_CST)
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6667 if (TREE_CONSTANT (arg1))
6669 if (TREE_CONSTANT (arg0))
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6679 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6691 if (TREE_CODE (arg0) == SSA_NAME)
6694 /* Put variables last. */
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
6707 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6709 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6711 tree shorter_type, outer_type;
6715 if (arg0_unw == arg0)
6717 shorter_type = TREE_TYPE (arg0_unw);
6719 #ifdef HAVE_canonicalize_funcptr_for_compare
6720 /* Disable this optimization if we're casting a function pointer
6721 type on targets that require function pointer canonicalization. */
6722 if (HAVE_canonicalize_funcptr_for_compare
6723 && TREE_CODE (shorter_type) == POINTER_TYPE
6724 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6728 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6731 arg1_unw = get_unwidened (arg1, NULL_TREE);
6733 /* If possible, express the comparison in the shorter mode. */
6734 if ((code == EQ_EXPR || code == NE_EXPR
6735 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6736 && (TREE_TYPE (arg1_unw) == shorter_type
6737 || (TYPE_PRECISION (shorter_type)
6738 > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6739 || ((TYPE_PRECISION (shorter_type)
6740 == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6741 && (TYPE_UNSIGNED (shorter_type)
6742 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6743 || (TREE_CODE (arg1_unw) == INTEGER_CST
6744 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6745 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6746 && int_fits_type_p (arg1_unw, shorter_type))))
6747 return fold_build2 (code, type, arg0_unw,
6748 fold_convert (shorter_type, arg1_unw));
6750 if (TREE_CODE (arg1_unw) != INTEGER_CST
6751 || TREE_CODE (shorter_type) != INTEGER_TYPE
6752 || !int_fits_type_p (arg1_unw, shorter_type))
6755 /* If we are comparing with an integer that does not fit into the range
6756 of the shorter type, the result is known. */
6757 outer_type = TREE_TYPE (arg1_unw);
6758 min = lower_bound_in_type (outer_type, shorter_type);
6759 max = upper_bound_in_type (outer_type, shorter_type);
6761 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6770 return omit_one_operand (type, integer_zero_node, arg0);
6775 return omit_one_operand (type, integer_one_node, arg0);
6781 return omit_one_operand (type, integer_one_node, arg0);
6783 return omit_one_operand (type, integer_zero_node, arg0);
6788 return omit_one_operand (type, integer_zero_node, arg0);
6790 return omit_one_operand (type, integer_one_node, arg0);
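/* For example, if c has type unsigned char, fold_widened_comparison
   rewrites

     (int) c == 12    into   c == 12   (done in the shorter type)
     (int) c <  300   into   1         (always true, 300 > UCHAR_MAX)
     (int) c == 300   into   0         (always false)

   because 300 does not fit into the range of the shorter type.  */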
6799 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6800 ARG0 just the signedness is changed. */
6803 fold_sign_changed_comparison (enum tree_code code, tree type,
6804 tree arg0, tree arg1)
6807 tree inner_type, outer_type;
6809 if (!CONVERT_EXPR_P (arg0))
6812 outer_type = TREE_TYPE (arg0);
6813 arg0_inner = TREE_OPERAND (arg0, 0);
6814 inner_type = TREE_TYPE (arg0_inner);
6816 #ifdef HAVE_canonicalize_funcptr_for_compare
6817 /* Disable this optimization if we're casting a function pointer
6818 type on targets that require function pointer canonicalization. */
6819 if (HAVE_canonicalize_funcptr_for_compare
6820 && TREE_CODE (inner_type) == POINTER_TYPE
6821 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6825 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6828 /* If the conversion is from an integral subtype to its basetype, leave it alone. */
6830 if (TREE_TYPE (inner_type) == outer_type)
6833 if (TREE_CODE (arg1) != INTEGER_CST
6834 && !(CONVERT_EXPR_P (arg1)
6835 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6838 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6839 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6844 if (TREE_CODE (arg1) == INTEGER_CST)
6845 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6846 TREE_INT_CST_HIGH (arg1), 0,
6847 TREE_OVERFLOW (arg1));
6849 arg1 = fold_convert (inner_type, arg1);
6851 return fold_build2 (code, type, arg0_inner, arg1);
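/* For example, if x has type int, fold_sign_changed_comparison
   rewrites

     (unsigned int) x == 5U   into   x == 5

   with the constant re-fitted to the inner type first; equality
   comparisons are unaffected by a change of signedness alone.  */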
6854 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6855 the step of the array. Reconstructs s and delta in the case of s * delta
6856 being an integer constant (and thus already folded).
6857 ADDR is the address. OP1 is the multiplicative expression.
6858 If the function succeeds, the new address expression is returned. Otherwise
6859 NULL_TREE is returned. */
6862 try_move_mult_to_index (tree addr, tree op1)
6864 tree s, delta, step;
6865 tree ref = TREE_OPERAND (addr, 0), pref;
6870 /* Strip the nops that might be added when converting op1 to sizetype. */
6873 /* Canonicalize op1 into a possibly non-constant delta
6874 and an INTEGER_CST s. */
6875 if (TREE_CODE (op1) == MULT_EXPR)
6877 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6882 if (TREE_CODE (arg0) == INTEGER_CST)
6887 else if (TREE_CODE (arg1) == INTEGER_CST)
6895 else if (TREE_CODE (op1) == INTEGER_CST)
6902 /* Treat the expression as delta * 1. */
6904 s = integer_one_node;
6907 for (;; ref = TREE_OPERAND (ref, 0))
6909 if (TREE_CODE (ref) == ARRAY_REF)
6911 /* Remember if this was a multi-dimensional array. */
6912 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6915 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6919 step = array_ref_element_size (ref);
6920 if (TREE_CODE (step) != INTEGER_CST)
6925 if (! tree_int_cst_equal (step, s))
6930 /* Check whether delta is a multiple of step. */
6931 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6937 /* Only fold here if we can verify we do not overflow one
6938 dimension of a multi-dimensional array. */
6943 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6944 || !INTEGRAL_TYPE_P (itype)
6945 || !TYPE_MAX_VALUE (itype)
6946 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6949 tmp = fold_binary (PLUS_EXPR, itype,
6950 fold_convert (itype,
6951 TREE_OPERAND (ref, 1)),
6952 fold_convert (itype, delta));
6954 || TREE_CODE (tmp) != INTEGER_CST
6955 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6964 if (!handled_component_p (ref))
6968 /* We found a suitable array reference. So copy everything up to it,
6969 and replace the index. */
6971 pref = TREE_OPERAND (addr, 0);
6972 ret = copy_node (pref);
6977 pref = TREE_OPERAND (pref, 0);
6978 TREE_OPERAND (pos, 0) = copy_node (pref);
6979 pos = TREE_OPERAND (pos, 0);
6982 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6983 fold_convert (itype,
6984 TREE_OPERAND (pos, 1)),
6985 fold_convert (itype, delta));
6987 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
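/* For example, given int a[10], try_move_mult_to_index rewrites

     &a[2] p+ 3 * sizeof (int)   into   &a[2 + 3], i.e. &a[5]

   after checking that the scale matches the element size of the
   array; for multi-dimensional arrays it additionally verifies that
   the new index does not overflow one dimension.  */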
6991 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6992 means A >= Y && A != MAX, but in this case we know that
6993 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6996 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6998 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7000 if (TREE_CODE (bound) == LT_EXPR)
7001 a = TREE_OPERAND (bound, 0);
7002 else if (TREE_CODE (bound) == GT_EXPR)
7003 a = TREE_OPERAND (bound, 1);
7007 typea = TREE_TYPE (a);
7008 if (!INTEGRAL_TYPE_P (typea)
7009 && !POINTER_TYPE_P (typea))
7012 if (TREE_CODE (ineq) == LT_EXPR)
7014 a1 = TREE_OPERAND (ineq, 1);
7015 y = TREE_OPERAND (ineq, 0);
7017 else if (TREE_CODE (ineq) == GT_EXPR)
7019 a1 = TREE_OPERAND (ineq, 0);
7020 y = TREE_OPERAND (ineq, 1);
7025 if (TREE_TYPE (a1) != typea)
7028 if (POINTER_TYPE_P (typea))
7030 /* Convert the pointers to integers before taking the difference. */
7031 tree ta = fold_convert (ssizetype, a);
7032 tree ta1 = fold_convert (ssizetype, a1);
7033 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7036 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7038 if (!diff || !integer_onep (diff))
7041 return fold_build2 (GE_EXPR, type, a, y);
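/* For example, with BOUND a < x and INEQ a + 1 > y, the difference
   (a + 1) - a folds to 1, so the sharp inequality is rewritten:

     a + 1 > y   becomes   a >= y

   which is safe because a < x <= MAX rules out a == MAX.  */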
7044 /* Fold a sum or difference of at least one multiplication.
7045 Returns the folded tree or NULL if no simplification could be made. */
7048 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7050 tree arg00, arg01, arg10, arg11;
7051 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7053 /* (A * C) +- (B * C) -> (A+-B) * C.
7054 (A * C) +- A -> A * (C+-1).
7055 We are most concerned about the case where C is a constant,
7056 but other combinations show up during loop reduction. Since
7057 it is not difficult, try all four possibilities. */
7059 if (TREE_CODE (arg0) == MULT_EXPR)
7061 arg00 = TREE_OPERAND (arg0, 0);
7062 arg01 = TREE_OPERAND (arg0, 1);
7064 else if (TREE_CODE (arg0) == INTEGER_CST)
7066 arg00 = build_one_cst (type);
7071 /* We cannot generate constant 1 for fract. */
7072 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7075 arg01 = build_one_cst (type);
7077 if (TREE_CODE (arg1) == MULT_EXPR)
7079 arg10 = TREE_OPERAND (arg1, 0);
7080 arg11 = TREE_OPERAND (arg1, 1);
7082 else if (TREE_CODE (arg1) == INTEGER_CST)
7084 arg10 = build_one_cst (type);
7089 /* We cannot generate constant 1 for fract. */
7090 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7093 arg11 = build_one_cst (type);
7097 if (operand_equal_p (arg01, arg11, 0))
7098 same = arg01, alt0 = arg00, alt1 = arg10;
7099 else if (operand_equal_p (arg00, arg10, 0))
7100 same = arg00, alt0 = arg01, alt1 = arg11;
7101 else if (operand_equal_p (arg00, arg11, 0))
7102 same = arg00, alt0 = arg01, alt1 = arg10;
7103 else if (operand_equal_p (arg01, arg10, 0))
7104 same = arg01, alt0 = arg00, alt1 = arg11;
7106 /* No identical multiplicands; see if we can find a common
7107 power-of-two factor in non-power-of-two multiplies. This
7108 can help in multi-dimensional array access. */
7109 else if (host_integerp (arg01, 0)
7110 && host_integerp (arg11, 0))
7112 HOST_WIDE_INT int01, int11, tmp;
7115 int01 = TREE_INT_CST_LOW (arg01);
7116 int11 = TREE_INT_CST_LOW (arg11);
7118 /* Move min of absolute values to int11. */
7119 if ((int01 >= 0 ? int01 : -int01)
7120 < (int11 >= 0 ? int11 : -int11))
7122 tmp = int01, int01 = int11, int11 = tmp;
7123 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7130 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7132 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7133 build_int_cst (TREE_TYPE (arg00),
7138 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7143 return fold_build2 (MULT_EXPR, type,
7144 fold_build2 (code, type,
7145 fold_convert (type, alt0),
7146 fold_convert (type, alt1)),
7147 fold_convert (type, same));
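/* For example, fold_plusminus_mult_expr rewrites

     x * 3 + x * 5   into   x * 8              (common multiplicand x)
     x * 8 - x * 4   into   (x * 2 - x) * 4    (common factor 4)

   the second via the power-of-two factoring path above.  */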
7152 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7153 specified by EXPR into the buffer PTR of length LEN bytes.
7154 Return the number of bytes placed in the buffer, or zero upon failure. */
7158 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7160 tree type = TREE_TYPE (expr);
7161 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7162 int byte, offset, word, words;
7163 unsigned char value;
7165 if (total_bytes > len)
7167 words = total_bytes / UNITS_PER_WORD;
7169 for (byte = 0; byte < total_bytes; byte++)
7171 int bitpos = byte * BITS_PER_UNIT;
7172 if (bitpos < HOST_BITS_PER_WIDE_INT)
7173 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7175 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7176 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7178 if (total_bytes > UNITS_PER_WORD)
7180 word = byte / UNITS_PER_WORD;
7181 if (WORDS_BIG_ENDIAN)
7182 word = (words - 1) - word;
7183 offset = word * UNITS_PER_WORD;
7184 if (BYTES_BIG_ENDIAN)
7185 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7187 offset += byte % UNITS_PER_WORD;
7190 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7191 ptr[offset] = value;
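/* For example, on a little-endian target with 32-bit int and 8-bit
   bytes, encoding the INTEGER_CST 0x01020304 stores

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   while a big-endian target stores the bytes in reverse order.  */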
7197 /* Subroutine of native_encode_expr. Encode the REAL_CST
7198 specified by EXPR into the buffer PTR of length LEN bytes.
7199 Return the number of bytes placed in the buffer, or zero upon failure. */
7203 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7205 tree type = TREE_TYPE (expr);
7206 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7207 int byte, offset, word, words, bitpos;
7208 unsigned char value;
7210 /* There are always 32 bits in each long, no matter the size of
7211 the host's long. We handle floating point representations with up to 192 bits. */
7215 if (total_bytes > len)
7217 words = 32 / UNITS_PER_WORD;
7219 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7221 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7222 bitpos += BITS_PER_UNIT)
7224 byte = (bitpos / BITS_PER_UNIT) & 3;
7225 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7227 if (UNITS_PER_WORD < 4)
7229 word = byte / UNITS_PER_WORD;
7230 if (WORDS_BIG_ENDIAN)
7231 word = (words - 1) - word;
7232 offset = word * UNITS_PER_WORD;
7233 if (BYTES_BIG_ENDIAN)
7234 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7236 offset += byte % UNITS_PER_WORD;
7239 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7240 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7245 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7246 specified by EXPR into the buffer PTR of length LEN bytes.
7247 Return the number of bytes placed in the buffer, or zero upon failure. */
7251 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7256 part = TREE_REALPART (expr);
7257 rsize = native_encode_expr (part, ptr, len);
7260 part = TREE_IMAGPART (expr);
7261 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7264 return rsize + isize;
7268 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7269 specified by EXPR into the buffer PTR of length LEN bytes.
7270 Return the number of bytes placed in the buffer, or zero upon failure. */
7274 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7276 int i, size, offset, count;
7277 tree itype, elem, elements;
7280 elements = TREE_VECTOR_CST_ELTS (expr);
7281 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7282 itype = TREE_TYPE (TREE_TYPE (expr));
7283 size = GET_MODE_SIZE (TYPE_MODE (itype));
7284 for (i = 0; i < count; i++)
7288 elem = TREE_VALUE (elements);
7289 elements = TREE_CHAIN (elements);
7296 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7301 if (offset + size > len)
7303 memset (ptr+offset, 0, size);
7311 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7312 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7313 buffer PTR of length LEN bytes. Return the number of bytes
7314 placed in the buffer, or zero upon failure. */
7317 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7319 switch (TREE_CODE (expr))
7322 return native_encode_int (expr, ptr, len);
7325 return native_encode_real (expr, ptr, len);
7328 return native_encode_complex (expr, ptr, len);
7331 return native_encode_vector (expr, ptr, len);
7339 /* Subroutine of native_interpret_expr. Interpret the contents of
7340 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7341 If the buffer cannot be interpreted, return NULL_TREE. */
7344 native_interpret_int (tree type, const unsigned char *ptr, int len)
7346 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7347 int byte, offset, word, words;
7348 unsigned char value;
7349 unsigned HOST_WIDE_INT lo = 0;
7350 HOST_WIDE_INT hi = 0;
7352 if (total_bytes > len)
7354 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7356 words = total_bytes / UNITS_PER_WORD;
7358 for (byte = 0; byte < total_bytes; byte++)
7360 int bitpos = byte * BITS_PER_UNIT;
7361 if (total_bytes > UNITS_PER_WORD)
7363 word = byte / UNITS_PER_WORD;
7364 if (WORDS_BIG_ENDIAN)
7365 word = (words - 1) - word;
7366 offset = word * UNITS_PER_WORD;
7367 if (BYTES_BIG_ENDIAN)
7368 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7370 offset += byte % UNITS_PER_WORD;
7373 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7374 value = ptr[offset];
7376 if (bitpos < HOST_BITS_PER_WIDE_INT)
7377 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7379 hi |= (unsigned HOST_WIDE_INT) value
7380 << (bitpos - HOST_BITS_PER_WIDE_INT);
7383 return build_int_cst_wide_type (type, lo, hi);
7387 /* Subroutine of native_interpret_expr. Interpret the contents of
7388 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7389 If the buffer cannot be interpreted, return NULL_TREE. */
7392 native_interpret_real (tree type, const unsigned char *ptr, int len)
7394 enum machine_mode mode = TYPE_MODE (type);
7395 int total_bytes = GET_MODE_SIZE (mode);
7396 int byte, offset, word, words, bitpos;
7397 unsigned char value;
7398 /* There are always 32 bits in each long, no matter the size of
7399 the host's long. We handle floating point representations with up to 192 bits. */
7404 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7405 if (total_bytes > len || total_bytes > 24)
7407 words = 32 / UNITS_PER_WORD;
7409 memset (tmp, 0, sizeof (tmp));
7410 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7411 bitpos += BITS_PER_UNIT)
7413 byte = (bitpos / BITS_PER_UNIT) & 3;
7414 if (UNITS_PER_WORD < 4)
7416 word = byte / UNITS_PER_WORD;
7417 if (WORDS_BIG_ENDIAN)
7418 word = (words - 1) - word;
7419 offset = word * UNITS_PER_WORD;
7420 if (BYTES_BIG_ENDIAN)
7421 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7423 offset += byte % UNITS_PER_WORD;
7426 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7427 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7429 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7432 real_from_target (&r, tmp, mode);
7433 return build_real (type, r);
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7442 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7444 tree etype, rpart, ipart;
7447 etype = TREE_TYPE (type);
7448 size = GET_MODE_SIZE (TYPE_MODE (etype));
7451 rpart = native_interpret_expr (etype, ptr, size);
7454 ipart = native_interpret_expr (etype, ptr+size, size);
7457 return build_complex (type, rpart, ipart);
7461 /* Subroutine of native_interpret_expr. Interpret the contents of
7462 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7463 If the buffer cannot be interpreted, return NULL_TREE. */
7466 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7468 tree etype, elem, elements;
7471 etype = TREE_TYPE (type);
7472 size = GET_MODE_SIZE (TYPE_MODE (etype));
7473 count = TYPE_VECTOR_SUBPARTS (type);
7474 if (size * count > len)
7477 elements = NULL_TREE;
7478 for (i = count - 1; i >= 0; i--)
7480 elem = native_interpret_expr (etype, ptr+(i*size), size);
7483 elements = tree_cons (NULL_TREE, elem, elements);
7485 return build_vector (type, elements);
7489 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7490 the buffer PTR of length LEN as a constant of type TYPE. For
7491 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7492 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7493 return NULL_TREE. */
7496 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7498 switch (TREE_CODE (type))
7503 return native_interpret_int (type, ptr, len);
7506 return native_interpret_real (type, ptr, len);
7509 return native_interpret_complex (type, ptr, len);
7512 return native_interpret_vector (type, ptr, len);
7520 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7521 TYPE at compile-time. If we're unable to perform the conversion
7522 return NULL_TREE. */
7525 fold_view_convert_expr (tree type, tree expr)
7527 /* We support up to 512-bit values (for V8DFmode). */
7528 unsigned char buffer[64];
7531 /* Check that the host and target are sane. */
7532 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7535 len = native_encode_expr (expr, buffer, sizeof (buffer));
7539 return native_interpret_expr (type, buffer, len);
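/* For example, on a target where float is IEEE single precision,
   a VIEW_CONVERT_EXPR<float> of the 32-bit INTEGER_CST 0x3f800000
   round-trips through the buffer above and yields the REAL_CST
   1.0f.  */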
7542 /* Build an expression for the address of T. Folds away INDIRECT_REF
7543 to avoid confusing the gimplify process. When IN_FOLD is true
7544 avoid modifications of T. */
7547 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7549 /* The size of the object is not relevant when talking about its address. */
7550 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7551 t = TREE_OPERAND (t, 0);
7553 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7554 if (TREE_CODE (t) == INDIRECT_REF
7555 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7557 t = TREE_OPERAND (t, 0);
7559 if (TREE_TYPE (t) != ptrtype)
7560 t = build1 (NOP_EXPR, ptrtype, t);
7566 while (handled_component_p (base))
7567 base = TREE_OPERAND (base, 0);
7570 TREE_ADDRESSABLE (base) = 1;
7572 t = build1 (ADDR_EXPR, ptrtype, t);
7575 t = build1 (ADDR_EXPR, ptrtype, t);
7580 /* Build an expression for the address of T with type PTRTYPE. This
7581 function modifies the input parameter 'T' by sometimes setting the
7582 TREE_ADDRESSABLE flag. */
7585 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7587 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7590 /* Build an expression for the address of T. This function modifies
7591 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7592 flag. When called from fold functions, use fold_addr_expr instead. */
7595 build_fold_addr_expr (tree t)
7597 return build_fold_addr_expr_with_type_1 (t,
7598 build_pointer_type (TREE_TYPE (t)),
7602 /* Same as build_fold_addr_expr, builds an expression for the address
7603 of T, but avoids touching the input node 't'. Fold functions
7604 should use this version. */
7607 fold_addr_expr (tree t)
7609 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7611 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7614 /* Fold a unary expression of code CODE and type TYPE with operand
7615 OP0. Return the folded expression if folding is successful.
7616 Otherwise, return NULL_TREE. */
7619 fold_unary (enum tree_code code, tree type, tree op0)
7623 enum tree_code_class kind = TREE_CODE_CLASS (code);
7625 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7626 && TREE_CODE_LENGTH (code) == 1);
7631 if (code == NOP_EXPR || code == CONVERT_EXPR
7632 || code == FLOAT_EXPR || code == ABS_EXPR)
7634 /* Don't use STRIP_NOPS, because the signedness of the argument type matters. */
7636 STRIP_SIGN_NOPS (arg0);
7640 /* Strip any conversions that don't change the mode. This
7641 is safe for every expression, except for a comparison
7642 expression because its signedness is derived from its operands.
7645 Note that this is done as an internal manipulation within
7646 the constant folder, in order to find the simplest
7647 representation of the arguments so that their form can be
7648 studied. In any case, the appropriate type conversions
7649 should be put back in the tree that will get out of the constant folder. */
7655 if (TREE_CODE_CLASS (code) == tcc_unary)
7657 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7658 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7659 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7660 else if (TREE_CODE (arg0) == COND_EXPR)
7662 tree arg01 = TREE_OPERAND (arg0, 1);
7663 tree arg02 = TREE_OPERAND (arg0, 2);
7664 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7665 arg01 = fold_build1 (code, type, arg01);
7666 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7667 arg02 = fold_build1 (code, type, arg02);
7668 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7671 /* If this was a conversion, and all we did was to move it
7672 inside the COND_EXPR, bring it back out.
7673 it is a conversion from integer to integer and the
7674 result precision is no wider than a word since such a
7675 conversion is cheap and may be optimized away by combine,
7676 while it couldn't if it were outside the COND_EXPR. Then return
7677 so we don't get into an infinite recursion loop taking the
7678 conversion out and then back in. */
7680 if ((code == NOP_EXPR || code == CONVERT_EXPR
7681 || code == NON_LVALUE_EXPR)
7682 && TREE_CODE (tem) == COND_EXPR
7683 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7684 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7686 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7687 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7688 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7689 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7691 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7692 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7693 || flag_syntax_only))
7694 tem = build1 (code, type,
7696 TREE_TYPE (TREE_OPERAND
7697 (TREE_OPERAND (tem, 1), 0)),
7698 TREE_OPERAND (tem, 0),
7699 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7700 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7703 else if (COMPARISON_CLASS_P (arg0))
7705 if (TREE_CODE (type) == BOOLEAN_TYPE)
7707 arg0 = copy_node (arg0);
7708 TREE_TYPE (arg0) = type;
7711 else if (TREE_CODE (type) != INTEGER_TYPE)
7712 return fold_build3 (COND_EXPR, type, arg0,
7713 fold_build1 (code, type,
7715 fold_build1 (code, type,
7716 integer_zero_node));
7723 /* Re-association barriers around constants and other re-association
7724 barriers can be removed. */
7725 if (CONSTANT_CLASS_P (op0)
7726 || TREE_CODE (op0) == PAREN_EXPR)
7727 return fold_convert (type, op0);
7732 case FIX_TRUNC_EXPR:
7733 if (TREE_TYPE (op0) == type)
7736 /* If we have (type) (a CMP b) and type is an integral type, return
7737 a new expression involving the new type. */
7738 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7739 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7740 TREE_OPERAND (op0, 1));
7742 /* Handle cases of two conversions in a row. */
7743 if (CONVERT_EXPR_P (op0))
7745 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7746 tree inter_type = TREE_TYPE (op0);
7747 int inside_int = INTEGRAL_TYPE_P (inside_type);
7748 int inside_ptr = POINTER_TYPE_P (inside_type);
7749 int inside_float = FLOAT_TYPE_P (inside_type);
7750 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7751 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7752 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7753 int inter_int = INTEGRAL_TYPE_P (inter_type);
7754 int inter_ptr = POINTER_TYPE_P (inter_type);
7755 int inter_float = FLOAT_TYPE_P (inter_type);
7756 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7757 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7758 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7759 int final_int = INTEGRAL_TYPE_P (type);
7760 int final_ptr = POINTER_TYPE_P (type);
7761 int final_float = FLOAT_TYPE_P (type);
7762 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7763 unsigned int final_prec = TYPE_PRECISION (type);
7764 int final_unsignedp = TYPE_UNSIGNED (type);
7766 /* In addition to the cases of two conversions in a row
7767 handled below, if we are converting something to its own
7768 type via an object of identical or wider precision, neither
7769 conversion is needed. */
7770 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7771 && (((inter_int || inter_ptr) && final_int)
7772 || (inter_float && final_float))
7773 && inter_prec >= final_prec)
7774 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7776 /* Likewise, if the intermediate and final types are either both
7777 float or both integer, we don't need the middle conversion if
7778 it is wider than the final type and doesn't change the signedness
7779 (for integers). Avoid this if the final type is a pointer
7780 since then we sometimes need the inner conversion. Likewise if
7781 the outer has a precision not equal to the size of its mode. */
7782 if (((inter_int && inside_int)
7783 || (inter_float && inside_float)
7784 || (inter_vec && inside_vec))
7785 && inter_prec >= inside_prec
7786 && (inter_float || inter_vec
7787 || inter_unsignedp == inside_unsignedp)
7788 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7789 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7791 && (! final_vec || inter_prec == inside_prec))
7792 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7794 /* If we have a sign-extension of a zero-extended value, we can
7795 replace that by a single zero-extension. */
7796 if (inside_int && inter_int && final_int
7797 && inside_prec < inter_prec && inter_prec < final_prec
7798 && inside_unsignedp && !inter_unsignedp)
7799 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7801 /* Two conversions in a row are not needed unless:
7802 - some conversion is floating-point (overstrict for now), or
7803 - some conversion is a vector (overstrict for now), or
7804 - the intermediate type is narrower than both initial and final, or
7806 - the intermediate type and innermost type differ in signedness,
7807 and the outermost type is wider than the intermediate, or
7808 - the initial type is a pointer type and the precisions of the
7809 intermediate and final types differ, or
7810 - the final type is a pointer type and the precisions of the
7811 initial and intermediate types differ. */
7812 if (! inside_float && ! inter_float && ! final_float
7813 && ! inside_vec && ! inter_vec && ! final_vec
7814 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7815 && ! (inside_int && inter_int
7816 && inter_unsignedp != inside_unsignedp
7817 && inter_prec < final_prec)
7818 && ((inter_unsignedp && inter_prec > inside_prec)
7819 == (final_unsignedp && final_prec > inter_prec))
7820 && ! (inside_ptr && inter_prec != final_prec)
7821 && ! (final_ptr && inside_prec != inter_prec)
7822 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7823 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7824 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7827 /* Handle (T *)&A.B.C for A being of type T and B and C
7828 living at offset zero. This occurs frequently in
7829 C++ upcasting and then accessing the base. */
7830 if (TREE_CODE (op0) == ADDR_EXPR
7831 && POINTER_TYPE_P (type)
7832 && handled_component_p (TREE_OPERAND (op0, 0)))
7834 HOST_WIDE_INT bitsize, bitpos;
7836 enum machine_mode mode;
7837 int unsignedp, volatilep;
7838 tree base = TREE_OPERAND (op0, 0);
7839 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7840 &mode, &unsignedp, &volatilep, false);
7841 /* If the reference was to a (constant) zero offset, we can use
7842 the address of the base if it has the same base type
7843 as the result type. */
7844 if (! offset && bitpos == 0
7845 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7846 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7847 return fold_convert (type, fold_addr_expr (base));
7850 if (TREE_CODE (op0) == MODIFY_EXPR
7851 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7852 /* Detect assigning a bitfield. */
7853 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7855 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7857 /* Don't leave an assignment inside a conversion
7858 unless assigning a bitfield. */
7859 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7860 /* First do the assignment, then return converted constant. */
7861 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7862 TREE_NO_WARNING (tem) = 1;
7863 TREE_USED (tem) = 1;
7867 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7868 constant (if x has signed type, the sign bit cannot be set
7869 in c). This folds extension into the BIT_AND_EXPR.
7870 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7871 very likely don't have maximal range for their precision and this
7872 transformation effectively doesn't preserve non-maximal ranges. */
7873 if (TREE_CODE (type) == INTEGER_TYPE
7874 && TREE_CODE (op0) == BIT_AND_EXPR
7875 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7878 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7881 if (TYPE_UNSIGNED (TREE_TYPE (and))
7882 || (TYPE_PRECISION (type)
7883 <= TYPE_PRECISION (TREE_TYPE (and))))
7885 else if (TYPE_PRECISION (TREE_TYPE (and1))
7886 <= HOST_BITS_PER_WIDE_INT
7887 && host_integerp (and1, 1))
7889 unsigned HOST_WIDE_INT cst;
7891 cst = tree_low_cst (and1, 1);
7892 cst &= (HOST_WIDE_INT) -1
7893 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7894 change = (cst == 0);
7895 #ifdef LOAD_EXTEND_OP
7897 && !flag_syntax_only
7898 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7901 tree uns = unsigned_type_for (TREE_TYPE (and0));
7902 and0 = fold_convert (uns, and0);
7903 and1 = fold_convert (uns, and1);
7909 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7910 TREE_INT_CST_HIGH (and1), 0,
7911 TREE_OVERFLOW (and1));
7912 return fold_build2 (BIT_AND_EXPR, type,
7913 fold_convert (type, and0), tem);
7917 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7918 when one of the new casts will fold away. Conservatively we assume
7919 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7920 if (POINTER_TYPE_P (type)
7921 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7922 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7923 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7924 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7926 tree arg00 = TREE_OPERAND (arg0, 0);
7927 tree arg01 = TREE_OPERAND (arg0, 1);
7929 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7930 fold_convert (sizetype, arg01));
7933 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7934 of the same precision, and X is an integer type not narrower than
7935 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7936 if (INTEGRAL_TYPE_P (type)
7937 && TREE_CODE (op0) == BIT_NOT_EXPR
7938 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7939 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7940 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7942 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7943 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7944 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7945 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7948 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7949 type of X and Y (integer types only). */
7950 if (INTEGRAL_TYPE_P (type)
7951 && TREE_CODE (op0) == MULT_EXPR
7952 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7953 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7955 /* Be careful not to introduce new overflows. */
7957 if (TYPE_OVERFLOW_WRAPS (type))
7960 mult_type = unsigned_type_for (type);
7962 tem = fold_build2 (MULT_EXPR, mult_type,
7963 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7964 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7965 return fold_convert (type, tem);
7968 tem = fold_convert_const (code, type, op0);
7969 return tem ? tem : NULL_TREE;
7971 case FIXED_CONVERT_EXPR:
7972 tem = fold_convert_const (code, type, arg0);
7973 return tem ? tem : NULL_TREE;
7975 case VIEW_CONVERT_EXPR:
7976 if (TREE_TYPE (op0) == type)
7978 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7979 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7981 /* For integral conversions with the same precision or pointer
7982 conversions use a NOP_EXPR instead. */
7983 if ((INTEGRAL_TYPE_P (type)
7984 || POINTER_TYPE_P (type))
7985 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7986 || POINTER_TYPE_P (TREE_TYPE (op0)))
7987 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
7988 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
7989 a sub-type to its base type as generated by the Ada FE. */
7990 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 && TREE_TYPE (TREE_TYPE (op0))))
7992 return fold_convert (type, op0);
7994 /* Strip inner integral conversions that do not change the precision. */
7995 if (CONVERT_EXPR_P (op0)
7996 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7997 || POINTER_TYPE_P (TREE_TYPE (op0)))
7998 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7999 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8000 && (TYPE_PRECISION (TREE_TYPE (op0))
8001 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8002 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8004 return fold_view_convert_expr (type, op0);
8007 tem = fold_negate_expr (arg0);
8009 return fold_convert (type, tem);
8013 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8014 return fold_abs_const (arg0, type);
8015 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8016 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8017 /* Convert fabs((double)float) into (double)fabsf(float). */
8018 else if (TREE_CODE (arg0) == NOP_EXPR
8019 && TREE_CODE (type) == REAL_TYPE)
8021 tree targ0 = strip_float_extensions (arg0);
8023 return fold_convert (type, fold_build1 (ABS_EXPR,
8027 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8028 else if (TREE_CODE (arg0) == ABS_EXPR)
8030 else if (tree_expr_nonnegative_p (arg0))
8033 /* Strip sign ops from argument. */
8034 if (TREE_CODE (type) == REAL_TYPE)
8036 tem = fold_strip_sign_ops (arg0);
8038 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8043 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8044 return fold_convert (type, arg0);
8045 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8047 tree itype = TREE_TYPE (type);
8048 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8049 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8050 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8052 if (TREE_CODE (arg0) == COMPLEX_CST)
8054 tree itype = TREE_TYPE (type);
8055 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8056 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8057 return build_complex (type, rpart, negate_expr (ipart));
8059 if (TREE_CODE (arg0) == CONJ_EXPR)
8060 return fold_convert (type, TREE_OPERAND (arg0, 0));
8064 if (TREE_CODE (arg0) == INTEGER_CST)
8065 return fold_not_const (arg0, type);
8066 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8067 return fold_convert (type, TREE_OPERAND (arg0, 0));
8068 /* Convert ~ (-A) to A - 1. */
8069 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8070 return fold_build2 (MINUS_EXPR, type,
8071 fold_convert (type, TREE_OPERAND (arg0, 0)),
8072 build_int_cst (type, 1));
8073 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8074 else if (INTEGRAL_TYPE_P (type)
8075 && ((TREE_CODE (arg0) == MINUS_EXPR
8076 && integer_onep (TREE_OPERAND (arg0, 1)))
8077 || (TREE_CODE (arg0) == PLUS_EXPR
8078 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8079 return fold_build1 (NEGATE_EXPR, type,
8080 fold_convert (type, TREE_OPERAND (arg0, 0)));
8081 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8082 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8083 && (tem = fold_unary (BIT_NOT_EXPR, type,
8085 TREE_OPERAND (arg0, 0)))))
8086 return fold_build2 (BIT_XOR_EXPR, type, tem,
8087 fold_convert (type, TREE_OPERAND (arg0, 1)));
8088 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8089 && (tem = fold_unary (BIT_NOT_EXPR, type,
8091 TREE_OPERAND (arg0, 1)))))
8092 return fold_build2 (BIT_XOR_EXPR, type,
8093 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8094 /* Perform BIT_NOT_EXPR on each element individually. */
8095 else if (TREE_CODE (arg0) == VECTOR_CST)
8097 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8098 int count = TYPE_VECTOR_SUBPARTS (type), i;
8100 for (i = 0; i < count; i++)
8104 elem = TREE_VALUE (elements);
8105 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8106 if (elem == NULL_TREE)
8108 elements = TREE_CHAIN (elements);
8111 elem = build_int_cst (TREE_TYPE (type), -1);
8112 list = tree_cons (NULL_TREE, elem, list);
8115 return build_vector (type, nreverse (list));
8120 case TRUTH_NOT_EXPR:
8121 /* The argument to invert_truthvalue must have Boolean type. */
8122 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8123 arg0 = fold_convert (boolean_type_node, arg0);
8125 /* Note that the operand of this must be an int
8126 and its values must be 0 or 1.
8127 ("true" is a fixed value perhaps depending on the language,
8128 but we don't handle values other than 1 correctly yet.) */
8129 tem = fold_truth_not_expr (arg0);
8132 return fold_convert (type, tem);
8135 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8136 return fold_convert (type, arg0);
8137 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8138 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8139 TREE_OPERAND (arg0, 1));
8140 if (TREE_CODE (arg0) == COMPLEX_CST)
8141 return fold_convert (type, TREE_REALPART (arg0));
8142 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8144 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8145 tem = fold_build2 (TREE_CODE (arg0), itype,
8146 fold_build1 (REALPART_EXPR, itype,
8147 TREE_OPERAND (arg0, 0)),
8148 fold_build1 (REALPART_EXPR, itype,
8149 TREE_OPERAND (arg0, 1)));
8150 return fold_convert (type, tem);
8152 if (TREE_CODE (arg0) == CONJ_EXPR)
8154 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8155 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8156 return fold_convert (type, tem);
8158 if (TREE_CODE (arg0) == CALL_EXPR)
8160 tree fn = get_callee_fndecl (arg0);
8161 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8162 switch (DECL_FUNCTION_CODE (fn))
8164 CASE_FLT_FN (BUILT_IN_CEXPI):
8165 fn = mathfn_built_in (type, BUILT_IN_COS);
8167 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8177 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8178 return fold_convert (type, integer_zero_node);
8179 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8180 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8181 TREE_OPERAND (arg0, 0));
8182 if (TREE_CODE (arg0) == COMPLEX_CST)
8183 return fold_convert (type, TREE_IMAGPART (arg0));
8184 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8186 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8187 tem = fold_build2 (TREE_CODE (arg0), itype,
8188 fold_build1 (IMAGPART_EXPR, itype,
8189 TREE_OPERAND (arg0, 0)),
8190 fold_build1 (IMAGPART_EXPR, itype,
8191 TREE_OPERAND (arg0, 1)));
8192 return fold_convert (type, tem);
8194 if (TREE_CODE (arg0) == CONJ_EXPR)
8196 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8197 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8198 return fold_convert (type, negate_expr (tem));
8200 if (TREE_CODE (arg0) == CALL_EXPR)
8202 tree fn = get_callee_fndecl (arg0);
8203 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8204 switch (DECL_FUNCTION_CODE (fn))
8206 CASE_FLT_FN (BUILT_IN_CEXPI):
8207 fn = mathfn_built_in (type, BUILT_IN_SIN);
8209 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8220 } /* switch (code) */
8223 /* Fold a binary expression of code CODE and type TYPE with operands
8224 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8225 Return the folded expression if folding is successful. Otherwise,
8226 return NULL_TREE. */
8229 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8231 enum tree_code compl_code;
8233 if (code == MIN_EXPR)
8234 compl_code = MAX_EXPR;
8235 else if (code == MAX_EXPR)
8236 compl_code = MIN_EXPR;
8240 /* MIN (MAX (a, b), b) == b. */
8241 if (TREE_CODE (op0) == compl_code
8242 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8243 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8245 /* MIN (MAX (b, a), b) == b. */
8246 if (TREE_CODE (op0) == compl_code
8247 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8248 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8249 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8251 /* MIN (a, MAX (a, b)) == a. */
8252 if (TREE_CODE (op1) == compl_code
8253 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8254 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8255 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8257 /* MIN (a, MAX (b, a)) == a. */
8258 if (TREE_CODE (op1) == compl_code
8259 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8260 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8261 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
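/* For example, fold_minmax rewrites

     MIN (MAX (a, b), b)   into   b
     MAX (a, MIN (a, b))   into   a

   with omit_one_operand preserving any side effects of the
   discarded operand.  */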
8266 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8267 by changing CODE to reduce the magnitude of constants involved in
8268 ARG0 of the comparison.
8269 Returns a canonicalized comparison tree if a simplification was
8270 possible, otherwise returns NULL_TREE.
8271 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8272 valid if signed overflow is undefined. */
8275 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8276 tree arg0, tree arg1,
8277 bool *strict_overflow_p)
8279 enum tree_code code0 = TREE_CODE (arg0);
8280 tree t, cst0 = NULL_TREE;
8284 /* Match A +- CST code arg1 and CST code arg1. */
8285 if (!(((code0 == MINUS_EXPR
8286 || code0 == PLUS_EXPR)
8287 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8288 || code0 == INTEGER_CST))
8291 /* Identify the constant in arg0 and its sign. */
8292 if (code0 == INTEGER_CST)
8295 cst0 = TREE_OPERAND (arg0, 1);
8296 sgn0 = tree_int_cst_sgn (cst0);
8298 /* Overflowed constants and zero will cause problems. */
8299 if (integer_zerop (cst0)
8300 || TREE_OVERFLOW (cst0))
8303 /* See if we can reduce the magnitude of the constant in
8304 arg0 by changing the comparison code. */
8305 if (code0 == INTEGER_CST)
8307 /* CST <= arg1 -> CST-1 < arg1. */
8308 if (code == LE_EXPR && sgn0 == 1)
8310 /* -CST < arg1 -> -CST-1 <= arg1. */
8311 else if (code == LT_EXPR && sgn0 == -1)
8313 /* CST > arg1 -> CST-1 >= arg1. */
8314 else if (code == GT_EXPR && sgn0 == 1)
8316 /* -CST >= arg1 -> -CST-1 > arg1. */
8317 else if (code == GE_EXPR && sgn0 == -1)
8321 /* arg1 code' CST' might be more canonical. */
8326 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8328 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8330 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8331 else if (code == GT_EXPR
8332 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8334 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8335 else if (code == LE_EXPR
8336 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8338 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8339 else if (code == GE_EXPR
8340 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8344 *strict_overflow_p = true;
8347 /* Now build the constant reduced in magnitude. */
8348 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8349 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8350 if (code0 != INTEGER_CST)
8351 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8353 /* If swapping might yield a more canonical form, do so. */
8355 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8357 return fold_build2 (code, type, t, arg1);
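/* For example, for signed x with undefined overflow, the helper
   above canonicalizes

     x - 2 <  y   into   x - 1 <= y
     3 <= x       into   x > 2

   each step reducing the magnitude of the constant by one.  */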
8360 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8361 overflow further. Try to decrease the magnitude of constants involved
8362 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8363 and put sole constants at the second argument position.
8364 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8367 maybe_canonicalize_comparison (enum tree_code code, tree type,
8368 tree arg0, tree arg1)
8371 bool strict_overflow_p;
8372 const char * const warnmsg = G_("assuming signed overflow does not occur "
8373 "when reducing constant in comparison");
8375 /* In principle pointers also have undefined overflow behavior,
8376 but that causes problems elsewhere. */
8377 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8378 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8381 /* Try canonicalization by simplifying arg0. */
8382 strict_overflow_p = false;
8383 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8384 &strict_overflow_p);
8387 if (strict_overflow_p)
8388 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8392 /* Try canonicalization by simplifying arg1 using the swapped comparison. */
8394 code = swap_tree_comparison (code);
8395 strict_overflow_p = false;
8396 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8397 &strict_overflow_p);
8398 if (t && strict_overflow_p)
8399 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8403 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8404 space. This is used to avoid issuing overflow warnings for
8405 expressions like &p->x which cannot wrap. */
8408 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8410 unsigned HOST_WIDE_INT offset_low, total_low;
8411 HOST_WIDE_INT size, offset_high, total_high;
8413 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8419 if (offset == NULL_TREE)
8420 {
8421 offset_low = 0;
8422 offset_high = 0;
8423 }
8424 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8425 return true;
8426 else
8427 {
8428 offset_low = TREE_INT_CST_LOW (offset);
8429 offset_high = TREE_INT_CST_HIGH (offset);
8430 }
8432 if (add_double_with_sign (offset_low, offset_high,
8433 bitpos / BITS_PER_UNIT, 0,
8434 &total_low, &total_high,
8435 true))
8436 return true;
8438 if (total_high != 0)
8439 return true;
8441 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8442 if (size <= 0)
8443 return true;
8445 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8446 array. */
8447 if (TREE_CODE (base) == ADDR_EXPR)
8449 HOST_WIDE_INT base_size;
8451 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8452 if (base_size > 0 && size < base_size)
8453 size = base_size;
8454 }
8456 return total_low > (unsigned HOST_WIDE_INT) size;
8457 }
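/* Example (illustrative): for "&p->x" with p of type "struct S *",
   x at byte offset 8 and sizeof (struct S) == 16, total_low is 8 and
   size is 16, so the address cannot wrap and no bogus overflow warning
   is issued; a constant offset beyond the pointed-to size makes this
   function return true instead.  */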
8459 /* Subroutine of fold_binary. This routine performs all of the
8460 transformations that are common to the equality/inequality
8461 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8462 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8463 fold_binary should call fold_binary. Fold a comparison with
8464 tree code CODE and type TYPE with operands OP0 and OP1. Return
8465 the folded comparison or NULL_TREE. */
8467 static tree
8468 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8469 {
8470 tree arg0, arg1, tem;
8472 arg0 = op0;
8473 arg1 = op1;
8475 STRIP_SIGN_NOPS (arg0);
8476 STRIP_SIGN_NOPS (arg1);
8478 tem = fold_relational_const (code, type, arg0, arg1);
8479 if (tem != NULL_TREE)
8480 return tem;
8482 /* If one arg is a real or integer constant, put it last. */
8483 if (tree_swap_operands_p (arg0, arg1, true))
8484 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8486 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8487 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8488 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8489 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8490 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8491 && (TREE_CODE (arg1) == INTEGER_CST
8492 && !TREE_OVERFLOW (arg1)))
8494 tree const1 = TREE_OPERAND (arg0, 1);
8495 tree const2 = arg1;
8496 tree variable = TREE_OPERAND (arg0, 0);
8497 tree lhs;
8498 int lhs_add;
8499 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8501 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8502 TREE_TYPE (arg1), const2, const1);
8504 /* If the constant operation overflowed this can be
8505 simplified as a comparison against INT_MAX/INT_MIN. */
8506 if (TREE_CODE (lhs) == INTEGER_CST
8507 && TREE_OVERFLOW (lhs))
8509 int const1_sgn = tree_int_cst_sgn (const1);
8510 enum tree_code code2 = code;
8512 /* Get the sign of the constant on the lhs if the
8513 operation were VARIABLE + CONST1. */
8514 if (TREE_CODE (arg0) == MINUS_EXPR)
8515 const1_sgn = -const1_sgn;
8517 /* The sign of the constant determines if we overflowed
8518 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8519 Canonicalize to the INT_MIN overflow by swapping the comparison
8520 code. */
8521 if (const1_sgn == -1)
8522 code2 = swap_tree_comparison (code);
8524 /* We now can look at the canonicalized case
8525 VARIABLE + 1 CODE2 INT_MIN
8526 and decide on the result. */
8527 if (code2 == LT_EXPR
8528 || code2 == LE_EXPR
8529 || code2 == EQ_EXPR)
8530 return omit_one_operand (type, boolean_false_node, variable);
8531 else if (code2 == NE_EXPR
8532 || code2 == GE_EXPR
8533 || code2 == GT_EXPR)
8534 return omit_one_operand (type, boolean_true_node, variable);
8537 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8538 && (TREE_CODE (lhs) != INTEGER_CST
8539 || !TREE_OVERFLOW (lhs)))
8541 fold_overflow_warning (("assuming signed overflow does not occur "
8542 "when changing X +- C1 cmp C2 to "
8544 WARN_STRICT_OVERFLOW_COMPARISON);
8545 return fold_build2 (code, type, variable, lhs);
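/* Worked example (illustrative, 32-bit signed int): "x + 1 < 5"
   becomes "x < 4".  If the new constant overflows, as in
   "x + 1 < INT_MIN", the canonicalized form decides the result
   outright: LT/LE/EQ fold to false and NE/GE/GT fold to true.  */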
8549 /* For comparisons of pointers we can decompose it to a compile time
8550 comparison of the base objects and the offsets into the object.
8551 This requires at least one operand being an ADDR_EXPR or a
8552 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8553 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8554 && (TREE_CODE (arg0) == ADDR_EXPR
8555 || TREE_CODE (arg1) == ADDR_EXPR
8556 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8557 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8559 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8560 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8561 enum machine_mode mode;
8562 int volatilep, unsignedp;
8563 bool indirect_base0 = false, indirect_base1 = false;
8565 /* Get base and offset for the access. Strip ADDR_EXPR for
8566 get_inner_reference, but put it back by stripping INDIRECT_REF
8567 off the base object if possible. indirect_baseN will be true
8568 if baseN is not an address but refers to the object itself. */
8570 if (TREE_CODE (arg0) == ADDR_EXPR)
8572 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8573 &bitsize, &bitpos0, &offset0, &mode,
8574 &unsignedp, &volatilep, false);
8575 if (TREE_CODE (base0) == INDIRECT_REF)
8576 base0 = TREE_OPERAND (base0, 0);
8577 else
8578 indirect_base0 = true;
8580 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8582 base0 = TREE_OPERAND (arg0, 0);
8583 offset0 = TREE_OPERAND (arg0, 1);
8587 if (TREE_CODE (arg1) == ADDR_EXPR)
8589 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8590 &bitsize, &bitpos1, &offset1, &mode,
8591 &unsignedp, &volatilep, false);
8592 if (TREE_CODE (base1) == INDIRECT_REF)
8593 base1 = TREE_OPERAND (base1, 0);
8594 else
8595 indirect_base1 = true;
8597 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8599 base1 = TREE_OPERAND (arg1, 0);
8600 offset1 = TREE_OPERAND (arg1, 1);
8603 /* If we have equivalent bases we might be able to simplify. */
8604 if (indirect_base0 == indirect_base1
8605 && operand_equal_p (base0, base1, 0))
8607 /* We can fold this expression to a constant if the non-constant
8608 offset parts are equal. */
8609 if ((offset0 == offset1
8610 || (offset0 && offset1
8611 && operand_equal_p (offset0, offset1, 0)))
8612 && (code == EQ_EXPR || code == NE_EXPR
8613 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8614 {
8617 if (code != EQ_EXPR
8618 && code != NE_EXPR
8619 && bitpos0 != bitpos1
8620 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8621 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8622 fold_overflow_warning (("assuming pointer wraparound does not "
8623 "occur when comparing P +- C1 with "
8625 WARN_STRICT_OVERFLOW_CONDITIONAL);
8627 switch (code)
8628 {
8629 case EQ_EXPR:
8630 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8631 case NE_EXPR:
8632 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8633 case LT_EXPR:
8634 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8635 case LE_EXPR:
8636 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8637 case GE_EXPR:
8638 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8639 case GT_EXPR:
8640 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8641 default:;
8642 }
8644 /* We can simplify the comparison to a comparison of the variable
8645 offset parts if the constant offset parts are equal.
8646 Be careful to use signed size type here because otherwise we
8647 mess with array offsets in the wrong way. This is possible
8648 because pointer arithmetic is restricted to remain within an
8649 object and overflow on pointer differences is undefined as of
8650 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8651 else if (bitpos0 == bitpos1
8652 && ((code == EQ_EXPR || code == NE_EXPR)
8653 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8655 tree signed_size_type_node;
8656 signed_size_type_node = signed_type_for (size_type_node);
8658 /* By converting to signed size type we cover middle-end pointer
8659 arithmetic which operates on unsigned pointer types of size
8660 type size and ARRAY_REF offsets which are properly sign or
8661 zero extended from their type in case it is narrower than
8662 the size type. */
8663 if (offset0 == NULL_TREE)
8664 offset0 = build_int_cst (signed_size_type_node, 0);
8665 else
8666 offset0 = fold_convert (signed_size_type_node, offset0);
8667 if (offset1 == NULL_TREE)
8668 offset1 = build_int_cst (signed_size_type_node, 0);
8669 else
8670 offset1 = fold_convert (signed_size_type_node, offset1);
8672 if (code != EQ_EXPR
8673 && code != NE_EXPR
8674 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8675 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8676 fold_overflow_warning (("assuming pointer wraparound does not "
8677 "occur when comparing P +- C1 with "
8679 WARN_STRICT_OVERFLOW_COMPARISON);
8681 return fold_build2 (code, type, offset0, offset1);
8684 /* For non-equal bases we can simplify if they are addresses
8685 of local binding decls or constants. */
8686 else if (indirect_base0 && indirect_base1
8687 /* We know that !operand_equal_p (base0, base1, 0)
8688 because the if condition was false. But make
8689 sure two decls are not the same. */
8690 && base0 != base1
8691 && TREE_CODE (arg0) == ADDR_EXPR
8692 && TREE_CODE (arg1) == ADDR_EXPR
8693 && (((TREE_CODE (base0) == VAR_DECL
8694 || TREE_CODE (base0) == PARM_DECL)
8695 && (targetm.binds_local_p (base0)
8696 || CONSTANT_CLASS_P (base1)))
8697 || CONSTANT_CLASS_P (base0))
8698 && (((TREE_CODE (base1) == VAR_DECL
8699 || TREE_CODE (base1) == PARM_DECL)
8700 && (targetm.binds_local_p (base1)
8701 || CONSTANT_CLASS_P (base0)))
8702 || CONSTANT_CLASS_P (base1)))
8704 if (code == EQ_EXPR)
8705 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8706 else if (code == NE_EXPR)
8707 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8709 /* For equal offsets we can simplify to a comparison of the
8710 base addresses. */
8711 else if (bitpos0 == bitpos1
8712 && (indirect_base0
8713 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8714 && (indirect_base1
8715 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8716 && ((offset0 == offset1)
8717 || (offset0 && offset1
8718 && operand_equal_p (offset0, offset1, 0))))
8719 {
8720 if (indirect_base0)
8721 base0 = fold_addr_expr (base0);
8722 if (indirect_base1)
8723 base1 = fold_addr_expr (base1);
8724 return fold_build2 (code, type, base0, base1);
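/* Example (illustrative): "&s.a != &s.b" decomposes both sides to the
   common base "s" with different constant bit positions and folds to
   true, while "&a[i] == &a[j]" with equal constant parts folds to a
   comparison of the variable offsets in the signed size type.  */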
8728 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8729 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8730 the resulting offset is smaller in absolute value than the
8731 original one. */
8732 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8733 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8734 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8735 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8736 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8737 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8738 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8740 tree const1 = TREE_OPERAND (arg0, 1);
8741 tree const2 = TREE_OPERAND (arg1, 1);
8742 tree variable1 = TREE_OPERAND (arg0, 0);
8743 tree variable2 = TREE_OPERAND (arg1, 0);
8744 tree cst;
8745 const char * const warnmsg = G_("assuming signed overflow does not "
8746 "occur when combining constants around "
8749 /* Put the constant on the side where it doesn't overflow and is
8750 of lower absolute value than before. */
8751 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8752 ? MINUS_EXPR : PLUS_EXPR,
8753 const2, const1, 0);
8754 if (!TREE_OVERFLOW (cst)
8755 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8756 {
8757 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8758 return fold_build2 (code, type,
8759 variable1,
8760 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8761 variable2, cst));
8762 }
8764 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8765 ? MINUS_EXPR : PLUS_EXPR,
8766 const1, const2, 0);
8767 if (!TREE_OVERFLOW (cst)
8768 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8769 {
8770 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8771 return fold_build2 (code, type,
8772 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8773 variable1, cst),
8774 variable2);
8775 }
8776 }
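/* Worked example (illustrative, signed with undefined overflow):
   "x + 10 < y + 3" moves the combined constant to the side where its
   magnitude shrinks, giving "x < y - 7"; the recorded warning notes
   that this assumed no signed wraparound.  */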
8778 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8779 signed arithmetic case. That form is created by the compiler
8780 often enough for folding it to be of value. One example is in
8781 computing loop trip counts after Operator Strength Reduction. */
8782 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8783 && TREE_CODE (arg0) == MULT_EXPR
8784 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8785 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8786 && integer_zerop (arg1))
8788 tree const1 = TREE_OPERAND (arg0, 1);
8789 tree const2 = arg1; /* zero */
8790 tree variable1 = TREE_OPERAND (arg0, 0);
8791 enum tree_code cmp_code = code;
8793 gcc_assert (!integer_zerop (const1));
8795 fold_overflow_warning (("assuming signed overflow does not occur when "
8796 "eliminating multiplication in comparison "
8798 WARN_STRICT_OVERFLOW_COMPARISON);
8800 /* If const1 is negative we swap the sense of the comparison. */
8801 if (tree_int_cst_sgn (const1) < 0)
8802 cmp_code = swap_tree_comparison (cmp_code);
8804 return fold_build2 (cmp_code, type, variable1, const2);
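/* Example (illustrative, signed with undefined overflow): "x * 4 > 0"
   folds to "x > 0"; with a negative multiplier, "x * -2 > 0" swaps the
   comparison sense and folds to "x < 0".  */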
8807 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8808 if (tem)
8809 return tem;
8811 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8813 tree targ0 = strip_float_extensions (arg0);
8814 tree targ1 = strip_float_extensions (arg1);
8815 tree newtype = TREE_TYPE (targ0);
8817 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8818 newtype = TREE_TYPE (targ1);
8820 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8821 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8822 return fold_build2 (code, type, fold_convert (newtype, targ0),
8823 fold_convert (newtype, targ1));
8825 /* (-a) CMP (-b) -> b CMP a */
8826 if (TREE_CODE (arg0) == NEGATE_EXPR
8827 && TREE_CODE (arg1) == NEGATE_EXPR)
8828 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8829 TREE_OPERAND (arg0, 0));
8831 if (TREE_CODE (arg1) == REAL_CST)
8833 REAL_VALUE_TYPE cst;
8834 cst = TREE_REAL_CST (arg1);
8836 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8837 if (TREE_CODE (arg0) == NEGATE_EXPR)
8838 return fold_build2 (swap_tree_comparison (code), type,
8839 TREE_OPERAND (arg0, 0),
8840 build_real (TREE_TYPE (arg1),
8841 REAL_VALUE_NEGATE (cst)));
8843 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8844 /* a CMP (-0) -> a CMP 0 */
8845 if (REAL_VALUE_MINUS_ZERO (cst))
8846 return fold_build2 (code, type, arg0,
8847 build_real (TREE_TYPE (arg1), dconst0));
8849 /* x != NaN is always true, other ops are always false. */
8850 if (REAL_VALUE_ISNAN (cst)
8851 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8853 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8854 return omit_one_operand (type, tem, arg0);
8857 /* Fold comparisons against infinity. */
8858 if (REAL_VALUE_ISINF (cst))
8860 tem = fold_inf_compare (code, type, arg0, arg1);
8861 if (tem != NULL_TREE)
8862 return tem;
8866 /* If this is a comparison of a real constant with a PLUS_EXPR
8867 or a MINUS_EXPR of a real constant, we can convert it into a
8868 comparison with a revised real constant as long as no overflow
8869 occurs when unsafe_math_optimizations are enabled. */
8870 if (flag_unsafe_math_optimizations
8871 && TREE_CODE (arg1) == REAL_CST
8872 && (TREE_CODE (arg0) == PLUS_EXPR
8873 || TREE_CODE (arg0) == MINUS_EXPR)
8874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8875 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8876 ? MINUS_EXPR : PLUS_EXPR,
8877 arg1, TREE_OPERAND (arg0, 1), 0))
8878 && !TREE_OVERFLOW (tem))
8879 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8881 /* Likewise, we can simplify a comparison of a real constant with
8882 a MINUS_EXPR whose first operand is also a real constant, i.e.
8883 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8884 floating-point types only if -fassociative-math is set. */
8885 if (flag_associative_math
8886 && TREE_CODE (arg1) == REAL_CST
8887 && TREE_CODE (arg0) == MINUS_EXPR
8888 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8889 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8890 arg1, 0))
8891 && !TREE_OVERFLOW (tem))
8892 return fold_build2 (swap_tree_comparison (code), type,
8893 TREE_OPERAND (arg0, 1), tem);
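/* Worked example (illustrative, with -fassociative-math):
   "(10.0 - x) < 4.0" re-arranges to "x > 6.0", using the swapped
   comparison because x appears negated; the constant fold 10.0 - 4.0
   must not overflow for the rewrite to apply.  */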
8895 /* Fold comparisons against built-in math functions. */
8896 if (TREE_CODE (arg1) == REAL_CST
8897 && flag_unsafe_math_optimizations
8898 && ! flag_errno_math)
8900 enum built_in_function fcode = builtin_mathfn_code (arg0);
8902 if (fcode != END_BUILTINS)
8904 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8905 if (tem != NULL_TREE)
8906 return tem;
8911 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8912 && CONVERT_EXPR_P (arg0))
8914 /* If we are widening one operand of an integer comparison,
8915 see if the other operand is similarly being widened. Perhaps we
8916 can do the comparison in the narrower type. */
8917 tem = fold_widened_comparison (code, type, arg0, arg1);
8918 if (tem)
8919 return tem;
8921 /* Or if we are changing signedness. */
8922 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8923 if (tem)
8924 return tem;
8925 }
8927 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8928 constant, we can simplify it. */
8929 if (TREE_CODE (arg1) == INTEGER_CST
8930 && (TREE_CODE (arg0) == MIN_EXPR
8931 || TREE_CODE (arg0) == MAX_EXPR)
8932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8934 tem = optimize_minmax_comparison (code, type, op0, op1);
8935 if (tem)
8936 return tem;
8937 }
8939 /* Simplify comparison of something with itself. (For IEEE
8940 floating-point, we can only do some of these simplifications.) */
8941 if (operand_equal_p (arg0, arg1, 0))
8942 {
8943 switch (code)
8944 {
8945 case EQ_EXPR:
8946 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8947 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8948 return constant_boolean_node (1, type);
8949 break;
8951 case GE_EXPR:
8952 case LE_EXPR:
8953 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8954 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8955 return constant_boolean_node (1, type);
8956 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8958 case NE_EXPR:
8959 /* For NE, we can only do this simplification if integer
8960 or we don't honor IEEE floating point NaNs. */
8961 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8962 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8963 break;
8964 /* ... fall through ... */
8965 case GT_EXPR:
8966 case LT_EXPR:
8967 return constant_boolean_node (0, type);
8968 default:
8969 gcc_unreachable ();
8970 }
8971 }
8973 /* If we are comparing an expression that just has comparisons
8974 of two integer values, arithmetic expressions of those comparisons,
8975 and constants, we can simplify it. There are only three cases
8976 to check: the two values can either be equal, the first can be
8977 greater, or the second can be greater. Fold the expression for
8978 those three values. Since each value must be 0 or 1, we have
8979 eight possibilities, each of which corresponds to the constant 0
8980 or 1 or one of the six possible comparisons.
8982 This handles common cases like (a > b) == 0 but also handles
8983 expressions like ((x > y) - (y > x)) > 0, which supposedly
8984 occur in macroized code. */
8986 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8988 tree cval1 = 0, cval2 = 0;
8989 int save_p = 0;
8991 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8992 /* Don't handle degenerate cases here; they should already
8993 have been handled anyway. */
8994 && cval1 != 0 && cval2 != 0
8995 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8996 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8997 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8998 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8999 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9000 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9001 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9003 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9004 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9006 /* We can't just pass T to eval_subst in case cval1 or cval2
9007 was the same as ARG1. */
9009 tree high_result
9010 = fold_build2 (code, type,
9011 eval_subst (arg0, cval1, maxval,
9012 cval2, minval),
9013 arg1);
9014 tree equal_result
9015 = fold_build2 (code, type,
9016 eval_subst (arg0, cval1, maxval,
9017 cval2, maxval),
9018 arg1);
9019 tree low_result
9020 = fold_build2 (code, type,
9021 eval_subst (arg0, cval1, minval,
9022 cval2, maxval),
9023 arg1);
9025 /* All three of these results should be 0 or 1. Confirm they are.
9026 Then use those values to select the proper code to use. */
9028 if (TREE_CODE (high_result) == INTEGER_CST
9029 && TREE_CODE (equal_result) == INTEGER_CST
9030 && TREE_CODE (low_result) == INTEGER_CST)
9032 /* Make a 3-bit mask with the high-order bit being the
9033 value for `>', the next for '=', and the low for '<'. */
9034 switch ((integer_onep (high_result) * 4)
9035 + (integer_onep (equal_result) * 2)
9036 + integer_onep (low_result))
9037 {
9038 case 0:
9039 /* Always false. */
9040 return omit_one_operand (type, integer_zero_node, arg0);
9041 case 1:
9042 code = LT_EXPR;
9043 break;
9044 case 2:
9045 code = EQ_EXPR;
9046 break;
9047 case 3:
9048 code = LE_EXPR;
9049 break;
9050 case 4:
9051 code = GT_EXPR;
9052 break;
9053 case 5:
9054 code = NE_EXPR;
9055 break;
9056 case 6:
9057 code = GE_EXPR;
9058 break;
9059 case 7:
9060 /* Always true. */
9061 return omit_one_operand (type, integer_one_node, arg0);
9062 }
9064 if (save_p)
9065 return save_expr (build2 (code, type, cval1, cval2));
9066 return fold_build2 (code, type, cval1, cval2);
9071 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9072 into a single range test. */
9073 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9074 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9075 && TREE_CODE (arg1) == INTEGER_CST
9076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9077 && !integer_zerop (TREE_OPERAND (arg0, 1))
9078 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9079 && !TREE_OVERFLOW (arg1))
9081 tem = fold_div_compare (code, type, arg0, arg1);
9082 if (tem != NULL_TREE)
9083 return tem;
9086 /* Fold ~X op ~Y as Y op X. */
9087 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9088 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9090 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9091 return fold_build2 (code, type,
9092 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9093 TREE_OPERAND (arg0, 0));
9096 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9098 && TREE_CODE (arg1) == INTEGER_CST)
9100 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9101 return fold_build2 (swap_tree_comparison (code), type,
9102 TREE_OPERAND (arg0, 0),
9103 fold_build1 (BIT_NOT_EXPR, cmp_type,
9104 fold_convert (cmp_type, arg1)));
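/* Example (illustrative, two's complement): "~x == 5" becomes
   "x == ~5", i.e. "x == -6", and by the preceding transformation
   "~x < ~y" becomes "y < x", since bitwise NOT reverses order.  */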
9111 /* Subroutine of fold_binary. Optimize complex multiplications of the
9112 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9113 argument EXPR represents the expression "z" of type TYPE. */
9115 static tree
9116 fold_mult_zconjz (tree type, tree expr)
9117 {
9118 tree itype = TREE_TYPE (type);
9119 tree rpart, ipart, tem;
9121 if (TREE_CODE (expr) == COMPLEX_EXPR)
9123 rpart = TREE_OPERAND (expr, 0);
9124 ipart = TREE_OPERAND (expr, 1);
9126 else if (TREE_CODE (expr) == COMPLEX_CST)
9128 rpart = TREE_REALPART (expr);
9129 ipart = TREE_IMAGPART (expr);
9133 expr = save_expr (expr);
9134 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9135 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9138 rpart = save_expr (rpart);
9139 ipart = save_expr (ipart);
9140 tem = fold_build2 (PLUS_EXPR, itype,
9141 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9142 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9143 return fold_build2 (COMPLEX_EXPR, type, tem,
9144 fold_convert (itype, integer_zero_node));
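/* Worked example (illustrative): for z = 3 + 4i, z * conj(z) becomes
   __complex__ (3*3 + 4*4, 0), i.e. 25 + 0i; the save_exprs above
   prevent a side-effecting z from being evaluated twice.  */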
9148 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9149 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9150 guarantees that P and N have the same least significant log2(M) bits.
9151 N is not otherwise constrained. In particular, N is not normalized to
9152 0 <= N < M as is common. In general, the precise value of P is unknown.
9153 M is chosen as large as possible such that constant N can be determined.
9155 Returns M and sets *RESIDUE to N. */
9157 static unsigned HOST_WIDE_INT
9158 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9159 {
9160 enum tree_code code;
9162 *residue = 0;
9164 code = TREE_CODE (expr);
9165 if (code == ADDR_EXPR)
9167 expr = TREE_OPERAND (expr, 0);
9168 if (handled_component_p (expr))
9170 HOST_WIDE_INT bitsize, bitpos;
9171 tree offset;
9172 enum machine_mode mode;
9173 int unsignedp, volatilep;
9175 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9176 &mode, &unsignedp, &volatilep, false);
9177 *residue = bitpos / BITS_PER_UNIT;
9178 if (offset)
9179 {
9180 if (TREE_CODE (offset) == INTEGER_CST)
9181 *residue += TREE_INT_CST_LOW (offset);
9182 else
9183 /* We don't handle more complicated offset expressions. */
9184 return 1;
9185 }
9188 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9189 return DECL_ALIGN_UNIT (expr);
9191 else if (code == POINTER_PLUS_EXPR)
9192 {
9193 tree op0, op1;
9194 unsigned HOST_WIDE_INT modulus;
9195 enum tree_code inner_code;
9197 op0 = TREE_OPERAND (expr, 0);
9199 modulus = get_pointer_modulus_and_residue (op0, residue);
9201 op1 = TREE_OPERAND (expr, 1);
9203 inner_code = TREE_CODE (op1);
9204 if (inner_code == INTEGER_CST)
9205 {
9206 *residue += TREE_INT_CST_LOW (op1);
9207 return modulus;
9208 }
9209 else if (inner_code == MULT_EXPR)
9211 op1 = TREE_OPERAND (op1, 1);
9212 if (TREE_CODE (op1) == INTEGER_CST)
9214 unsigned HOST_WIDE_INT align;
9216 /* Compute the greatest power-of-2 divisor of op1. */
9217 align = TREE_INT_CST_LOW (op1);
9218 align &= -align;
9220 /* If align is non-zero and less than *modulus, replace
9221 *modulus with align. If align is 0, then either op1 is 0
9222 or the greatest power-of-2 divisor of op1 doesn't fit in an
9223 unsigned HOST_WIDE_INT. In either case, no additional
9224 constraint is imposed. */
9225 if (align)
9226 modulus = MIN (modulus, align);
9228 return modulus;
9233 /* If we get here, we were unable to determine anything useful about the
9234 expression. */
9235 return 1;
9236 }
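/* Example (illustrative): for "&buf[3]" with buf a 16-byte-aligned
   array, this returns modulus 16 with residue 3, so the low four
   address bits are known exactly; for "p p+ 4 * n" the modulus is
   capped at 4, the largest power of two known to divide the variable
   offset.  */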
9239 /* Fold a binary expression of code CODE and type TYPE with operands
9240 OP0 and OP1. Return the folded expression if folding is
9241 successful. Otherwise, return NULL_TREE. */
9243 tree
9244 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9245 {
9246 enum tree_code_class kind = TREE_CODE_CLASS (code);
9247 tree arg0, arg1, tem;
9248 tree t1 = NULL_TREE;
9249 bool strict_overflow_p;
9251 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9252 && TREE_CODE_LENGTH (code) == 2
9253 && op0 != NULL_TREE
9254 && op1 != NULL_TREE);
9256 arg0 = op0;
9257 arg1 = op1;
9259 /* Strip any conversions that don't change the mode. This is
9260 safe for every expression, except for a comparison expression
9261 because its signedness is derived from its operands. So, in
9262 the latter case, only strip conversions that don't change the
9263 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9264 to be preserved.
9266 Note that this is done as an internal manipulation within the
9267 constant folder, in order to find the simplest representation
9268 of the arguments so that their form can be studied. In any
9269 case, the appropriate type conversions should be put back in
9270 the tree that will get out of the constant folder. */
9272 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9273 {
9274 STRIP_SIGN_NOPS (arg0);
9275 STRIP_SIGN_NOPS (arg1);
9276 }
9277 else
9278 {
9279 STRIP_NOPS (arg0);
9280 STRIP_NOPS (arg1);
9281 }
9283 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9284 constant but we can't do arithmetic on them. */
9285 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9286 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9287 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9288 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9289 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9290 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9292 if (kind == tcc_binary)
9294 /* Make sure type and arg0 have the same saturating flag. */
9295 gcc_assert (TYPE_SATURATING (type)
9296 == TYPE_SATURATING (TREE_TYPE (arg0)));
9297 tem = const_binop (code, arg0, arg1, 0);
9299 else if (kind == tcc_comparison)
9300 tem = fold_relational_const (code, type, arg0, arg1);
9304 if (tem != NULL_TREE)
9305 {
9306 if (TREE_TYPE (tem) != type)
9307 tem = fold_convert (type, tem);
9308 return tem;
9309 }
9312 /* If this is a commutative operation, and ARG0 is a constant, move it
9313 to ARG1 to reduce the number of tests below. */
9314 if (commutative_tree_code (code)
9315 && tree_swap_operands_p (arg0, arg1, true))
9316 return fold_build2 (code, type, op1, op0);
9318 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9320 First check for cases where an arithmetic operation is applied to a
9321 compound, conditional, or comparison operation. Push the arithmetic
9322 operation inside the compound or conditional to see if any folding
9323 can then be done. Convert comparison to conditional for this purpose.
9324 This also optimizes non-constant cases that used to be done in
9325 expand_expr.
9327 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9328 one of the operands is a comparison and the other is a comparison, a
9329 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9330 code below would make the expression more complex. Change it to a
9331 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9332 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9334 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9335 || code == EQ_EXPR || code == NE_EXPR)
9336 && ((truth_value_p (TREE_CODE (arg0))
9337 && (truth_value_p (TREE_CODE (arg1))
9338 || (TREE_CODE (arg1) == BIT_AND_EXPR
9339 && integer_onep (TREE_OPERAND (arg1, 1)))))
9340 || (truth_value_p (TREE_CODE (arg1))
9341 && (truth_value_p (TREE_CODE (arg0))
9342 || (TREE_CODE (arg0) == BIT_AND_EXPR
9343 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9345 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9346 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9347 : TRUTH_XOR_EXPR,
9348 boolean_type_node,
9349 fold_convert (boolean_type_node, arg0),
9350 fold_convert (boolean_type_node, arg1));
9352 if (code == EQ_EXPR)
9353 tem = invert_truthvalue (tem);
9355 return fold_convert (type, tem);
9358 if (TREE_CODE_CLASS (code) == tcc_binary
9359 || TREE_CODE_CLASS (code) == tcc_comparison)
9361 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9362 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9363 fold_build2 (code, type,
9364 fold_convert (TREE_TYPE (op0),
9365 TREE_OPERAND (arg0, 1)),
9366 op1));
9367 if (TREE_CODE (arg1) == COMPOUND_EXPR
9368 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9369 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9370 fold_build2 (code, type, op0,
9371 fold_convert (TREE_TYPE (op1),
9372 TREE_OPERAND (arg1, 1))));
9374 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9376 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9377 arg0, arg1,
9378 /*cond_first_p=*/1);
9379 if (tem != NULL_TREE)
9383 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9385 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9386 arg1, arg0,
9387 /*cond_first_p=*/0);
9388 if (tem != NULL_TREE)
9389 return tem;
9390 }
9392 switch (code)
9393 {
9395 case POINTER_PLUS_EXPR:
9396 /* 0 +p index -> (type)index */
9397 if (integer_zerop (arg0))
9398 return non_lvalue (fold_convert (type, arg1));
9400 /* PTR +p 0 -> PTR */
9401 if (integer_zerop (arg1))
9402 return non_lvalue (fold_convert (type, arg0));
9404 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9405 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9406 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9407 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9408 fold_convert (sizetype, arg1),
9409 fold_convert (sizetype, arg0)));
9411 /* index +p PTR -> PTR +p index */
9412 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9413 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9414 return fold_build2 (POINTER_PLUS_EXPR, type,
9415 fold_convert (type, arg1),
9416 fold_convert (sizetype, arg0));
9418 /* (PTR +p B) +p A -> PTR +p (B + A) */
9419 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9420 {
9421 tree inner;
9422 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9423 tree arg00 = TREE_OPERAND (arg0, 0);
9424 inner = fold_build2 (PLUS_EXPR, sizetype,
9425 arg01, fold_convert (sizetype, arg1));
9426 return fold_convert (type,
9427 fold_build2 (POINTER_PLUS_EXPR,
9428 TREE_TYPE (arg00), arg00, inner));
9431 /* PTR_CST +p CST -> CST1 */
9432 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9433 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9435 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9436 of the array. The loop optimizer sometimes produces this type of
9437 expression. */
9438 if (TREE_CODE (arg0) == ADDR_EXPR)
9440 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9441 if (tem)
9442 return fold_convert (type, tem);
9443 }
9445 return NULL_TREE;
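/* Summary examples for POINTER_PLUS_EXPR (illustrative): "0 +p i"
   folds to "(type) i", "(p +p 4) +p 8" re-associates to "p +p 12",
   and "&a[1] +p 4 * i" can become "&a[1 + i]" when the array step is
   4 bytes.  */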
9447 case PLUS_EXPR:
9448 /* PTR + INT -> (INT)(PTR p+ INT) */
9449 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9450 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9451 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9452 TREE_TYPE (arg0),
9453 arg0,
9454 fold_convert (sizetype, arg1)));
9455 /* INT + PTR -> (INT)(PTR p+ INT) */
9456 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9457 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9458 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9459 TREE_TYPE (arg1),
9460 arg1,
9461 fold_convert (sizetype, arg0)));
9462 /* A + (-B) -> A - B */
9463 if (TREE_CODE (arg1) == NEGATE_EXPR)
9464 return fold_build2 (MINUS_EXPR, type,
9465 fold_convert (type, arg0),
9466 fold_convert (type, TREE_OPERAND (arg1, 0)));
9467 /* (-A) + B -> B - A */
9468 if (TREE_CODE (arg0) == NEGATE_EXPR
9469 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9470 return fold_build2 (MINUS_EXPR, type,
9471 fold_convert (type, arg1),
9472 fold_convert (type, TREE_OPERAND (arg0, 0)));
9474 if (INTEGRAL_TYPE_P (type))
9476 /* Convert ~A + 1 to -A. */
9477 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9478 && integer_onep (arg1))
9479 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9481 /* ~X + X is -1. */
9482 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9483 && !TYPE_OVERFLOW_TRAPS (type))
9485 tree tem = TREE_OPERAND (arg0, 0);
9487 STRIP_NOPS (tem);
9488 if (operand_equal_p (tem, arg1, 0))
9490 t1 = build_int_cst_type (type, -1);
9491 return omit_one_operand (type, t1, arg1);
9495 /* X + ~X is -1. */
9496 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9497 && !TYPE_OVERFLOW_TRAPS (type))
9499 tree tem = TREE_OPERAND (arg1, 0);
9501 STRIP_NOPS (tem);
9502 if (operand_equal_p (arg0, tem, 0))
9504 t1 = build_int_cst_type (type, -1);
9505 return omit_one_operand (type, t1, arg0);
9509 /* X + (X / CST) * -CST is X % CST. */
9510 if (TREE_CODE (arg1) == MULT_EXPR
9511 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9512 && operand_equal_p (arg0,
9513 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9515 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9516 tree cst1 = TREE_OPERAND (arg1, 1);
9517 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9518 if (sum && integer_zerop (sum))
9519 return fold_convert (type,
9520 fold_build2 (TRUNC_MOD_EXPR,
9521 TREE_TYPE (arg0), arg0, cst0));
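/* Worked example (illustrative): "x + (x / 16) * -16" folds to
   "x % 16", the truncating-division identity
   x == (x / c) * c + x % c rearranged.  */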
9525 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9526 same or one. Make sure type is not saturating.
9527 fold_plusminus_mult_expr will re-associate. */
9528 if ((TREE_CODE (arg0) == MULT_EXPR
9529 || TREE_CODE (arg1) == MULT_EXPR)
9530 && !TYPE_SATURATING (type)
9531 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9533 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9534 if (tem)
9535 return tem;
9536 }
9538 if (! FLOAT_TYPE_P (type))
9540 if (integer_zerop (arg1))
9541 return non_lvalue (fold_convert (type, arg0));
9543 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9544 with a constant, and the two constants have no bits in common,
9545 we should treat this as a BIT_IOR_EXPR since this may produce more
9547 if (TREE_CODE (arg0) == BIT_AND_EXPR
9548 && TREE_CODE (arg1) == BIT_AND_EXPR
9549 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9550 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9551 && integer_zerop (const_binop (BIT_AND_EXPR,
9552 TREE_OPERAND (arg0, 1),
9553 TREE_OPERAND (arg1, 1), 0)))
9554 {
9555 code = BIT_IOR_EXPR;
9556 goto bit_ior;
9557 }
9559 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9560 (plus (plus (mult) (mult)) (foo)) so that we can
9561 take advantage of the factoring cases below. */
9562 if (((TREE_CODE (arg0) == PLUS_EXPR
9563 || TREE_CODE (arg0) == MINUS_EXPR)
9564 && TREE_CODE (arg1) == MULT_EXPR)
9565 || ((TREE_CODE (arg1) == PLUS_EXPR
9566 || TREE_CODE (arg1) == MINUS_EXPR)
9567 && TREE_CODE (arg0) == MULT_EXPR))
9569 tree parg0, parg1, parg, marg;
9570 enum tree_code pcode;
9572 if (TREE_CODE (arg1) == MULT_EXPR)
9573 parg = arg0, marg = arg1;
9574 else
9575 parg = arg1, marg = arg0;
9576 pcode = TREE_CODE (parg);
9577 parg0 = TREE_OPERAND (parg, 0);
9578 parg1 = TREE_OPERAND (parg, 1);
9579 STRIP_NOPS (parg0);
9580 STRIP_NOPS (parg1);
9582 if (TREE_CODE (parg0) == MULT_EXPR
9583 && TREE_CODE (parg1) != MULT_EXPR)
9584 return fold_build2 (pcode, type,
9585 fold_build2 (PLUS_EXPR, type,
9586 fold_convert (type, parg0),
9587 fold_convert (type, marg)),
9588 fold_convert (type, parg1));
9589 if (TREE_CODE (parg0) != MULT_EXPR
9590 && TREE_CODE (parg1) == MULT_EXPR)
9591 return fold_build2 (PLUS_EXPR, type,
9592 fold_convert (type, parg0),
9593 fold_build2 (pcode, type,
9594 fold_convert (type, marg),
9595 fold_convert (type, parg1)));
9596 }
9597 }
9599 else
9600 {
9601 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9602 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9603 return non_lvalue (fold_convert (type, arg0));
9605 /* Likewise if the operands are reversed. */
9606 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9607 return non_lvalue (fold_convert (type, arg1));
9609 /* Convert X + -C into X - C. */
9610 if (TREE_CODE (arg1) == REAL_CST
9611 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9613 tem = fold_negate_const (arg1, type);
9614 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9615 return fold_build2 (MINUS_EXPR, type,
9616 fold_convert (type, arg0),
9617 fold_convert (type, tem));
9620 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9621 to __complex__ ( x, y ). This is not the same for SNaNs or
9622 if signed zeros are involved. */
9623 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9624 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9625 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9627 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9628 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9629 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9630 bool arg0rz = false, arg0iz = false;
9631 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9632 || (arg0i && (arg0iz = real_zerop (arg0i))))
9634 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9635 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9636 if (arg0rz && arg1i && real_zerop (arg1i))
9638 tree rp = arg1r ? arg1r
9639 : build1 (REALPART_EXPR, rtype, arg1);
9640 tree ip = arg0i ? arg0i
9641 : build1 (IMAGPART_EXPR, rtype, arg0);
9642 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9644 else if (arg0iz && arg1r && real_zerop (arg1r))
9646 tree rp = arg0r ? arg0r
9647 : build1 (REALPART_EXPR, rtype, arg0);
9648 tree ip = arg1i ? arg1i
9649 : build1 (IMAGPART_EXPR, rtype, arg1);
9650 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9655 if (flag_unsafe_math_optimizations
9656 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9657 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9658 && (tem = distribute_real_division (code, type, arg0, arg1)))
9659 return tem;
9661 /* Convert x+x into x*2.0. */
9662 if (operand_equal_p (arg0, arg1, 0)
9663 && SCALAR_FLOAT_TYPE_P (type))
9664 return fold_build2 (MULT_EXPR, type, arg0,
9665 build_real (type, dconst2));
9667 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9668 We associate floats only if the user has specified
9669 -fassociative-math. */
9670 if (flag_associative_math
9671 && TREE_CODE (arg1) == PLUS_EXPR
9672 && TREE_CODE (arg0) != MULT_EXPR)
9674 tree tree10 = TREE_OPERAND (arg1, 0);
9675 tree tree11 = TREE_OPERAND (arg1, 1);
9676 if (TREE_CODE (tree11) == MULT_EXPR
9677 && TREE_CODE (tree10) == MULT_EXPR)
9678 {
9679 tree tree0;
9680 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9681 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9684 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9685 We associate floats only if the user has specified
9686 -fassociative-math. */
9687 if (flag_associative_math
9688 && TREE_CODE (arg0) == PLUS_EXPR
9689 && TREE_CODE (arg1) != MULT_EXPR)
9691 tree tree00 = TREE_OPERAND (arg0, 0);
9692 tree tree01 = TREE_OPERAND (arg0, 1);
9693 if (TREE_CODE (tree01) == MULT_EXPR
9694 && TREE_CODE (tree00) == MULT_EXPR)
9695 {
9696 tree tree0;
9697 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9698 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9704 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9705 is a rotate of A by C1 bits. */
9706 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9707 is a rotate of A by B bits. */
9708 {
9709 enum tree_code code0, code1;
9710 tree rtype;
9711 code0 = TREE_CODE (arg0);
9712 code1 = TREE_CODE (arg1);
9713 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9714 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9715 && operand_equal_p (TREE_OPERAND (arg0, 0),
9716 TREE_OPERAND (arg1, 0), 0)
9717 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9718 TYPE_UNSIGNED (rtype))
9719 /* Only create rotates in complete modes. Other cases are not
9720 expanded properly. */
9721 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9723 tree tree01, tree11;
9724 enum tree_code code01, code11;
9726 tree01 = TREE_OPERAND (arg0, 1);
9727 tree11 = TREE_OPERAND (arg1, 1);
9728 STRIP_NOPS (tree01);
9729 STRIP_NOPS (tree11);
9730 code01 = TREE_CODE (tree01);
9731 code11 = TREE_CODE (tree11);
9732 if (code01 == INTEGER_CST
9733 && code11 == INTEGER_CST
9734 && TREE_INT_CST_HIGH (tree01) == 0
9735 && TREE_INT_CST_HIGH (tree11) == 0
9736 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9737 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9738 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9739 code0 == LSHIFT_EXPR ? tree01 : tree11);
9740 else if (code11 == MINUS_EXPR)
9742 tree tree110, tree111;
9743 tree110 = TREE_OPERAND (tree11, 0);
9744 tree111 = TREE_OPERAND (tree11, 1);
9745 STRIP_NOPS (tree110);
9746 STRIP_NOPS (tree111);
9747 if (TREE_CODE (tree110) == INTEGER_CST
9748 && 0 == compare_tree_int (tree110,
9749 TYPE_PRECISION
9750 (TREE_TYPE (TREE_OPERAND
9751 (arg0, 0))))
9752 && operand_equal_p (tree01, tree111, 0))
9753 return build2 ((code0 == LSHIFT_EXPR
9754 ? LROTATE_EXPR
9755 : RROTATE_EXPR),
9756 type, TREE_OPERAND (arg0, 0), tree01);
9758 else if (code01 == MINUS_EXPR)
9760 tree tree010, tree011;
9761 tree010 = TREE_OPERAND (tree01, 0);
9762 tree011 = TREE_OPERAND (tree01, 1);
9763 STRIP_NOPS (tree010);
9764 STRIP_NOPS (tree011);
9765 if (TREE_CODE (tree010) == INTEGER_CST
9766 && 0 == compare_tree_int (tree010,
9767 TYPE_PRECISION
9768 (TREE_TYPE (TREE_OPERAND
9769 (arg0, 0))))
9770 && operand_equal_p (tree11, tree011, 0))
9771 return build2 ((code0 != LSHIFT_EXPR
9772 ? LROTATE_EXPR
9773 : RROTATE_EXPR),
9774 type, TREE_OPERAND (arg0, 0), tree11);
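/* Example (illustrative, unsigned 32-bit x): "(x << 3) + (x >> 29)"
   matches C1 + C2 == 32 and becomes a left rotate of x by 3, and
   "(x << b) + (x >> (32 - b))" likewise becomes a rotate by b.  */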
9780 /* In most languages, can't associate operations on floats through
9781 parentheses. Rather than remember where the parentheses were, we
9782 don't associate floats at all, unless the user has specified
9783 -fassociative-math.
9784 And, we need to make sure type is not saturating. */
9786 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9787 && !TYPE_SATURATING (type))
9789 tree var0, con0, lit0, minus_lit0;
9790 tree var1, con1, lit1, minus_lit1;
9791 bool ok = true;
9793 /* Split both trees into variables, constants, and literals. Then
9794 associate each group together, the constants with literals,
9795 then the result with variables. This increases the chances of
9796 literals being recombined later and of generating relocatable
9797 expressions for the sum of a constant and literal. */
9798 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9799 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9800 code == MINUS_EXPR);
9802 /* With undefined overflow we can only associate constants
9803 with one variable. */
9804 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9805 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9806 && var0 && var1)
9807 {
9808 tree tmp0 = var0;
9809 tree tmp1 = var1;
9811 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9812 tmp0 = TREE_OPERAND (tmp0, 0);
9813 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9814 tmp1 = TREE_OPERAND (tmp1, 0);
9815 /* The only case we can still associate with two variables
9816 is if they are the same, modulo negation. */
9817 if (!operand_equal_p (tmp0, tmp1, 0))
9818 ok = false;
9819 }
9821 /* Only do something if we found more than two objects. Otherwise,
9822 nothing has changed and we risk infinite recursion. */
9823 if (ok
9824 && (2 < ((var0 != 0) + (var1 != 0)
9825 + (con0 != 0) + (con1 != 0)
9826 + (lit0 != 0) + (lit1 != 0)
9827 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9829 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9830 if (code == MINUS_EXPR)
9831 code = PLUS_EXPR;
9833 var0 = associate_trees (var0, var1, code, type);
9834 con0 = associate_trees (con0, con1, code, type);
9835 lit0 = associate_trees (lit0, lit1, code, type);
9836 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9838 /* Preserve the MINUS_EXPR if the negative part of the literal is
9839 greater than the positive part. Otherwise, the multiplicative
9840 folding code (i.e. extract_muldiv) may be fooled in case
9841 unsigned constants are subtracted, like in the following
9842 example: ((X*2 + 4) - 8U)/2. */
9843 if (minus_lit0 && lit0)
9844 {
9845 if (TREE_CODE (lit0) == INTEGER_CST
9846 && TREE_CODE (minus_lit0) == INTEGER_CST
9847 && tree_int_cst_lt (lit0, minus_lit0))
9848 {
9849 minus_lit0 = associate_trees (minus_lit0, lit0,
9850 MINUS_EXPR, type);
9851 lit0 = 0;
9852 }
9853 else
9854 {
9855 lit0 = associate_trees (lit0, minus_lit0,
9856 MINUS_EXPR, type);
9857 minus_lit0 = 0;
9858 }
9859 }
9860 if (minus_lit0)
9861 {
9862 if (con0 == 0)
9863 return fold_convert (type,
9864 associate_trees (var0, minus_lit0,
9865 MINUS_EXPR, type));
9866 else
9867 {
9868 con0 = associate_trees (con0, minus_lit0,
9869 MINUS_EXPR, type);
9870 return fold_convert (type,
9871 associate_trees (var0, con0,
9872 PLUS_EXPR, type));
9873 }
9874 }
9876 con0 = associate_trees (con0, lit0, code, type);
9877 return fold_convert (type, associate_trees (var0, con0,
9878 code, type));
9879 }
9880 }
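/* Worked example (illustrative, unsigned or wrapping type):
   "(x + 3) + (y + 4)" is split into variables and literals and
   re-associated to "(x + y) + 7"; for types with undefined overflow
   the rewrite is only applied when a single variable (modulo
   negation) is involved, per the check above.  */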
9882 return NULL_TREE;
9884 case MINUS_EXPR:
9885 /* Pointer simplifications for subtraction, simple reassociations. */
9886 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9888 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9889 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9890 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9892 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9893 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9894 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9895 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9896 return fold_build2 (PLUS_EXPR, type,
9897 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9898 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9900 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9901 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9903 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9904 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9905 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9906 if (tmp)
9907 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9910 /* A - (-B) -> A + B */
9911 if (TREE_CODE (arg1) == NEGATE_EXPR)
9912 return fold_build2 (PLUS_EXPR, type, op0,
9913 fold_convert (type, TREE_OPERAND (arg1, 0)));
9914 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9915 if (TREE_CODE (arg0) == NEGATE_EXPR
9916 && (FLOAT_TYPE_P (type)
9917 || INTEGRAL_TYPE_P (type))
9918 && negate_expr_p (arg1)
9919 && reorder_operands_p (arg0, arg1))
9920 return fold_build2 (MINUS_EXPR, type,
9921 fold_convert (type, negate_expr (arg1)),
9922 fold_convert (type, TREE_OPERAND (arg0, 0)));
9923 /* Convert -A - 1 to ~A. */
9924 if (INTEGRAL_TYPE_P (type)
9925 && TREE_CODE (arg0) == NEGATE_EXPR
9926 && integer_onep (arg1)
9927 && !TYPE_OVERFLOW_TRAPS (type))
9928 return fold_build1 (BIT_NOT_EXPR, type,
9929 fold_convert (type, TREE_OPERAND (arg0, 0)));
9931 /* Convert -1 - A to ~A. */
9932 if (INTEGRAL_TYPE_P (type)
9933 && integer_all_onesp (arg0))
9934 return fold_build1 (BIT_NOT_EXPR, type, op1);
9937 /* X - (X / CST) * CST is X % CST. */
9938 if (INTEGRAL_TYPE_P (type)
9939 && TREE_CODE (arg1) == MULT_EXPR
9940 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9941 && operand_equal_p (arg0,
9942 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9943 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9944 TREE_OPERAND (arg1, 1), 0))
9945 return fold_convert (type,
9946 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9947 arg0, TREE_OPERAND (arg1, 1)));
9949 if (! FLOAT_TYPE_P (type))
9951 if (integer_zerop (arg0))
9952 return negate_expr (fold_convert (type, arg1));
9953 if (integer_zerop (arg1))
9954 return non_lvalue (fold_convert (type, arg0));
9956 /* Fold A - (A & B) into ~B & A. */
9957 if (!TREE_SIDE_EFFECTS (arg0)
9958 && TREE_CODE (arg1) == BIT_AND_EXPR)
9960 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9962 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9963 return fold_build2 (BIT_AND_EXPR, type,
9964 fold_build1 (BIT_NOT_EXPR, type, arg10),
9965 fold_convert (type, arg0));
9967 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9969 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9970 return fold_build2 (BIT_AND_EXPR, type,
9971 fold_build1 (BIT_NOT_EXPR, type, arg11),
9972 fold_convert (type, arg0));
9976 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9977 any power of 2 minus 1. */
9978 if (TREE_CODE (arg0) == BIT_AND_EXPR
9979 && TREE_CODE (arg1) == BIT_AND_EXPR
9980 && operand_equal_p (TREE_OPERAND (arg0, 0),
9981 TREE_OPERAND (arg1, 0), 0))
9983 tree mask0 = TREE_OPERAND (arg0, 1);
9984 tree mask1 = TREE_OPERAND (arg1, 1);
9985 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9987 if (operand_equal_p (tem, mask1, 0))
9989 tem = fold_build2 (BIT_XOR_EXPR, type,
9990 TREE_OPERAND (arg0, 0), mask1);
9991 return fold_build2 (MINUS_EXPR, type, tem, mask1);
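/* Worked example (illustrative, B = 2**k - 1): "(x & ~15) - (x & 15)"
   folds to "(x ^ 15) - 15"; both compute x with its low four bits
   subtracted rather than added.  */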
9996 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9997 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9998 return non_lvalue (fold_convert (type, arg0));
10000 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10001 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10002 (-ARG1 + ARG0) reduces to -ARG1. */
10003 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10004 return negate_expr (fold_convert (type, arg1));
10006 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10007 __complex__ ( x, -y ). This is not the same for SNaNs or if
10008 signed zeros are involved. */
10009 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10010 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10011 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10013 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10014 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10015 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10016 bool arg0rz = false, arg0iz = false;
10017 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10018 || (arg0i && (arg0iz = real_zerop (arg0i))))
10020 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10021 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10022 if (arg0rz && arg1i && real_zerop (arg1i))
10024 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10025 arg1r ? arg1r
10026 : build1 (REALPART_EXPR, rtype, arg1));
10027 tree ip = arg0i ? arg0i
10028 : build1 (IMAGPART_EXPR, rtype, arg0);
10029 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10031 else if (arg0iz && arg1r && real_zerop (arg1r))
10033 tree rp = arg0r ? arg0r
10034 : build1 (REALPART_EXPR, rtype, arg0);
10035 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10036 arg1i ? arg1i
10037 : build1 (IMAGPART_EXPR, rtype, arg1));
10038 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10043 /* Fold &x - &x. This can happen from &x.foo - &x.
10044 This is unsafe for certain floats even in non-IEEE formats.
10045 In IEEE, it is unsafe because it does wrong for NaNs.
10046 Also note that operand_equal_p is always false if an operand
10047 is volatile. */
10049 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10050 && operand_equal_p (arg0, arg1, 0))
10051 return fold_convert (type, integer_zero_node);
10053 /* A - B -> A + (-B) if B is easily negatable. */
10054 if (negate_expr_p (arg1)
10055 && ((FLOAT_TYPE_P (type)
10056 /* Avoid this transformation if B is a positive REAL_CST. */
10057 && (TREE_CODE (arg1) != REAL_CST
10058 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10059 || INTEGRAL_TYPE_P (type)))
10060 return fold_build2 (PLUS_EXPR, type,
10061 fold_convert (type, arg0),
10062 fold_convert (type, negate_expr (arg1)));
10064 /* Try folding difference of addresses. */
10066 HOST_WIDE_INT diff;
10068 if ((TREE_CODE (arg0) == ADDR_EXPR
10069 || TREE_CODE (arg1) == ADDR_EXPR)
10070 && ptr_difference_const (arg0, arg1, &diff))
10071 return build_int_cst_type (type, diff);
10074 /* Fold &a[i] - &a[j] to i-j. */
10075 if (TREE_CODE (arg0) == ADDR_EXPR
10076 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10077 && TREE_CODE (arg1) == ADDR_EXPR
10078 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10080 tree aref0 = TREE_OPERAND (arg0, 0);
10081 tree aref1 = TREE_OPERAND (arg1, 0);
10082 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10083 TREE_OPERAND (aref1, 0), 0))
10085 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10086 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10087 tree esz = array_ref_element_size (aref0);
10088 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10089 return fold_build2 (MULT_EXPR, type, diff,
10090 fold_convert (type, esz));
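/* Example (illustrative): "&a[i] - &a[j]" for an array with 4-byte
   elements folds to "(i - j) * 4", the index difference scaled by the
   element size.  */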
10095 if (flag_unsafe_math_optimizations
10096 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10097 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10098 && (tem = distribute_real_division (code, type, arg0, arg1)))
10099 return tem;
10101 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10102 same or one. Make sure type is not saturating.
10103 fold_plusminus_mult_expr will re-associate. */
10104 if ((TREE_CODE (arg0) == MULT_EXPR
10105 || TREE_CODE (arg1) == MULT_EXPR)
10106 && !TYPE_SATURATING (type)
10107 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10109 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10110 if (tem)
10111 return tem;
10112 }
10114 goto associate;
10116 case MULT_EXPR:
10117 /* (-A) * (-B) -> A * B */
10118 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10119 return fold_build2 (MULT_EXPR, type,
10120 fold_convert (type, TREE_OPERAND (arg0, 0)),
10121 fold_convert (type, negate_expr (arg1)));
10122 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10123 return fold_build2 (MULT_EXPR, type,
10124 fold_convert (type, negate_expr (arg0)),
10125 fold_convert (type, TREE_OPERAND (arg1, 0)));
10127 if (! FLOAT_TYPE_P (type))
10129 if (integer_zerop (arg1))
10130 return omit_one_operand (type, arg1, arg0);
10131 if (integer_onep (arg1))
10132 return non_lvalue (fold_convert (type, arg0));
10133 /* Transform x * -1 into -x. Make sure to do the negation
10134 on the original operand with conversions not stripped
10135 because we can only strip non-sign-changing conversions. */
10136 if (integer_all_onesp (arg1))
10137 return fold_convert (type, negate_expr (op0));
10138 /* Transform x * -C into -x * C if x is easily negatable. */
10139 if (TREE_CODE (arg1) == INTEGER_CST
10140 && tree_int_cst_sgn (arg1) == -1
10141 && negate_expr_p (arg0)
10142 && (tem = negate_expr (arg1)) != arg1
10143 && !TREE_OVERFLOW (tem))
10144 return fold_build2 (MULT_EXPR, type,
10145 fold_convert (type, negate_expr (arg0)), tem);
10147 /* (a * (1 << b)) is (a << b) */
10148 if (TREE_CODE (arg1) == LSHIFT_EXPR
10149 && integer_onep (TREE_OPERAND (arg1, 0)))
10150 return fold_build2 (LSHIFT_EXPR, type, op0,
10151 TREE_OPERAND (arg1, 1));
10152 if (TREE_CODE (arg0) == LSHIFT_EXPR
10153 && integer_onep (TREE_OPERAND (arg0, 0)))
10154 return fold_build2 (LSHIFT_EXPR, type, op1,
10155 TREE_OPERAND (arg0, 1));
10157 /* (A + A) * C -> A * 2 * C */
10158 if (TREE_CODE (arg0) == PLUS_EXPR
10159 && TREE_CODE (arg1) == INTEGER_CST
10160 && operand_equal_p (TREE_OPERAND (arg0, 0),
10161 TREE_OPERAND (arg0, 1), 0))
10162 return fold_build2 (MULT_EXPR, type,
10163 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10164 TREE_OPERAND (arg0, 1)),
10165 fold_build2 (MULT_EXPR, type,
10166 build_int_cst (type, 2) , arg1));
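/* Worked example (illustrative): "(a + a) * 3" is recognized as a
   doubling and folds to "a * (2 * 3)", i.e. "a * 6" once the inner
   constants are combined.  */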
10168 strict_overflow_p = false;
10169 if (TREE_CODE (arg1) == INTEGER_CST
10170 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10171 &strict_overflow_p)))
10173 if (strict_overflow_p)
10174 fold_overflow_warning (("assuming signed overflow does not "
10175 "occur when simplifying "
10177 WARN_STRICT_OVERFLOW_MISC);
10178 return fold_convert (type, tem);
10181 /* Optimize z * conj(z) for integer complex numbers. */
10182 if (TREE_CODE (arg0) == CONJ_EXPR
10183 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10184 return fold_mult_zconjz (type, arg1);
10185 if (TREE_CODE (arg1) == CONJ_EXPR
10186 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10187 return fold_mult_zconjz (type, arg0);
10191 /* Maybe fold x * 0 to 0. The expressions aren't the same
10192 when x is NaN, since x * 0 is also NaN. Nor are they the
10193 same in modes with signed zeros, since multiplying a
10194 negative value by 0 gives -0, not +0. */
10195 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10196 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10197 && real_zerop (arg1))
10198 return omit_one_operand (type, arg1, arg0);
10199 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10200 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10201 && real_onep (arg1))
10202 return non_lvalue (fold_convert (type, arg0));
10204 /* Transform x * -1.0 into -x. */
10205 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10206 && real_minus_onep (arg1))
10207 return fold_convert (type, negate_expr (arg0));
10209 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10210 the result for floating point types due to rounding so it is applied
10211 only if -fassociative-math was specified. */
10212 if (flag_associative_math
10213 && TREE_CODE (arg0) == RDIV_EXPR
10214 && TREE_CODE (arg1) == REAL_CST
10215 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10217 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10218 arg1, 0);
10219 if (tem)
10220 return fold_build2 (RDIV_EXPR, type, tem,
10221 TREE_OPERAND (arg0, 1));
10224 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10225 if (operand_equal_p (arg0, arg1, 0))
10227 tree tem = fold_strip_sign_ops (arg0);
10228 if (tem != NULL_TREE)
10230 tem = fold_convert (type, tem);
10231 return fold_build2 (MULT_EXPR, type, tem, tem);
10235 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10236 This is not the same for NaNs or if signed zeros are
10237 involved. */
10238 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10239 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10240 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10241 && TREE_CODE (arg1) == COMPLEX_CST
10242 && real_zerop (TREE_REALPART (arg1)))
10243 {
10244 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10245 if (real_onep (TREE_IMAGPART (arg1)))
10246 return fold_build2 (COMPLEX_EXPR, type,
10247 negate_expr (fold_build1 (IMAGPART_EXPR,
10248 rtype, arg0)),
10249 fold_build1 (REALPART_EXPR, rtype, arg0));
10250 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10251 return fold_build2 (COMPLEX_EXPR, type,
10252 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10253 negate_expr (fold_build1 (REALPART_EXPR,
10254 rtype, arg0)));
10255 }
10257 /* Optimize z * conj(z) for floating point complex numbers.
10258 Guarded by flag_unsafe_math_optimizations as non-finite
10259 imaginary components don't produce scalar results. */
10260 if (flag_unsafe_math_optimizations
10261 && TREE_CODE (arg0) == CONJ_EXPR
10262 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10263 return fold_mult_zconjz (type, arg1);
10264 if (flag_unsafe_math_optimizations
10265 && TREE_CODE (arg1) == CONJ_EXPR
10266 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10267 return fold_mult_zconjz (type, arg0);
10269 if (flag_unsafe_math_optimizations)
10270 {
10271 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10272 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10274 /* Optimizations of root(...)*root(...). */
10275 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10276 {
10277 tree rootfn, arg;
10278 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10279 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10281 /* Optimize sqrt(x)*sqrt(x) as x. */
10282 if (BUILTIN_SQRT_P (fcode0)
10283 && operand_equal_p (arg00, arg10, 0)
10284 && ! HONOR_SNANS (TYPE_MODE (type)))
10285 return arg00;
10287 /* Optimize root(x)*root(y) as root(x*y). */
10288 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10289 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10290 return build_call_expr (rootfn, 1, arg);
10291 }
10293 /* Optimize expN(x)*expN(y) as expN(x+y). */
10294 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10295 {
10296 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10297 tree arg = fold_build2 (PLUS_EXPR, type,
10298 CALL_EXPR_ARG (arg0, 0),
10299 CALL_EXPR_ARG (arg1, 0));
10300 return build_call_expr (expfn, 1, arg);
10301 }
10303 /* Optimizations of pow(...)*pow(...). */
10304 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10305 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10306 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10307 {
10308 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10309 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10310 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10311 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10313 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10314 if (operand_equal_p (arg01, arg11, 0))
10315 {
10316 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10317 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10318 return build_call_expr (powfn, 2, arg, arg01);
10319 }
10321 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10322 if (operand_equal_p (arg00, arg10, 0))
10323 {
10324 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10325 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10326 return build_call_expr (powfn, 2, arg00, arg);
10327 }
10328 }
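/* Sketch of the two pow rules above on concrete inputs:
   pow (x, 2.0) * pow (y, 2.0) -> pow (x * y, 2.0)   (equal exponents)
   pow (x, 2.0) * pow (x, 3.0) -> pow (x, 2.0 + 3.0) (equal bases)
   Neither is exact for all operands, which is why this whole region
   sits under flag_unsafe_math_optimizations. */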
10330 /* Optimize tan(x)*cos(x) as sin(x). */
10331 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10332 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10333 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10334 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10335 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10336 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10337 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10338 CALL_EXPR_ARG (arg1, 0), 0))
10339 {
10340 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10342 if (sinfn != NULL_TREE)
10343 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10344 }
10346 /* Optimize x*pow(x,c) as pow(x,c+1). */
10347 if (fcode1 == BUILT_IN_POW
10348 || fcode1 == BUILT_IN_POWF
10349 || fcode1 == BUILT_IN_POWL)
10350 {
10351 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10352 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10353 if (TREE_CODE (arg11) == REAL_CST
10354 && !TREE_OVERFLOW (arg11)
10355 && operand_equal_p (arg0, arg10, 0))
10356 {
10357 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10358 REAL_VALUE_TYPE c;
10359 tree arg;
10361 c = TREE_REAL_CST (arg11);
10362 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10363 arg = build_real (type, c);
10364 return build_call_expr (powfn, 2, arg0, arg);
10365 }
10366 }
10368 /* Optimize pow(x,c)*x as pow(x,c+1). */
10369 if (fcode0 == BUILT_IN_POW
10370 || fcode0 == BUILT_IN_POWF
10371 || fcode0 == BUILT_IN_POWL)
10372 {
10373 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10374 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10375 if (TREE_CODE (arg01) == REAL_CST
10376 && !TREE_OVERFLOW (arg01)
10377 && operand_equal_p (arg1, arg00, 0))
10378 {
10379 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10380 REAL_VALUE_TYPE c;
10381 tree arg;
10383 c = TREE_REAL_CST (arg01);
10384 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10385 arg = build_real (type, c);
10386 return build_call_expr (powfn, 2, arg1, arg);
10387 }
10388 }
10390 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10391 if (! optimize_size
10392 && operand_equal_p (arg0, arg1, 0))
10393 {
10394 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10396 if (powfn)
10397 {
10398 tree arg = build_real (type, dconst2);
10399 return build_call_expr (powfn, 2, arg0, arg);
10400 }
10401 }
10402 }
10403 }
10404 goto associate;
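/* The x*x -> pow (x, 2.0) rewrite above looks circular, since pow of a
   small constant exponent is expanded back into multiplications; the
   round trip presumably gives later passes one canonical form to work
   with (e.g. x*x*x*x can become pow (x, 4.0) and be expanded with two
   multiplies).  It is skipped when optimizing for size. */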
10406 case BIT_IOR_EXPR:
10407 bit_ior:
10408 if (integer_all_onesp (arg1))
10409 return omit_one_operand (type, arg1, arg0);
10410 if (integer_zerop (arg1))
10411 return non_lvalue (fold_convert (type, arg0));
10412 if (operand_equal_p (arg0, arg1, 0))
10413 return non_lvalue (fold_convert (type, arg0));
10415 /* ~X | X is -1. */
10416 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10417 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10418 {
10419 t1 = fold_convert (type, integer_zero_node);
10420 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10421 return omit_one_operand (type, t1, arg1);
10422 }
10424 /* X | ~X is -1. */
10425 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10426 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10427 {
10428 t1 = fold_convert (type, integer_zero_node);
10429 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10430 return omit_one_operand (type, t1, arg0);
10431 }
10433 /* Canonicalize (X & C1) | C2. */
10434 if (TREE_CODE (arg0) == BIT_AND_EXPR
10435 && TREE_CODE (arg1) == INTEGER_CST
10436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10437 {
10438 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10439 int width = TYPE_PRECISION (type), w;
10440 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10441 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10442 hi2 = TREE_INT_CST_HIGH (arg1);
10443 lo2 = TREE_INT_CST_LOW (arg1);
10445 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10446 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10447 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10449 if (width > HOST_BITS_PER_WIDE_INT)
10450 {
10451 mhi = (unsigned HOST_WIDE_INT) -1
10452 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10453 mlo = -1;
10454 }
10455 else
10456 {
10457 mhi = 0;
10458 mlo = (unsigned HOST_WIDE_INT) -1
10459 >> (HOST_BITS_PER_WIDE_INT - width);
10460 }
10462 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10463 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10464 return fold_build2 (BIT_IOR_EXPR, type,
10465 TREE_OPERAND (arg0, 0), arg1);
10467 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10468 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10469 mode which allows further optimizations. */
10470 hi1 &= mhi;
10471 lo1 &= mlo;
10472 hi2 &= mhi;
10473 lo2 &= mlo;
10474 hi3 = hi1 & ~hi2;
10475 lo3 = lo1 & ~lo2;
10476 for (w = BITS_PER_UNIT;
10477 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10478 w <<= 1)
10479 {
10480 unsigned HOST_WIDE_INT mask
10481 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10482 if (((lo1 | lo2) & mask) == mask
10483 && (lo1 & ~mask) == 0 && hi1 == 0)
10484 {
10485 hi3 = 0;
10486 lo3 = mask;
10487 break;
10488 }
10489 }
10490 if (hi3 != hi1 || lo3 != lo1)
10491 return fold_build2 (BIT_IOR_EXPR, type,
10492 fold_build2 (BIT_AND_EXPR, type,
10493 TREE_OPERAND (arg0, 0),
10494 build_int_cst_wide (type,
10495 lo3, hi3)),
10496 arg1);
10497 }
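/* Worked example of the canonicalization above, with small constants:
   (X & 0x03) | 0x01: C1 & C2 != C1 and C1 | C2 != ~0, so C1 is
   minimized to C1 & ~C2 == 0x02, giving (X & 0x02) | 0x01.
   (X & 0x01) | 0x03: C1 & C2 == C1, so the AND is dropped entirely
   and the result is (X, 0x03). */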
10499 /* (X & Y) | Y is (X, Y). */
10500 if (TREE_CODE (arg0) == BIT_AND_EXPR
10501 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10502 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10503 /* (X & Y) | X is (Y, X). */
10504 if (TREE_CODE (arg0) == BIT_AND_EXPR
10505 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10506 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10507 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10508 /* X | (X & Y) is (Y, X). */
10509 if (TREE_CODE (arg1) == BIT_AND_EXPR
10510 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10511 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10512 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10513 /* X | (Y & X) is (Y, X). */
10514 if (TREE_CODE (arg1) == BIT_AND_EXPR
10515 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10516 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10517 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10519 t1 = distribute_bit_expr (code, type, arg0, arg1);
10520 if (t1 != NULL_TREE)
10521 return t1;
10523 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10525 This results in more efficient code for machines without a NAND
10526 instruction. Combine will canonicalize to the first form
10527 which will allow use of NAND instructions provided by the
10528 backend if they exist. */
10529 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10530 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10531 {
10532 return fold_build1 (BIT_NOT_EXPR, type,
10533 build2 (BIT_AND_EXPR, type,
10534 fold_convert (type,
10535 TREE_OPERAND (arg0, 0)),
10536 fold_convert (type,
10537 TREE_OPERAND (arg1, 0))));
10538 }
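/* E.g. "~a | ~b" becomes "~(a & b)" here; combine can then recognize
   the canonical form and emit a single NAND on targets that have one. */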
10540 /* See if this can be simplified into a rotate first. If that
10541 is unsuccessful continue in the association code. */
10542 goto bit_rotate;
10544 case BIT_XOR_EXPR:
10545 if (integer_zerop (arg1))
10546 return non_lvalue (fold_convert (type, arg0));
10547 if (integer_all_onesp (arg1))
10548 return fold_build1 (BIT_NOT_EXPR, type, op0);
10549 if (operand_equal_p (arg0, arg1, 0))
10550 return omit_one_operand (type, integer_zero_node, arg0);
10552 /* ~X ^ X is -1. */
10553 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10555 {
10556 t1 = fold_convert (type, integer_zero_node);
10557 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10558 return omit_one_operand (type, t1, arg1);
10559 }
10561 /* X ^ ~X is -1. */
10562 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10563 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10564 {
10565 t1 = fold_convert (type, integer_zero_node);
10566 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10567 return omit_one_operand (type, t1, arg0);
10568 }
10570 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10571 with a constant, and the two constants have no bits in common,
10572 we should treat this as a BIT_IOR_EXPR since this may produce more
10573 simplifications. */
10574 if (TREE_CODE (arg0) == BIT_AND_EXPR
10575 && TREE_CODE (arg1) == BIT_AND_EXPR
10576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10577 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10578 && integer_zerop (const_binop (BIT_AND_EXPR,
10579 TREE_OPERAND (arg0, 1),
10580 TREE_OPERAND (arg1, 1), 0)))
10581 {
10582 code = BIT_IOR_EXPR;
10583 goto bit_ior;
10584 }
10586 /* (X | Y) ^ X -> Y & ~X. */
10587 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10588 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10589 {
10590 tree t2 = TREE_OPERAND (arg0, 1);
10591 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10592 arg1);
10593 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10594 fold_convert (type, t1));
10595 return t1;
10596 }
10598 /* (Y | X) ^ X -> Y & ~X. */
10599 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10600 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10601 {
10602 tree t2 = TREE_OPERAND (arg0, 0);
10603 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10604 arg1);
10605 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10606 fold_convert (type, t1));
10607 return t1;
10608 }
10610 /* X ^ (X | Y) -> Y & ~X. */
10611 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10612 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10613 {
10614 tree t2 = TREE_OPERAND (arg1, 1);
10615 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10616 arg0);
10617 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10618 fold_convert (type, t1));
10619 return t1;
10620 }
10622 /* X ^ (Y | X) -> Y & ~X. */
10623 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10624 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10625 {
10626 tree t2 = TREE_OPERAND (arg1, 0);
10627 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10628 arg0);
10629 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10630 fold_convert (type, t1));
10631 return t1;
10632 }
10634 /* Convert ~X ^ ~Y to X ^ Y. */
10635 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10636 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10637 return fold_build2 (code, type,
10638 fold_convert (type, TREE_OPERAND (arg0, 0)),
10639 fold_convert (type, TREE_OPERAND (arg1, 0)));
10641 /* Convert ~X ^ C to X ^ ~C. */
10642 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10643 && TREE_CODE (arg1) == INTEGER_CST)
10644 return fold_build2 (code, type,
10645 fold_convert (type, TREE_OPERAND (arg0, 0)),
10646 fold_build1 (BIT_NOT_EXPR, type, arg1));
10648 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10649 if (TREE_CODE (arg0) == BIT_AND_EXPR
10650 && integer_onep (TREE_OPERAND (arg0, 1))
10651 && integer_onep (arg1))
10652 return fold_build2 (EQ_EXPR, type, arg0,
10653 build_int_cst (TREE_TYPE (arg0), 0));
10655 /* Fold (X & Y) ^ Y as ~X & Y. */
10656 if (TREE_CODE (arg0) == BIT_AND_EXPR
10657 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10658 {
10659 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10660 return fold_build2 (BIT_AND_EXPR, type,
10661 fold_build1 (BIT_NOT_EXPR, type, tem),
10662 fold_convert (type, arg1));
10663 }
10664 /* Fold (X & Y) ^ X as ~Y & X. */
10665 if (TREE_CODE (arg0) == BIT_AND_EXPR
10666 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10667 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10668 {
10669 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10670 return fold_build2 (BIT_AND_EXPR, type,
10671 fold_build1 (BIT_NOT_EXPR, type, tem),
10672 fold_convert (type, arg1));
10673 }
10674 /* Fold X ^ (X & Y) as X & ~Y. */
10675 if (TREE_CODE (arg1) == BIT_AND_EXPR
10676 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10677 {
10678 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10679 return fold_build2 (BIT_AND_EXPR, type,
10680 fold_convert (type, arg0),
10681 fold_build1 (BIT_NOT_EXPR, type, tem));
10682 }
10683 /* Fold X ^ (Y & X) as ~Y & X. */
10684 if (TREE_CODE (arg1) == BIT_AND_EXPR
10685 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10686 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10687 {
10688 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10689 return fold_build2 (BIT_AND_EXPR, type,
10690 fold_build1 (BIT_NOT_EXPR, type, tem),
10691 fold_convert (type, arg0));
10692 }
10694 /* See if this can be simplified into a rotate first. If that
10695 is unsuccessful continue in the association code. */
10696 goto bit_rotate;
10698 case BIT_AND_EXPR:
10699 if (integer_all_onesp (arg1))
10700 return non_lvalue (fold_convert (type, arg0));
10701 if (integer_zerop (arg1))
10702 return omit_one_operand (type, arg1, arg0);
10703 if (operand_equal_p (arg0, arg1, 0))
10704 return non_lvalue (fold_convert (type, arg0));
10706 /* ~X & X is always zero. */
10707 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10708 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10709 return omit_one_operand (type, integer_zero_node, arg1);
10711 /* X & ~X is always zero. */
10712 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10713 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10714 return omit_one_operand (type, integer_zero_node, arg0);
10716 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10717 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10718 && TREE_CODE (arg1) == INTEGER_CST
10719 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10720 {
10721 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10722 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10723 TREE_OPERAND (arg0, 0), tmp1);
10724 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10725 TREE_OPERAND (arg0, 1), tmp1);
10726 return fold_convert (type,
10727 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10728 tmp2, tmp3));
10729 }
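/* Worked example of the canonicalization above: (x | 0x0C) & 0x0A
   distributes to (x & 0x0A) | (0x0C & 0x0A), and the constant AND
   folds, giving (x & 0x0A) | 0x08. */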
10731 /* (X | Y) & Y is (X, Y). */
10732 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10733 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10734 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10735 /* (X | Y) & X is (Y, X). */
10736 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10737 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10738 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10739 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10740 /* X & (X | Y) is (Y, X). */
10741 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10742 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10743 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10744 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10745 /* X & (Y | X) is (Y, X). */
10746 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10748 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10749 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10751 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10752 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10753 && integer_onep (TREE_OPERAND (arg0, 1))
10754 && integer_onep (arg1))
10755 {
10756 tem = TREE_OPERAND (arg0, 0);
10757 return fold_build2 (EQ_EXPR, type,
10758 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10759 build_int_cst (TREE_TYPE (tem), 1)),
10760 build_int_cst (TREE_TYPE (tem), 0));
10761 }
10762 /* Fold ~X & 1 as (X & 1) == 0. */
10763 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10764 && integer_onep (arg1))
10765 {
10766 tem = TREE_OPERAND (arg0, 0);
10767 return fold_build2 (EQ_EXPR, type,
10768 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10769 build_int_cst (TREE_TYPE (tem), 1)),
10770 build_int_cst (TREE_TYPE (tem), 0));
10771 }
10773 /* Fold (X ^ Y) & Y as ~X & Y. */
10774 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10775 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10776 {
10777 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10778 return fold_build2 (BIT_AND_EXPR, type,
10779 fold_build1 (BIT_NOT_EXPR, type, tem),
10780 fold_convert (type, arg1));
10781 }
10782 /* Fold (X ^ Y) & X as ~Y & X. */
10783 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10784 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10785 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10786 {
10787 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10788 return fold_build2 (BIT_AND_EXPR, type,
10789 fold_build1 (BIT_NOT_EXPR, type, tem),
10790 fold_convert (type, arg1));
10791 }
10792 /* Fold X & (X ^ Y) as X & ~Y. */
10793 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10794 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10795 {
10796 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10797 return fold_build2 (BIT_AND_EXPR, type,
10798 fold_convert (type, arg0),
10799 fold_build1 (BIT_NOT_EXPR, type, tem));
10800 }
10801 /* Fold X & (Y ^ X) as ~Y & X. */
10802 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10803 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10804 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10805 {
10806 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10807 return fold_build2 (BIT_AND_EXPR, type,
10808 fold_build1 (BIT_NOT_EXPR, type, tem),
10809 fold_convert (type, arg0));
10810 }
10812 t1 = distribute_bit_expr (code, type, arg0, arg1);
10813 if (t1 != NULL_TREE)
10814 return t1;
10815 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10816 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10817 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10818 {
10819 unsigned int prec
10820 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10822 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10823 && (~TREE_INT_CST_LOW (arg1)
10824 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10825 return fold_convert (type, TREE_OPERAND (arg0, 0));
10826 }
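/* I.e. for "unsigned char c;", "(int) c & 0377" is just "(int) c":
   the widening from the 8-bit unsigned type already guarantees the
   upper bits are zero, so a mask covering all 8 low bits is a no-op. */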
10828 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10830 This results in more efficient code for machines without a NOR
10831 instruction. Combine will canonicalize to the first form
10832 which will allow use of NOR instructions provided by the
10833 backend if they exist. */
10834 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10835 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10836 {
10837 return fold_build1 (BIT_NOT_EXPR, type,
10838 build2 (BIT_IOR_EXPR, type,
10839 fold_convert (type,
10840 TREE_OPERAND (arg0, 0)),
10841 fold_convert (type,
10842 TREE_OPERAND (arg1, 0))));
10843 }
10845 /* If arg0 is derived from the address of an object or function, we may
10846 be able to fold this expression using the object or function's
10847 alignment. */
10848 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10849 {
10850 unsigned HOST_WIDE_INT modulus, residue;
10851 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10853 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10855 /* This works because modulus is a power of 2. If this weren't the
10856 case, we'd have to replace it by its greatest power-of-2
10857 divisor: modulus & -modulus. */
10858 if (low < modulus)
10859 return build_int_cst (type, residue & low);
10860 }
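/* A sketch: if arg0 is the address of an object known to be 8-byte
   aligned (modulus 8, residue 0), masking it with 7 folds to the
   constant 0 here, while masking with 15 is left alone, since 15 is
   not below the known modulus. */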
10862 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10863 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10864 if the new mask might be further optimized. */
10865 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10866 || TREE_CODE (arg0) == RSHIFT_EXPR)
10867 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10868 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10869 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10870 < TYPE_PRECISION (TREE_TYPE (arg0))
10871 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10872 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10873 {
10874 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10875 unsigned HOST_WIDE_INT mask
10876 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10877 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10878 tree shift_type = TREE_TYPE (arg0);
10880 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10881 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10882 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10883 && TYPE_PRECISION (TREE_TYPE (arg0))
10884 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10885 {
10886 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10887 tree arg00 = TREE_OPERAND (arg0, 0);
10888 /* See if more bits can be proven as zero because of
10889 zero extension. */
10890 if (TREE_CODE (arg00) == NOP_EXPR
10891 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10892 {
10893 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10894 if (TYPE_PRECISION (inner_type)
10895 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10896 && TYPE_PRECISION (inner_type) < prec)
10897 {
10898 prec = TYPE_PRECISION (inner_type);
10899 /* See if we can shorten the right shift. */
10900 if (shiftc < prec)
10901 shift_type = inner_type;
10902 }
10903 }
10904 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10905 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10906 zerobits <<= prec - shiftc;
10907 /* For arithmetic shift if sign bit could be set, zerobits
10908 can contain actually sign bits, so no transformation is
10909 possible, unless MASK masks them all away. In that
10910 case the shift needs to be converted into logical shift. */
10911 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10912 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10913 {
10914 if ((mask & zerobits) == 0)
10915 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10916 else
10917 zerobits = 0;
10918 }
10919 }
10921 /* ((X << 16) & 0xff00) is (X, 0). */
10922 if ((mask & zerobits) == mask)
10923 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10925 newmask = mask | zerobits;
10926 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10927 {
10928 unsigned int prec;
10930 /* Only do the transformation if NEWMASK is some integer
10931 mode's mask. */
10932 for (prec = BITS_PER_UNIT;
10933 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10934 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10935 break;
10936 if (prec < HOST_BITS_PER_WIDE_INT
10937 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10938 {
10939 if (shift_type != TREE_TYPE (arg0))
10940 {
10941 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10942 fold_convert (shift_type,
10943 TREE_OPERAND (arg0, 0)),
10944 TREE_OPERAND (arg0, 1));
10945 tem = fold_convert (type, tem);
10946 }
10947 else
10948 tem = op0;
10949 return fold_build2 (BIT_AND_EXPR, type, tem,
10950 build_int_cst_type (TREE_TYPE (op1),
10951 newmask));
10952 }
10953 }
10954 }
10956 goto associate;
10958 case RDIV_EXPR:
10959 /* Don't touch a floating-point divide by zero unless the mode
10960 of the constant can represent infinity. */
10961 if (TREE_CODE (arg1) == REAL_CST
10962 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10963 && real_zerop (arg1))
10964 return NULL_TREE;
10966 /* Optimize A / A to 1.0 if we don't care about
10967 NaNs or Infinities. Skip the transformation
10968 for non-real operands. */
10969 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10970 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10971 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10972 && operand_equal_p (arg0, arg1, 0))
10973 {
10974 tree r = build_real (TREE_TYPE (arg0), dconst1);
10976 return omit_two_operands (type, r, arg0, arg1);
10977 }
10979 /* The complex version of the above A / A optimization. */
10980 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10981 && operand_equal_p (arg0, arg1, 0))
10982 {
10983 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10984 if (! HONOR_NANS (TYPE_MODE (elem_type))
10985 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10986 {
10987 tree r = build_real (elem_type, dconst1);
10988 /* omit_two_operands will call fold_convert for us. */
10989 return omit_two_operands (type, r, arg0, arg1);
10990 }
10991 }
10993 /* (-A) / (-B) -> A / B */
10994 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10995 return fold_build2 (RDIV_EXPR, type,
10996 TREE_OPERAND (arg0, 0),
10997 negate_expr (arg1));
10998 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10999 return fold_build2 (RDIV_EXPR, type,
11000 negate_expr (arg0),
11001 TREE_OPERAND (arg1, 0));
11003 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11004 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11005 && real_onep (arg1))
11006 return non_lvalue (fold_convert (type, arg0));
11008 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11009 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11010 && real_minus_onep (arg1))
11011 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11013 /* If ARG1 is a constant, we can convert this to a multiply by the
11014 reciprocal. This does not have the same rounding properties,
11015 so only do this if -freciprocal-math. We can actually
11016 always safely do it if ARG1 is a power of two, but it's hard to
11017 tell if it is or not in a portable manner. */
11018 if (TREE_CODE (arg1) == REAL_CST)
11019 {
11020 if (flag_reciprocal_math
11021 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11022 arg1, 0)))
11023 return fold_build2 (MULT_EXPR, type, arg0, tem);
11024 /* Find the reciprocal if optimizing and the result is exact. */
11025 if (optimize)
11026 {
11027 REAL_VALUE_TYPE r;
11028 r = TREE_REAL_CST (arg1);
11029 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11030 {
11031 tem = build_real (type, r);
11032 return fold_build2 (MULT_EXPR, type,
11033 fold_convert (type, arg0), tem);
11034 }
11035 }
11036 }
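/* Example of the exact-reciprocal path above: when optimizing,
   "x / 4.0" becomes "x * 0.25" even without -freciprocal-math,
   because 0.25 is exactly representable and exact_real_inverse
   verifies the inversion loses nothing; "x / 3.0" is rewritten only
   under flag_reciprocal_math, since 1.0/3.0 rounds. */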
11037 /* Convert A/B/C to A/(B*C). */
11038 if (flag_reciprocal_math
11039 && TREE_CODE (arg0) == RDIV_EXPR)
11040 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11041 fold_build2 (MULT_EXPR, type,
11042 TREE_OPERAND (arg0, 1), arg1));
11044 /* Convert A/(B/C) to (A/B)*C. */
11045 if (flag_reciprocal_math
11046 && TREE_CODE (arg1) == RDIV_EXPR)
11047 return fold_build2 (MULT_EXPR, type,
11048 fold_build2 (RDIV_EXPR, type, arg0,
11049 TREE_OPERAND (arg1, 0)),
11050 TREE_OPERAND (arg1, 1));
11052 /* Convert C1/(X*C2) into (C1/C2)/X. */
11053 if (flag_reciprocal_math
11054 && TREE_CODE (arg1) == MULT_EXPR
11055 && TREE_CODE (arg0) == REAL_CST
11056 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11057 {
11058 tree tem = const_binop (RDIV_EXPR, arg0,
11059 TREE_OPERAND (arg1, 1), 0);
11060 if (tem)
11061 return fold_build2 (RDIV_EXPR, type, tem,
11062 TREE_OPERAND (arg1, 0));
11063 }
11065 if (flag_unsafe_math_optimizations)
11066 {
11067 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11068 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11070 /* Optimize sin(x)/cos(x) as tan(x). */
11071 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11072 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11073 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11074 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11075 CALL_EXPR_ARG (arg1, 0), 0))
11076 {
11077 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11079 if (tanfn != NULL_TREE)
11080 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11081 }
11083 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11084 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11085 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11086 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11087 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11088 CALL_EXPR_ARG (arg1, 0), 0))
11089 {
11090 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11092 if (tanfn != NULL_TREE)
11093 {
11094 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11095 return fold_build2 (RDIV_EXPR, type,
11096 build_real (type, dconst1), tmp);
11097 }
11098 }
11100 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11101 NaNs or Infinities. */
11102 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11103 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11104 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11105 {
11106 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11107 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11109 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11110 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11111 && operand_equal_p (arg00, arg01, 0))
11112 {
11113 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11115 if (cosfn != NULL_TREE)
11116 return build_call_expr (cosfn, 1, arg00);
11117 }
11118 }
11120 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11121 NaNs or Infinities. */
11122 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11123 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11124 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11125 {
11126 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11127 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11129 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11130 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11131 && operand_equal_p (arg00, arg01, 0))
11132 {
11133 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11135 if (cosfn != NULL_TREE)
11136 {
11137 tree tmp = build_call_expr (cosfn, 1, arg00);
11138 return fold_build2 (RDIV_EXPR, type,
11139 build_real (type, dconst1),
11140 tmp);
11141 }
11142 }
11143 }
11145 /* Optimize pow(x,c)/x as pow(x,c-1). */
11146 if (fcode0 == BUILT_IN_POW
11147 || fcode0 == BUILT_IN_POWF
11148 || fcode0 == BUILT_IN_POWL)
11149 {
11150 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11151 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11152 if (TREE_CODE (arg01) == REAL_CST
11153 && !TREE_OVERFLOW (arg01)
11154 && operand_equal_p (arg1, arg00, 0))
11155 {
11156 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11157 REAL_VALUE_TYPE c;
11158 tree arg;
11160 c = TREE_REAL_CST (arg01);
11161 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11162 arg = build_real (type, c);
11163 return build_call_expr (powfn, 2, arg1, arg);
11164 }
11165 }
11167 /* Optimize a/root(b/c) into a*root(c/b). */
11168 if (BUILTIN_ROOT_P (fcode1))
11169 {
11170 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11172 if (TREE_CODE (rootarg) == RDIV_EXPR)
11173 {
11174 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11175 tree b = TREE_OPERAND (rootarg, 0);
11176 tree c = TREE_OPERAND (rootarg, 1);
11178 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11180 tmp = build_call_expr (rootfn, 1, tmp);
11181 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11182 }
11183 }
11185 /* Optimize x/expN(y) into x*expN(-y). */
11186 if (BUILTIN_EXPONENT_P (fcode1))
11187 {
11188 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11189 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11190 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11191 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11192 }
11194 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11195 if (fcode1 == BUILT_IN_POW
11196 || fcode1 == BUILT_IN_POWF
11197 || fcode1 == BUILT_IN_POWL)
11198 {
11199 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11200 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11201 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11202 tree neg11 = fold_convert (type, negate_expr (arg11));
11203 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11204 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11205 }
11206 }
11207 return NULL_TREE;
11209 case TRUNC_DIV_EXPR:
11210 case FLOOR_DIV_EXPR:
11211 /* Simplify A / (B << N) where A and B are positive and B is
11212 a power of 2, to A >> (N + log2(B)). */
11213 strict_overflow_p = false;
11214 if (TREE_CODE (arg1) == LSHIFT_EXPR
11215 && (TYPE_UNSIGNED (type)
11216 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11217 {
11218 tree sval = TREE_OPERAND (arg1, 0);
11219 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11220 {
11221 tree sh_cnt = TREE_OPERAND (arg1, 1);
11222 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11224 if (strict_overflow_p)
11225 fold_overflow_warning (("assuming signed overflow does not "
11226 "occur when simplifying A / (B << N)"),
11227 WARN_STRICT_OVERFLOW_MISC);
11229 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11230 sh_cnt, build_int_cst (NULL_TREE, pow2));
11231 return fold_build2 (RSHIFT_EXPR, type,
11232 fold_convert (type, arg0), sh_cnt);
11233 }
11234 }
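/* Concrete instance of the rule above (names illustrative): for
   unsigned a and n, "a / (4u << n)" becomes "a >> (n + 2)", since
   log2 (4) == 2.  The strict-overflow warning fires only when A's
   nonnegativity was derived by assuming signed overflow is undefined. */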
11236 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11237 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11238 if (INTEGRAL_TYPE_P (type)
11239 && TYPE_UNSIGNED (type)
11240 && code == FLOOR_DIV_EXPR)
11241 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11243 /* Fall thru */
11245 case ROUND_DIV_EXPR:
11246 case CEIL_DIV_EXPR:
11247 case EXACT_DIV_EXPR:
11248 if (integer_onep (arg1))
11249 return non_lvalue (fold_convert (type, arg0));
11250 if (integer_zerop (arg1))
11251 return NULL_TREE;
11252 /* X / -1 is -X. */
11253 if (!TYPE_UNSIGNED (type)
11254 && TREE_CODE (arg1) == INTEGER_CST
11255 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11256 && TREE_INT_CST_HIGH (arg1) == -1)
11257 return fold_convert (type, negate_expr (arg0));
11259 /* Convert -A / -B to A / B when the type is signed and overflow is
11260 undefined. */
11261 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11262 && TREE_CODE (arg0) == NEGATE_EXPR
11263 && negate_expr_p (arg1))
11264 {
11265 if (INTEGRAL_TYPE_P (type))
11266 fold_overflow_warning (("assuming signed overflow does not occur "
11267 "when distributing negation across "
11268 "division"),
11269 WARN_STRICT_OVERFLOW_MISC);
11270 return fold_build2 (code, type,
11271 fold_convert (type, TREE_OPERAND (arg0, 0)),
11272 negate_expr (arg1));
11273 }
11274 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11275 && TREE_CODE (arg1) == NEGATE_EXPR
11276 && negate_expr_p (arg0))
11277 {
11278 if (INTEGRAL_TYPE_P (type))
11279 fold_overflow_warning (("assuming signed overflow does not occur "
11280 "when distributing negation across "
11281 "division"),
11282 WARN_STRICT_OVERFLOW_MISC);
11283 return fold_build2 (code, type, negate_expr (arg0),
11284 TREE_OPERAND (arg1, 0));
11285 }
11287 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11288 operation, EXACT_DIV_EXPR.
11290 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11291 At one time others generated faster code, it's not clear if they do
11292 after the last round of changes to the DIV code in expmed.c. */
11293 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11294 && multiple_of_p (type, arg0, arg1))
11295 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11297 strict_overflow_p = false;
11298 if (TREE_CODE (arg1) == INTEGER_CST
11299 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11300 &strict_overflow_p)))
11301 {
11302 if (strict_overflow_p)
11303 fold_overflow_warning (("assuming signed overflow does not occur "
11304 "when simplifying division"),
11305 WARN_STRICT_OVERFLOW_MISC);
11306 return fold_convert (type, tem);
11307 }
11309 return NULL_TREE;
11311 case CEIL_MOD_EXPR:
11312 case FLOOR_MOD_EXPR:
11313 case ROUND_MOD_EXPR:
11314 case TRUNC_MOD_EXPR:
11315 /* X % 1 is always zero, but be sure to preserve any side
11316 effects in X. */
11317 if (integer_onep (arg1))
11318 return omit_one_operand (type, integer_zero_node, arg0);
11320 /* X % 0, return X % 0 unchanged so that we can get the
11321 proper warnings and errors. */
11322 if (integer_zerop (arg1))
11323 return NULL_TREE;
11325 /* 0 % X is always zero, but be sure to preserve any side
11326 effects in X. Place this after checking for X == 0. */
11327 if (integer_zerop (arg0))
11328 return omit_one_operand (type, integer_zero_node, arg1);
11330 /* X % -1 is zero. */
11331 if (!TYPE_UNSIGNED (type)
11332 && TREE_CODE (arg1) == INTEGER_CST
11333 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11334 && TREE_INT_CST_HIGH (arg1) == -1)
11335 return omit_one_operand (type, integer_zero_node, arg0);
11337 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11338 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11339 strict_overflow_p = false;
11340 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11341 && (TYPE_UNSIGNED (type)
11342 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11343 {
11344 tree c = arg1;
11345 /* Also optimize A % (C << N) where C is a power of 2,
11346 to A & ((C << N) - 1). */
11347 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11348 c = TREE_OPERAND (arg1, 0);
11350 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11351 {
11352 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11353 build_int_cst (TREE_TYPE (arg1), 1));
11354 if (strict_overflow_p)
11355 fold_overflow_warning (("assuming signed overflow does not "
11356 "occur when simplifying "
11357 "X % (power of two)"),
11358 WARN_STRICT_OVERFLOW_MISC);
11359 return fold_build2 (BIT_AND_EXPR, type,
11360 fold_convert (type, arg0),
11361 fold_convert (type, mask));
11362 }
11363 }
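/* E.g. "x % 16u" folds to "x & 15u" here, and the LSHIFT form
   "x % (2u << n)" folds to "x & ((2u << n) - 1)".  This is valid
   because X and the power-of-two divisor are both known nonnegative. */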
11365 /* X % -C is the same as X % C. */
11366 if (code == TRUNC_MOD_EXPR
11367 && !TYPE_UNSIGNED (type)
11368 && TREE_CODE (arg1) == INTEGER_CST
11369 && !TREE_OVERFLOW (arg1)
11370 && TREE_INT_CST_HIGH (arg1) < 0
11371 && !TYPE_OVERFLOW_TRAPS (type)
11372 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11373 && !sign_bit_p (arg1, arg1))
11374 return fold_build2 (code, type, fold_convert (type, arg0),
11375 fold_convert (type, negate_expr (arg1)));
11377 /* X % -Y is the same as X % Y. */
11378 if (code == TRUNC_MOD_EXPR
11379 && !TYPE_UNSIGNED (type)
11380 && TREE_CODE (arg1) == NEGATE_EXPR
11381 && !TYPE_OVERFLOW_TRAPS (type))
11382 return fold_build2 (code, type, fold_convert (type, arg0),
11383 fold_convert (type, TREE_OPERAND (arg1, 0)));
11385 if (TREE_CODE (arg1) == INTEGER_CST
11386 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11387 &strict_overflow_p)))
11388 {
11389 if (strict_overflow_p)
11390 fold_overflow_warning (("assuming signed overflow does not occur "
11391 "when simplifying modulus"),
11392 WARN_STRICT_OVERFLOW_MISC);
11393 return fold_convert (type, tem);
11394 }
11396 return NULL_TREE;
11398 case LROTATE_EXPR:
11399 case RROTATE_EXPR:
11400 if (integer_all_onesp (arg0))
11401 return omit_one_operand (type, arg0, arg1);
11402 goto shift;
11404 case RSHIFT_EXPR:
11405 /* Optimize -1 >> x for arithmetic right shifts. */
11406 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11407 return omit_one_operand (type, arg0, arg1);
11408 /* ... fall through ... */
11410 case LSHIFT_EXPR:
11411 shift:
11412 if (integer_zerop (arg1))
11413 return non_lvalue (fold_convert (type, arg0));
11414 if (integer_zerop (arg0))
11415 return omit_one_operand (type, arg0, arg1);
11417 /* Since negative shift count is not well-defined,
11418 don't try to compute it in the compiler. */
11419 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11420 return NULL_TREE;
11422 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11423 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11424 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11425 && host_integerp (TREE_OPERAND (arg0, 1), false)
11426 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11427 {
11428 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11429 + TREE_INT_CST_LOW (arg1));
11431 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11432 being well defined. */
11433 if (low >= TYPE_PRECISION (type))
11434 {
11435 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11436 low = low % TYPE_PRECISION (type);
11437 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11438 return build_int_cst (type, 0);
11439 else
11440 low = TYPE_PRECISION (type) - 1;
11441 }
11443 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11444 build_int_cst (type, low));
11445 }
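/* E.g. "(x >> 3) >> 2" becomes "x >> 5" here.  The clamping above
   handles combined counts reaching the precision: for a 32-bit
   unsigned x, "(x >> 16) >> 16" folds to 0, while rotate counts are
   simply reduced modulo the precision. */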
11447 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11448 into x & ((unsigned)-1 >> c) for unsigned types. */
11449 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11450 || (TYPE_UNSIGNED (type)
11451 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11452 && host_integerp (arg1, false)
11453 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11454 && host_integerp (TREE_OPERAND (arg0, 1), false)
11455 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11456 {
11457 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11458 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11459 tree lshift;
11460 tree arg00;
11462 if (low0 == low1)
11463 {
11464 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11466 lshift = build_int_cst (type, -1);
11467 lshift = int_const_binop (code, lshift, arg1, 0);
11469 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11470 }
11471 }
11473 /* Rewrite an LROTATE_EXPR by a constant into an
11474 RROTATE_EXPR by a new constant. */
11475 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11476 {
11477 tree tem = build_int_cst (TREE_TYPE (arg1),
11478 TYPE_PRECISION (type));
11479 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11480 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11481 }
11483 /* If we have a rotate of a bit operation with the rotate count and
11484 the second operand of the bit operation both constant,
11485 permute the two operations. */
11486 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11487 && (TREE_CODE (arg0) == BIT_AND_EXPR
11488 || TREE_CODE (arg0) == BIT_IOR_EXPR
11489 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11490 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11491 return fold_build2 (TREE_CODE (arg0), type,
11492 fold_build2 (code, type,
11493 TREE_OPERAND (arg0, 0), arg1),
11494 fold_build2 (code, type,
11495 TREE_OPERAND (arg0, 1), arg1));
11497 /* Two consecutive rotates adding up to the precision of the
11498 type can be ignored. */
11499 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11500 && TREE_CODE (arg0) == RROTATE_EXPR
11501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11502 && TREE_INT_CST_HIGH (arg1) == 0
11503 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11504 && ((TREE_INT_CST_LOW (arg1)
11505 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11506 == (unsigned int) TYPE_PRECISION (type)))
11507 return TREE_OPERAND (arg0, 0);
11509 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11510 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11511 if the latter can be further optimized. */
11512 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11513 && TREE_CODE (arg0) == BIT_AND_EXPR
11514 && TREE_CODE (arg1) == INTEGER_CST
11515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11516 {
11517 tree mask = fold_build2 (code, type,
11518 fold_convert (type, TREE_OPERAND (arg0, 1)),
11519 arg1);
11520 tree shift = fold_build2 (code, type,
11521 fold_convert (type, TREE_OPERAND (arg0, 0)),
11522 arg1);
11523 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11524 if (tem)
11525 return tem;
11526 }
11528 return NULL_TREE;
11530 case MIN_EXPR:
11531 if (operand_equal_p (arg0, arg1, 0))
11532 return omit_one_operand (type, arg0, arg1);
11533 if (INTEGRAL_TYPE_P (type)
11534 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11535 return omit_one_operand (type, arg1, arg0);
11536 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11537 if (tem)
11538 return tem;
11539 goto associate;
11541 case MAX_EXPR:
11542 if (operand_equal_p (arg0, arg1, 0))
11543 return omit_one_operand (type, arg0, arg1);
11544 if (INTEGRAL_TYPE_P (type)
11545 && TYPE_MAX_VALUE (type)
11546 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11547 return omit_one_operand (type, arg1, arg0);
11548 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11549 if (tem)
11550 return tem;
11551 goto associate;
11553 case TRUTH_ANDIF_EXPR:
11554 /* Note that the operands of this must be ints
11555 and their values must be 0 or 1.
11556 ("true" is a fixed value perhaps depending on the language.) */
11557 /* If first arg is constant zero, return it. */
11558 if (integer_zerop (arg0))
11559 return fold_convert (type, arg0);
11560 case TRUTH_AND_EXPR:
11561 /* If either arg is constant true, drop it. */
11562 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11563 return non_lvalue (fold_convert (type, arg1));
11564 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11565 /* Preserve sequence points. */
11566 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11567 return non_lvalue (fold_convert (type, arg0));
11568 /* If second arg is constant zero, result is zero, but first arg
11569 must be evaluated. */
11570 if (integer_zerop (arg1))
11571 return omit_one_operand (type, arg1, arg0);
11572 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11573 case will be handled here. */
11574 if (integer_zerop (arg0))
11575 return omit_one_operand (type, arg0, arg1);
11577 /* !X && X is always false. */
11578 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11579 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11580 return omit_one_operand (type, integer_zero_node, arg1);
11581 /* X && !X is always false. */
11582 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11583 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11584 return omit_one_operand (type, integer_zero_node, arg0);
11586 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11587 means A >= Y && A != MAX, but in this case we know that
11588 A < X <= MAX. */
11590 if (!TREE_SIDE_EFFECTS (arg0)
11591 && !TREE_SIDE_EFFECTS (arg1))
11592 {
11593 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11594 if (tem && !operand_equal_p (tem, arg0, 0))
11595 return fold_build2 (code, type, tem, arg1);
11597 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11598 if (tem && !operand_equal_p (tem, arg1, 0))
11599 return fold_build2 (code, type, arg0, tem);
11600 }
11602 truth_andor:
11603 /* We only do these simplifications if we are optimizing. */
11604 if (!optimize)
11605 return NULL_TREE;
11607 /* Check for things like (A || B) && (A || C). We can convert this
11608 to A || (B && C). Note that either operator can be any of the four
11609 truth and/or operations and the transformation will still be
11610 valid. Also note that we only care about order for the
11611 ANDIF and ORIF operators. If B contains side effects, this
11612 might change the truth-value of A. */
11613 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11614 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11615 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11616 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11617 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11618 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11619 {
11620 tree a00 = TREE_OPERAND (arg0, 0);
11621 tree a01 = TREE_OPERAND (arg0, 1);
11622 tree a10 = TREE_OPERAND (arg1, 0);
11623 tree a11 = TREE_OPERAND (arg1, 1);
11624 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11625 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11626 && (code == TRUTH_AND_EXPR
11627 || code == TRUTH_OR_EXPR));
11629 if (operand_equal_p (a00, a10, 0))
11630 return fold_build2 (TREE_CODE (arg0), type, a00,
11631 fold_build2 (code, type, a01, a11));
11632 else if (commutative && operand_equal_p (a00, a11, 0))
11633 return fold_build2 (TREE_CODE (arg0), type, a00,
11634 fold_build2 (code, type, a01, a10));
11635 else if (commutative && operand_equal_p (a01, a10, 0))
11636 return fold_build2 (TREE_CODE (arg0), type, a01,
11637 fold_build2 (code, type, a00, a11));
11639 /* This case is tricky because we must either have commutative
11640 operators or else A10 must not have side-effects. */
11642 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11643 && operand_equal_p (a01, a11, 0))
11644 return fold_build2 (TREE_CODE (arg0), type,
11645 fold_build2 (code, type, a00, a10),
11646 a01);
11647 }
11649 /* See if we can build a range comparison. */
11650 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11651 return tem;
11653 /* Check for the possibility of merging component references. If our
11654 lhs is another similar operation, try to merge its rhs with our
11655 rhs. Then try to merge our lhs and rhs. */
11656 if (TREE_CODE (arg0) == code
11657 && 0 != (tem = fold_truthop (code, type,
11658 TREE_OPERAND (arg0, 1), arg1)))
11659 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11661 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11662 return tem;
11664 return NULL_TREE;
11666 case TRUTH_ORIF_EXPR:
11667 /* Note that the operands of this must be ints
11668 and their values must be 0 or true.
11669 ("true" is a fixed value perhaps depending on the language.) */
11670 /* If first arg is constant true, return it. */
11671 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11672 return fold_convert (type, arg0);
11673 case TRUTH_OR_EXPR:
11674 /* If either arg is constant zero, drop it. */
11675 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11676 return non_lvalue (fold_convert (type, arg1));
11677 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11678 /* Preserve sequence points. */
11679 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11680 return non_lvalue (fold_convert (type, arg0));
11681 /* If second arg is constant true, result is true, but we must
11682 evaluate first arg. */
11683 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11684 return omit_one_operand (type, arg1, arg0);
11685 /* Likewise for first arg, but note this only occurs here for
11686 TRUTH_OR_EXPR. */
11687 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11688 return omit_one_operand (type, arg0, arg1);
11690 /* !X || X is always true. */
11691 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11692 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11693 return omit_one_operand (type, integer_one_node, arg1);
11694 /* X || !X is always true. */
11695 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11696 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11697 return omit_one_operand (type, integer_one_node, arg0);
11699 goto truth_andor;
11701 case TRUTH_XOR_EXPR:
11702 /* If the second arg is constant zero, drop it. */
11703 if (integer_zerop (arg1))
11704 return non_lvalue (fold_convert (type, arg0));
11705 /* If the second arg is constant true, this is a logical inversion. */
11706 if (integer_onep (arg1))
11707 {
11708 /* Only call invert_truthvalue if operand is a truth value. */
11709 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11710 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11711 else
11712 tem = invert_truthvalue (arg0);
11713 return non_lvalue (fold_convert (type, tem));
11714 }
11715 /* Identical arguments cancel to zero. */
11716 if (operand_equal_p (arg0, arg1, 0))
11717 return omit_one_operand (type, integer_zero_node, arg0);
11719 /* !X ^ X is always true. */
11720 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11722 return omit_one_operand (type, integer_one_node, arg1);
11724 /* X ^ !X is always true. */
11725 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11726 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11727 return omit_one_operand (type, integer_one_node, arg0);
11729 return NULL_TREE;
11731 case EQ_EXPR:
11732 case NE_EXPR:
11733 tem = fold_comparison (code, type, op0, op1);
11734 if (tem != NULL_TREE)
11735 return tem;
11737 /* bool_var != 0 becomes bool_var. */
11738 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11739 && code == NE_EXPR)
11740 return non_lvalue (fold_convert (type, arg0));
11742 /* bool_var == 1 becomes bool_var. */
11743 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11744 && code == EQ_EXPR)
11745 return non_lvalue (fold_convert (type, arg0));
11747 /* bool_var != 1 becomes !bool_var. */
11748 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11749 && code == NE_EXPR)
11750 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11752 /* bool_var == 0 becomes !bool_var. */
11753 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11754 && code == EQ_EXPR)
11755 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11757 /* If this is an equality comparison of the address of two non-weak,
11758 unaliased symbols neither of which are extern (since we do not
11759 have access to attributes for externs), then we know the result. */
11760 if (TREE_CODE (arg0) == ADDR_EXPR
11761 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11762 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11763 && ! lookup_attribute ("alias",
11764 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11765 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11766 && TREE_CODE (arg1) == ADDR_EXPR
11767 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11768 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11769 && ! lookup_attribute ("alias",
11770 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11771 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11772 {
11773 /* We know that we're looking at the address of two
11774 non-weak, unaliased, static _DECL nodes.
11776 It is both wasteful and incorrect to call operand_equal_p
11777 to compare the two ADDR_EXPR nodes. It is wasteful in that
11778 all we need to do is test pointer equality for the arguments
11779 to the two ADDR_EXPR nodes. It is incorrect to use
11780 operand_equal_p as that function is NOT equivalent to a
11781 C equality test. It can in fact return false for two
11782 objects which would test as equal using the C equality
11783 operator. */
11784 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11785 return constant_boolean_node (equal
11786 ? code == EQ_EXPR : code != EQ_EXPR,
11787 type);
11788 }
11790 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11791 a MINUS_EXPR of a constant, we can convert it into a comparison with
11792 a revised constant as long as no overflow occurs. */
11793 if (TREE_CODE (arg1) == INTEGER_CST
11794 && (TREE_CODE (arg0) == PLUS_EXPR
11795 || TREE_CODE (arg0) == MINUS_EXPR)
11796 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11797 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11798 ? MINUS_EXPR : PLUS_EXPR,
11799 fold_convert (TREE_TYPE (arg0), arg1),
11800 TREE_OPERAND (arg0, 1), 0))
11801 && !TREE_OVERFLOW (tem))
11802 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11804 /* Similarly for a NEGATE_EXPR. */
11805 if (TREE_CODE (arg0) == NEGATE_EXPR
11806 && TREE_CODE (arg1) == INTEGER_CST
11807 && 0 != (tem = negate_expr (arg1))
11808 && TREE_CODE (tem) == INTEGER_CST
11809 && !TREE_OVERFLOW (tem))
11810 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11812 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11813 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11814 && TREE_CODE (arg1) == INTEGER_CST
11815 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11816 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11817 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11818 fold_convert (TREE_TYPE (arg0), arg1),
11819 TREE_OPERAND (arg0, 1)));
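/* E.g. "(x ^ 5) == 3" becomes "x == (5 ^ 3)", i.e. "x == 6",
   mirroring the PLUS/MINUS and NEGATE adjustments just above: move
   the constant operation to the constant side, where it folds. */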
11821 /* Transform comparisons of the form X +- C CMP X. */
11822 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11823 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11824 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11825 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11826 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11827 {
11828 tree cst = TREE_OPERAND (arg0, 1);
11830 if (code == EQ_EXPR
11831 && !integer_zerop (cst))
11832 return omit_two_operands (type, boolean_false_node,
11833 TREE_OPERAND (arg0, 0), arg1);
11834 else
11835 return omit_two_operands (type, boolean_true_node,
11836 TREE_OPERAND (arg0, 0), arg1);
11837 }
11839 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11840 for !=. Don't do this for ordered comparisons due to overflow. */
11841 if (TREE_CODE (arg0) == MINUS_EXPR
11842 && integer_zerop (arg1))
11843 return fold_build2 (code, type,
11844 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11846 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11847 if (TREE_CODE (arg0) == ABS_EXPR
11848 && (integer_zerop (arg1) || real_zerop (arg1)))
11849 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11851 /* If this is an EQ or NE comparison with zero and ARG0 is
11852 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11853 two operations, but the latter can be done in one less insn
11854 on machines that have only two-operand insns or on which a
11855 constant cannot be the first operand. */
11856 if (TREE_CODE (arg0) == BIT_AND_EXPR
11857 && integer_zerop (arg1))
11858 {
11859 tree arg00 = TREE_OPERAND (arg0, 0);
11860 tree arg01 = TREE_OPERAND (arg0, 1);
11861 if (TREE_CODE (arg00) == LSHIFT_EXPR
11862 && integer_onep (TREE_OPERAND (arg00, 0)))
11863 {
11864 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11865 arg01, TREE_OPERAND (arg00, 1));
11866 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11867 build_int_cst (TREE_TYPE (arg0), 1));
11868 return fold_build2 (code, type,
11869 fold_convert (TREE_TYPE (arg1), tem), arg1);
11870 }
11871 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11872 && integer_onep (TREE_OPERAND (arg01, 0)))
11873 {
11874 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11875 arg00, TREE_OPERAND (arg01, 1));
11876 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11877 build_int_cst (TREE_TYPE (arg0), 1));
11878 return fold_build2 (code, type,
11879 fold_convert (TREE_TYPE (arg1), tem), arg1);
11880 }
11881 }
11883 /* If this is an NE or EQ comparison of zero against the result of a
11884 signed MOD operation whose second operand is a power of 2, make
11885 the MOD operation unsigned since it is simpler and equivalent. */
11886 if (integer_zerop (arg1)
11887 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11888 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11889 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11890 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11891 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11892 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11894 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11895 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11896 fold_convert (newtype,
11897 TREE_OPERAND (arg0, 0)),
11898 fold_convert (newtype,
11899 TREE_OPERAND (arg0, 1)));
11901 return fold_build2 (code, type, newmod,
11902 fold_convert (newtype, arg1));
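/* Editor's note -- worked example (illustrative): for signed int x,
   "x % 8 == 0" becomes "(unsigned) x % 8U == 0".  The rewrite is only
   valid because the comparison is against zero and the divisor is a
   power of two, where signed and unsigned remainders agree on being
   zero or nonzero.  */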
11905 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11906 C1 is a valid shift constant, and C2 is a power of two, i.e.
11907 a single bit.  */
11908 if (TREE_CODE (arg0) == BIT_AND_EXPR
11909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11910 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11911 == INTEGER_CST
11912 && integer_pow2p (TREE_OPERAND (arg0, 1))
11913 && integer_zerop (arg1))
11915 tree itype = TREE_TYPE (arg0);
11916 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11917 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11919 /* Check for a valid shift count. */
11920 if (TREE_INT_CST_HIGH (arg001) == 0
11921 && TREE_INT_CST_LOW (arg001) < prec)
11923 tree arg01 = TREE_OPERAND (arg0, 1);
11924 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11925 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11926 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11927 can be rewritten as (X & (C2 << C1)) != 0. */
11928 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11930 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11931 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11932 return fold_build2 (code, type, tem, arg1);
11934 /* Otherwise, for signed (arithmetic) shifts,
11935 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11936 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11937 else if (!TYPE_UNSIGNED (itype))
11938 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11939 arg000, build_int_cst (itype, 0));
11940 /* Otherwise, for unsigned (logical) shifts,
11941 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11942 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
11943 else
11944 return omit_one_operand (type,
11945 code == EQ_EXPR ? integer_one_node
11946 : integer_zero_node,
11947 arg000);
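/* Editor's note -- illustrative sketch of the three outcomes above,
   assuming a 32-bit int x:
     ((x >> 3) & 4) != 0   ->  (x & 32) != 0    (4 << 3 fits)
     ((x >> 31) & 2) != 0  ->  x < 0            (signed shift)
     ((x >> 31) & 2) != 0  ->  false            (unsigned shift)  */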
11951 /* If this is an NE comparison of zero with an AND of one, remove the
11952 comparison since the AND will give the correct value. */
11953 if (code == NE_EXPR
11954 && integer_zerop (arg1)
11955 && TREE_CODE (arg0) == BIT_AND_EXPR
11956 && integer_onep (TREE_OPERAND (arg0, 1)))
11957 return fold_convert (type, arg0);
11959 /* If we have (A & C) == C where C is a power of 2, convert this into
11960 (A & C) != 0. Similarly for NE_EXPR. */
11961 if (TREE_CODE (arg0) == BIT_AND_EXPR
11962 && integer_pow2p (TREE_OPERAND (arg0, 1))
11963 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11964 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11965 arg0, fold_convert (TREE_TYPE (arg0),
11966 integer_zero_node));
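/* Editor's note (illustrative): with C = 8, "(a & 8) == 8" becomes
   "(a & 8) != 0" -- a single-bit mask can only produce 0 or the bit
   itself, so equality with C and being nonzero coincide.  */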
11968 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11969 bit, then fold the expression into A < 0 or A >= 0. */
11970 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11971 if (tem)
11972 return tem;
11974 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11975 Similarly for NE_EXPR. */
11976 if (TREE_CODE (arg0) == BIT_AND_EXPR
11977 && TREE_CODE (arg1) == INTEGER_CST
11978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11980 tree notc = fold_build1 (BIT_NOT_EXPR,
11981 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11982 TREE_OPERAND (arg0, 1));
11983 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11984 arg1, notc);
11985 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11986 if (integer_nonzerop (dandnotc))
11987 return omit_one_operand (type, rslt, arg0);
11990 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11991 Similarly for NE_EXPR. */
11992 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11993 && TREE_CODE (arg1) == INTEGER_CST
11994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11996 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11997 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11998 TREE_OPERAND (arg0, 1), notd);
11999 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12000 if (integer_nonzerop (candnotd))
12001 return omit_one_operand (type, rslt, arg0);
12004 /* Optimize comparisons of strlen vs zero to a compare of the
12005 first character of the string vs zero. To wit,
12006 strlen(ptr) == 0 => *ptr == 0
12007 strlen(ptr) != 0 => *ptr != 0
12008 Other cases should reduce to one of these two (or a constant)
12009 due to the return value of strlen being unsigned. */
12010 if (TREE_CODE (arg0) == CALL_EXPR
12011 && integer_zerop (arg1))
12013 tree fndecl = get_callee_fndecl (arg0);
12015 if (fndecl
12016 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12017 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12018 && call_expr_nargs (arg0) == 1
12019 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12021 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12022 return fold_build2 (code, type, iref,
12023 build_int_cst (TREE_TYPE (iref), 0));
12027 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12028 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12029 if (TREE_CODE (arg0) == RSHIFT_EXPR
12030 && integer_zerop (arg1)
12031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12033 tree arg00 = TREE_OPERAND (arg0, 0);
12034 tree arg01 = TREE_OPERAND (arg0, 1);
12035 tree itype = TREE_TYPE (arg00);
12036 if (TREE_INT_CST_HIGH (arg01) == 0
12037 && TREE_INT_CST_LOW (arg01)
12038 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12040 if (TYPE_UNSIGNED (itype))
12042 itype = signed_type_for (itype);
12043 arg00 = fold_convert (itype, arg00);
12045 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12046 type, arg00, build_int_cst (itype, 0));
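/* Editor's note -- example (assuming a 32-bit int x): "(x >> 31) != 0"
   folds to "x < 0" and "(x >> 31) == 0" folds to "x >= 0"; an unsigned
   operand is first converted to the corresponding signed type.  */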
12050 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12051 if (integer_zerop (arg1)
12052 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12053 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12054 TREE_OPERAND (arg0, 1));
12056 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12057 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12058 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12059 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12060 build_int_cst (TREE_TYPE (arg1), 0));
12061 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12062 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12063 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12064 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12065 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12066 build_int_cst (TREE_TYPE (arg1), 0));
12068 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12069 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12070 && TREE_CODE (arg1) == INTEGER_CST
12071 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12072 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12073 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12074 TREE_OPERAND (arg0, 1), arg1));
12076 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12077 (X & C) == 0 when C is a single bit. */
12078 if (TREE_CODE (arg0) == BIT_AND_EXPR
12079 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12080 && integer_zerop (arg1)
12081 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12083 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12084 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12085 TREE_OPERAND (arg0, 1));
12086 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12087 type, tem, build_int_cst (TREE_TYPE (tem), 0));
12090 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12091 constant C is a power of two, i.e. a single bit. */
12092 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12093 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12094 && integer_zerop (arg1)
12095 && integer_pow2p (TREE_OPERAND (arg0, 1))
12096 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12097 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12099 tree arg00 = TREE_OPERAND (arg0, 0);
12100 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12101 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12104 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12105 when C is a power of two, i.e. a single bit.  */
12106 if (TREE_CODE (arg0) == BIT_AND_EXPR
12107 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12108 && integer_zerop (arg1)
12109 && integer_pow2p (TREE_OPERAND (arg0, 1))
12110 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12111 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12113 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12114 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12115 arg000, TREE_OPERAND (arg0, 1));
12116 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12117 tem, build_int_cst (TREE_TYPE (tem), 0));
12120 if (integer_zerop (arg1)
12121 && tree_expr_nonzero_p (arg0))
12123 tree res = constant_boolean_node (code == NE_EXPR, type);
12124 return omit_one_operand (type, res, arg0);
12127 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12128 if (TREE_CODE (arg0) == NEGATE_EXPR
12129 && TREE_CODE (arg1) == NEGATE_EXPR)
12130 return fold_build2 (code, type,
12131 TREE_OPERAND (arg0, 0),
12132 TREE_OPERAND (arg1, 0));
12134 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
12135 if (TREE_CODE (arg0) == BIT_AND_EXPR
12136 && TREE_CODE (arg1) == BIT_AND_EXPR)
12138 tree arg00 = TREE_OPERAND (arg0, 0);
12139 tree arg01 = TREE_OPERAND (arg0, 1);
12140 tree arg10 = TREE_OPERAND (arg1, 0);
12141 tree arg11 = TREE_OPERAND (arg1, 1);
12142 tree itype = TREE_TYPE (arg0);
12144 if (operand_equal_p (arg01, arg11, 0))
12145 return fold_build2 (code, type,
12146 fold_build2 (BIT_AND_EXPR, itype,
12147 fold_build2 (BIT_XOR_EXPR, itype,
12148 arg00, arg10),
12149 arg01),
12150 build_int_cst (itype, 0));
12152 if (operand_equal_p (arg01, arg10, 0))
12153 return fold_build2 (code, type,
12154 fold_build2 (BIT_AND_EXPR, itype,
12155 fold_build2 (BIT_XOR_EXPR, itype,
12156 arg00, arg11),
12157 arg01),
12158 build_int_cst (itype, 0));
12160 if (operand_equal_p (arg00, arg11, 0))
12161 return fold_build2 (code, type,
12162 fold_build2 (BIT_AND_EXPR, itype,
12163 fold_build2 (BIT_XOR_EXPR, itype,
12164 arg01, arg10),
12165 arg00),
12166 build_int_cst (itype, 0));
12168 if (operand_equal_p (arg00, arg10, 0))
12169 return fold_build2 (code, type,
12170 fold_build2 (BIT_AND_EXPR, itype,
12171 fold_build2 (BIT_XOR_EXPR, itype,
12172 arg01, arg11),
12173 arg00),
12174 build_int_cst (itype, 0));
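/* Editor's note (illustrative): each of the four symmetric cases above
   rewrites e.g. "(x & m) == (y & m)" as "((x ^ y) & m) == 0", so the
   shared mask is applied once to the XOR of the distinct operands.  */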
12177 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12178 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12180 tree arg00 = TREE_OPERAND (arg0, 0);
12181 tree arg01 = TREE_OPERAND (arg0, 1);
12182 tree arg10 = TREE_OPERAND (arg1, 0);
12183 tree arg11 = TREE_OPERAND (arg1, 1);
12184 tree itype = TREE_TYPE (arg0);
12186 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12187 operand_equal_p guarantees no side-effects so we don't need
12188 to use omit_one_operand on Z. */
12189 if (operand_equal_p (arg01, arg11, 0))
12190 return fold_build2 (code, type, arg00, arg10);
12191 if (operand_equal_p (arg01, arg10, 0))
12192 return fold_build2 (code, type, arg00, arg11);
12193 if (operand_equal_p (arg00, arg11, 0))
12194 return fold_build2 (code, type, arg01, arg10);
12195 if (operand_equal_p (arg00, arg10, 0))
12196 return fold_build2 (code, type, arg01, arg11);
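/* Editor's note (illustrative): "(x ^ z) == (y ^ z)" folds to
   "x == y"; XORing both sides with the same z cancels out, and
   operand_equal_p has already ruled out side effects in z.  */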
12198 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12199 if (TREE_CODE (arg01) == INTEGER_CST
12200 && TREE_CODE (arg11) == INTEGER_CST)
12201 return fold_build2 (code, type,
12202 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12203 fold_build2 (BIT_XOR_EXPR, itype,
12204 arg01, arg11)),
12205 arg10);
12208 /* Attempt to simplify equality/inequality comparisons of complex
12209 values. Only lower the comparison if the result is known or
12210 can be simplified to a single scalar comparison. */
12211 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12212 || TREE_CODE (arg0) == COMPLEX_CST)
12213 && (TREE_CODE (arg1) == COMPLEX_EXPR
12214 || TREE_CODE (arg1) == COMPLEX_CST))
12216 tree real0, imag0, real1, imag1;
12217 tree rcond, icond;
12219 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12221 real0 = TREE_OPERAND (arg0, 0);
12222 imag0 = TREE_OPERAND (arg0, 1);
12226 real0 = TREE_REALPART (arg0);
12227 imag0 = TREE_IMAGPART (arg0);
12230 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12232 real1 = TREE_OPERAND (arg1, 0);
12233 imag1 = TREE_OPERAND (arg1, 1);
12237 real1 = TREE_REALPART (arg1);
12238 imag1 = TREE_IMAGPART (arg1);
12241 rcond = fold_binary (code, type, real0, real1);
12242 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12244 if (integer_zerop (rcond))
12246 if (code == EQ_EXPR)
12247 return omit_two_operands (type, boolean_false_node,
12248 imag0, imag1);
12249 return fold_build2 (NE_EXPR, type, imag0, imag1);
12253 if (code == NE_EXPR)
12254 return omit_two_operands (type, boolean_true_node,
12255 imag0, imag1);
12256 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12260 icond = fold_binary (code, type, imag0, imag1);
12261 if (icond && TREE_CODE (icond) == INTEGER_CST)
12263 if (integer_zerop (icond))
12265 if (code == EQ_EXPR)
12266 return omit_two_operands (type, boolean_false_node,
12267 real0, real1);
12268 return fold_build2 (NE_EXPR, type, real0, real1);
12272 if (code == NE_EXPR)
12273 return omit_two_operands (type, boolean_true_node,
12274 real0, real1);
12275 return fold_build2 (EQ_EXPR, type, real0, real1);
12280 return NULL_TREE;
12282 case LT_EXPR:
12283 case GT_EXPR:
12284 case LE_EXPR:
12285 case GE_EXPR:
12286 tem = fold_comparison (code, type, op0, op1);
12287 if (tem != NULL_TREE)
12288 return tem;
12290 /* Transform comparisons of the form X +- C CMP X. */
12291 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12292 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12293 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12294 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12295 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12296 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12298 tree arg01 = TREE_OPERAND (arg0, 1);
12299 enum tree_code code0 = TREE_CODE (arg0);
12300 int is_positive;
12302 if (TREE_CODE (arg01) == REAL_CST)
12303 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12305 is_positive = tree_int_cst_sgn (arg01);
12307 /* (X - c) > X becomes false. */
12308 if (code == GT_EXPR
12309 && ((code0 == MINUS_EXPR && is_positive >= 0)
12310 || (code0 == PLUS_EXPR && is_positive <= 0)))
12312 if (TREE_CODE (arg01) == INTEGER_CST
12313 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12314 fold_overflow_warning (("assuming signed overflow does not "
12315 "occur when assuming that (X - c) > X "
12316 "is always false"),
12317 WARN_STRICT_OVERFLOW_ALL);
12318 return constant_boolean_node (0, type);
12321 /* Likewise (X + c) < X becomes false. */
12322 if (code == LT_EXPR
12323 && ((code0 == PLUS_EXPR && is_positive >= 0)
12324 || (code0 == MINUS_EXPR && is_positive <= 0)))
12326 if (TREE_CODE (arg01) == INTEGER_CST
12327 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12328 fold_overflow_warning (("assuming signed overflow does not "
12329 "occur when assuming that "
12330 "(X + c) < X is always false"),
12331 WARN_STRICT_OVERFLOW_ALL);
12332 return constant_boolean_node (0, type);
12335 /* Convert (X - c) <= X to true. */
12336 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12337 && code == LE_EXPR
12338 && ((code0 == MINUS_EXPR && is_positive >= 0)
12339 || (code0 == PLUS_EXPR && is_positive <= 0)))
12341 if (TREE_CODE (arg01) == INTEGER_CST
12342 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12343 fold_overflow_warning (("assuming signed overflow does not "
12344 "occur when assuming that "
12345 "(X - c) <= X is always true"),
12346 WARN_STRICT_OVERFLOW_ALL);
12347 return constant_boolean_node (1, type);
12350 /* Convert (X + c) >= X to true. */
12351 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12352 && code == GE_EXPR
12353 && ((code0 == PLUS_EXPR && is_positive >= 0)
12354 || (code0 == MINUS_EXPR && is_positive <= 0)))
12356 if (TREE_CODE (arg01) == INTEGER_CST
12357 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12358 fold_overflow_warning (("assuming signed overflow does not "
12359 "occur when assuming that "
12360 "(X + c) >= X is always true"),
12361 WARN_STRICT_OVERFLOW_ALL);
12362 return constant_boolean_node (1, type);
12365 if (TREE_CODE (arg01) == INTEGER_CST)
12367 /* Convert X + c > X and X - c < X to true for integers. */
12368 if (code == GT_EXPR
12369 && ((code0 == PLUS_EXPR && is_positive > 0)
12370 || (code0 == MINUS_EXPR && is_positive < 0)))
12372 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12373 fold_overflow_warning (("assuming signed overflow does "
12374 "not occur when assuming that "
12375 "(X + c) > X is always true"),
12376 WARN_STRICT_OVERFLOW_ALL);
12377 return constant_boolean_node (1, type);
12380 if (code == LT_EXPR
12381 && ((code0 == MINUS_EXPR && is_positive > 0)
12382 || (code0 == PLUS_EXPR && is_positive < 0)))
12384 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12385 fold_overflow_warning (("assuming signed overflow does "
12386 "not occur when assuming that "
12387 "(X - c) < X is always true"),
12388 WARN_STRICT_OVERFLOW_ALL);
12389 return constant_boolean_node (1, type);
12392 /* Convert X + c <= X and X - c >= X to false for integers. */
12393 if (code == LE_EXPR
12394 && ((code0 == PLUS_EXPR && is_positive > 0)
12395 || (code0 == MINUS_EXPR && is_positive < 0)))
12397 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12398 fold_overflow_warning (("assuming signed overflow does "
12399 "not occur when assuming that "
12400 "(X + c) <= X is always false"),
12401 WARN_STRICT_OVERFLOW_ALL);
12402 return constant_boolean_node (0, type);
12405 if (code == GE_EXPR
12406 && ((code0 == MINUS_EXPR && is_positive > 0)
12407 || (code0 == PLUS_EXPR && is_positive < 0)))
12409 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12410 fold_overflow_warning (("assuming signed overflow does "
12411 "not occur when assuming that "
12412 "(X - c) >= X is always false"),
12413 WARN_STRICT_OVERFLOW_ALL);
12414 return constant_boolean_node (0, type);
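/* Editor's note -- illustrative summary of the block above for signed
   x with undefined overflow (e.g. -fstrict-overflow):
     x + 1 > x  -> true      x + 1 <= x  -> false
     x - 1 < x  -> true      x - 1 >= x  -> false
   and each fold may emit a -Wstrict-overflow warning.  */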
12419 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12420 This transformation affects the cases which are handled in later
12421 optimizations involving comparisons with non-negative constants. */
12422 if (TREE_CODE (arg1) == INTEGER_CST
12423 && TREE_CODE (arg0) != INTEGER_CST
12424 && tree_int_cst_sgn (arg1) > 0)
12426 if (code == GE_EXPR)
12428 arg1 = const_binop (MINUS_EXPR, arg1,
12429 build_int_cst (TREE_TYPE (arg1), 1), 0);
12430 return fold_build2 (GT_EXPR, type, arg0,
12431 fold_convert (TREE_TYPE (arg0), arg1));
12433 if (code == LT_EXPR)
12435 arg1 = const_binop (MINUS_EXPR, arg1,
12436 build_int_cst (TREE_TYPE (arg1), 1), 0);
12437 return fold_build2 (LE_EXPR, type, arg0,
12438 fold_convert (TREE_TYPE (arg0), arg1));
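/* Editor's note (illustrative): "x >= 5" becomes "x > 4" and "x < 5"
   becomes "x <= 4"; canonicalizing on GT/LE lets the constant
   comparisons handled below match more cases.  */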
12442 /* Comparisons with the highest or lowest possible integer of
12443 the specified precision will have known values. */
12445 tree arg1_type = TREE_TYPE (arg1);
12446 unsigned int width = TYPE_PRECISION (arg1_type);
12448 if (TREE_CODE (arg1) == INTEGER_CST
12449 && !TREE_OVERFLOW (arg1)
12450 && width <= 2 * HOST_BITS_PER_WIDE_INT
12451 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12453 HOST_WIDE_INT signed_max_hi;
12454 unsigned HOST_WIDE_INT signed_max_lo;
12455 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12457 if (width <= HOST_BITS_PER_WIDE_INT)
12459 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12460 - 1;
12461 signed_max_hi = 0;
12462 max_hi = 0;
12464 if (TYPE_UNSIGNED (arg1_type))
12466 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12467 min_lo = 0;
12468 min_hi = 0;
12470 else
12472 max_lo = signed_max_lo;
12473 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12474 min_hi = -1;
12477 else
12479 width -= HOST_BITS_PER_WIDE_INT;
12480 signed_max_lo = -1;
12481 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12482 - 1;
12483 max_lo = -1;
12484 min_lo = 0;
12486 if (TYPE_UNSIGNED (arg1_type))
12488 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12489 min_hi = 0;
12491 else
12493 max_hi = signed_max_hi;
12494 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12498 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12499 && TREE_INT_CST_LOW (arg1) == max_lo)
12500 switch (code)
12502 case GT_EXPR:
12503 return omit_one_operand (type, integer_zero_node, arg0);
12505 case GE_EXPR:
12506 return fold_build2 (EQ_EXPR, type, op0, op1);
12508 case LE_EXPR:
12509 return omit_one_operand (type, integer_one_node, arg0);
12511 case LT_EXPR:
12512 return fold_build2 (NE_EXPR, type, op0, op1);
12514 /* The GE_EXPR and LT_EXPR cases above are not normally
12515 reached because of previous transformations. */
12517 default:
12518 break;
12520 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12521 == max_hi
12522 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12523 switch (code)
12525 case GT_EXPR:
12526 arg1 = const_binop (PLUS_EXPR, arg1,
12527 build_int_cst (TREE_TYPE (arg1), 1), 0);
12528 return fold_build2 (EQ_EXPR, type,
12529 fold_convert (TREE_TYPE (arg1), arg0),
12530 arg1);
12531 case LE_EXPR:
12532 arg1 = const_binop (PLUS_EXPR, arg1,
12533 build_int_cst (TREE_TYPE (arg1), 1), 0);
12534 return fold_build2 (NE_EXPR, type,
12535 fold_convert (TREE_TYPE (arg1), arg0),
12536 arg1);
12537 default:
12538 break;
12540 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12541 == min_hi
12542 && TREE_INT_CST_LOW (arg1) == min_lo)
12543 switch (code)
12545 case LT_EXPR:
12546 return omit_one_operand (type, integer_zero_node, arg0);
12548 case LE_EXPR:
12549 return fold_build2 (EQ_EXPR, type, op0, op1);
12551 case GE_EXPR:
12552 return omit_one_operand (type, integer_one_node, arg0);
12554 case GT_EXPR:
12555 return fold_build2 (NE_EXPR, type, op0, op1);
12557 default:
12558 break;
12560 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12561 == min_hi
12562 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12563 switch (code)
12565 case GE_EXPR:
12566 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12567 return fold_build2 (NE_EXPR, type,
12568 fold_convert (TREE_TYPE (arg1), arg0),
12569 arg1);
12570 case LT_EXPR:
12571 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12572 return fold_build2 (EQ_EXPR, type,
12573 fold_convert (TREE_TYPE (arg1), arg0),
12574 arg1);
12575 default:
12576 break;
12579 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12580 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12581 && TYPE_UNSIGNED (arg1_type)
12582 /* We will flip the signedness of the comparison operator
12583 associated with the mode of arg1, so the sign bit is
12584 specified by this mode. Check that arg1 is the signed
12585 max associated with this sign bit. */
12586 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12587 /* signed_type does not work on pointer types. */
12588 && INTEGRAL_TYPE_P (arg1_type))
12590 /* The following case also applies to X < signed_max+1
12591 and X >= signed_max+1 because of previous transformations.  */
12592 if (code == LE_EXPR || code == GT_EXPR)
12594 tree st;
12595 st = signed_type_for (TREE_TYPE (arg1));
12596 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12597 type, fold_convert (st, arg0),
12598 build_int_cst (st, 0));
12604 /* If we are comparing an ABS_EXPR with a constant, we can
12605 convert all the cases into explicit comparisons, but they may
12606 well not be faster than doing the ABS and one comparison.
12607 But ABS (X) <= C is a range comparison, which becomes a subtraction
12608 and a comparison, and is probably faster. */
12609 if (code == LE_EXPR
12610 && TREE_CODE (arg1) == INTEGER_CST
12611 && TREE_CODE (arg0) == ABS_EXPR
12612 && ! TREE_SIDE_EFFECTS (arg0)
12613 && (0 != (tem = negate_expr (arg1)))
12614 && TREE_CODE (tem) == INTEGER_CST
12615 && !TREE_OVERFLOW (tem))
12616 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12617 build2 (GE_EXPR, type,
12618 TREE_OPERAND (arg0, 0), tem),
12619 build2 (LE_EXPR, type,
12620 TREE_OPERAND (arg0, 0), arg1));
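/* Editor's note (illustrative): "abs (x) <= 7" is expanded to
   "x >= -7 && x <= 7", a range check that is usually cheaper than
   computing the absolute value and comparing it.  */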
12622 /* Convert ABS_EXPR<x> >= 0 to true. */
12623 strict_overflow_p = false;
12624 if (code == GE_EXPR
12625 && (integer_zerop (arg1)
12626 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12627 && real_zerop (arg1)))
12628 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12630 if (strict_overflow_p)
12631 fold_overflow_warning (("assuming signed overflow does not occur "
12632 "when simplifying comparison of "
12633 "absolute value and zero"),
12634 WARN_STRICT_OVERFLOW_CONDITIONAL);
12635 return omit_one_operand (type, integer_one_node, arg0);
12638 /* Convert ABS_EXPR<x> < 0 to false. */
12639 strict_overflow_p = false;
12640 if (code == LT_EXPR
12641 && (integer_zerop (arg1) || real_zerop (arg1))
12642 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12644 if (strict_overflow_p)
12645 fold_overflow_warning (("assuming signed overflow does not occur "
12646 "when simplifying comparison of "
12647 "absolute value and zero"),
12648 WARN_STRICT_OVERFLOW_CONDITIONAL);
12649 return omit_one_operand (type, integer_zero_node, arg0);
12652 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12653 and similarly for >= into !=. */
12654 if ((code == LT_EXPR || code == GE_EXPR)
12655 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12656 && TREE_CODE (arg1) == LSHIFT_EXPR
12657 && integer_onep (TREE_OPERAND (arg1, 0)))
12658 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12659 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12660 TREE_OPERAND (arg1, 1)),
12661 build_int_cst (TREE_TYPE (arg0), 0));
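/* Editor's note (illustrative): for unsigned x, "x < (1 << y)" folds
   to "(x >> y) == 0" and "x >= (1 << y)" to "(x >> y) != 0", trading
   the shift of the constant 1 for a shift of x.  */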
12663 if ((code == LT_EXPR || code == GE_EXPR)
12664 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12665 && CONVERT_EXPR_P (arg1)
12666 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12667 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12668 return
12669 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12670 fold_convert (TREE_TYPE (arg0),
12671 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12672 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12673 1))),
12674 build_int_cst (TREE_TYPE (arg0), 0));
12676 return NULL_TREE;
12678 case UNORDERED_EXPR:
12679 case ORDERED_EXPR:
12680 case UNLT_EXPR:
12681 case UNLE_EXPR:
12682 case UNGT_EXPR:
12683 case UNGE_EXPR:
12684 case UNEQ_EXPR:
12685 case LTGT_EXPR:
12686 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12688 t1 = fold_relational_const (code, type, arg0, arg1);
12689 if (t1 != NULL_TREE)
12690 return t1;
12693 /* If the first operand is NaN, the result is constant. */
12694 if (TREE_CODE (arg0) == REAL_CST
12695 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12696 && (code != LTGT_EXPR || ! flag_trapping_math))
12698 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12699 ? integer_zero_node
12700 : integer_one_node;
12701 return omit_one_operand (type, t1, arg1);
12704 /* If the second operand is NaN, the result is constant. */
12705 if (TREE_CODE (arg1) == REAL_CST
12706 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12707 && (code != LTGT_EXPR || ! flag_trapping_math))
12709 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12710 ? integer_zero_node
12711 : integer_one_node;
12712 return omit_one_operand (type, t1, arg0);
12715 /* Simplify unordered comparison of something with itself. */
12716 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12717 && operand_equal_p (arg0, arg1, 0))
12718 return constant_boolean_node (1, type);
12720 if (code == LTGT_EXPR
12721 && !flag_trapping_math
12722 && operand_equal_p (arg0, arg1, 0))
12723 return constant_boolean_node (0, type);
12725 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12727 tree targ0 = strip_float_extensions (arg0);
12728 tree targ1 = strip_float_extensions (arg1);
12729 tree newtype = TREE_TYPE (targ0);
12731 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12732 newtype = TREE_TYPE (targ1);
12734 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12735 return fold_build2 (code, type, fold_convert (newtype, targ0),
12736 fold_convert (newtype, targ1));
12739 return NULL_TREE;
12741 case COMPOUND_EXPR:
12742 /* When pedantic, a compound expression can be neither an lvalue
12743 nor an integer constant expression. */
12744 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12745 return NULL_TREE;
12746 /* Don't let (0, 0) be null pointer constant. */
12747 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12748 : fold_convert (type, arg1);
12749 return pedantic_non_lvalue (tem);
12751 case COMPLEX_EXPR:
12752 if ((TREE_CODE (arg0) == REAL_CST
12753 && TREE_CODE (arg1) == REAL_CST)
12754 || (TREE_CODE (arg0) == INTEGER_CST
12755 && TREE_CODE (arg1) == INTEGER_CST))
12756 return build_complex (type, arg0, arg1);
12757 return NULL_TREE;
12759 case ASSERT_EXPR:
12760 /* An ASSERT_EXPR should never be passed to fold_binary. */
12761 gcc_unreachable ();
12763 default:
12764 return NULL_TREE;
12765 } /* switch (code) */
12768 /* Callback for walk_tree, looking for LABEL_EXPR.
12769 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12770 Do not check the sub-tree of GOTO_EXPR. */
12772 static tree
12773 contains_label_1 (tree *tp,
12774 int *walk_subtrees,
12775 void *data ATTRIBUTE_UNUSED)
12777 switch (TREE_CODE (*tp))
12779 case LABEL_EXPR:
12780 return *tp;
12781 case GOTO_EXPR:
12782 *walk_subtrees = 0;
12786 default:
12787 return NULL_TREE;
12789 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12790 accessible from outside the sub-tree. Returns NULL_TREE if no
12791 addressable label is found. */
12793 static bool
12794 contains_label_p (tree st)
12796 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12799 /* Fold a ternary expression of code CODE and type TYPE with operands
12800 OP0, OP1, and OP2. Return the folded expression if folding is
12801 successful. Otherwise, return NULL_TREE. */
12803 tree
12804 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12807 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12808 enum tree_code_class kind = TREE_CODE_CLASS (code);
12810 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12811 && TREE_CODE_LENGTH (code) == 3);
12813 /* Strip any conversions that don't change the mode. This is safe
12814 for every expression, except for a comparison expression because
12815 its signedness is derived from its operands. So, in the latter
12816 case, only strip conversions that don't change the signedness.
12818 Note that this is done as an internal manipulation within the
12819 constant folder, in order to find the simplest representation of
12820 the arguments so that their form can be studied.  In any case,
12821 the appropriate type conversions should be put back in the tree
12822 that will get out of the constant folder. */
12824 if (op0)
12826 arg0 = op0;
12827 STRIP_NOPS (arg0);
12830 if (op1)
12832 arg1 = op1;
12833 STRIP_NOPS (arg1);
12836 switch (code)
12837 case COMPONENT_REF:
12838 if (TREE_CODE (arg0) == CONSTRUCTOR
12839 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12841 unsigned HOST_WIDE_INT idx;
12842 tree field, value;
12843 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12844 if (field == arg1)
12845 return value;
12847 return NULL_TREE;
12849 case COND_EXPR:
12850 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12851 so all simple results must be passed through pedantic_non_lvalue. */
12852 if (TREE_CODE (arg0) == INTEGER_CST)
12854 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12855 tem = integer_zerop (arg0) ? op2 : op1;
12856 /* Only optimize constant conditions when the selected branch
12857 has the same type as the COND_EXPR. This avoids optimizing
12858 away "c ? x : throw", where the throw has a void type.
12859 Avoid throwing away that operand which contains label. */
12860 if ((!TREE_SIDE_EFFECTS (unused_op)
12861 || !contains_label_p (unused_op))
12862 && (! VOID_TYPE_P (TREE_TYPE (tem))
12863 || VOID_TYPE_P (type)))
12864 return pedantic_non_lvalue (tem);
12867 if (operand_equal_p (arg1, op2, 0))
12868 return pedantic_omit_one_operand (type, arg1, arg0);
12870 /* If we have A op B ? A : C, we may be able to convert this to a
12871 simpler expression, depending on the operation and the values
12872 of B and C. Signed zeros prevent all of these transformations,
12873 for reasons given above each one.
12875 Also try swapping the arguments and inverting the conditional. */
12876 if (COMPARISON_CLASS_P (arg0)
12877 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12878 arg1, TREE_OPERAND (arg0, 1))
12879 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12881 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12882 if (tem)
12883 return tem;
12886 if (COMPARISON_CLASS_P (arg0)
12887 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12889 TREE_OPERAND (arg0, 1))
12890 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12892 tem = fold_truth_not_expr (arg0);
12893 if (tem && COMPARISON_CLASS_P (tem))
12895 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12896 if (tem)
12897 return tem;
12901 /* If the second operand is simpler than the third, swap them
12902 since that produces better jump optimization results. */
12903 if (truth_value_p (TREE_CODE (arg0))
12904 && tree_swap_operands_p (op1, op2, false))
12906 /* See if this can be inverted. If it can't, possibly because
12907 it was a floating-point inequality comparison, don't do
12908 anything.  */
12909 tem = fold_truth_not_expr (arg0);
12910 if (tem)
12911 return fold_build3 (code, type, tem, op2, op1);
12914 /* Convert A ? 1 : 0 to simply A. */
12915 if (integer_onep (op1)
12916 && integer_zerop (op2)
12917 /* If we try to convert OP0 to our type, the
12918 call to fold will try to move the conversion inside
12919 a COND, which will recurse. In that case, the COND_EXPR
12920 is probably the best choice, so leave it alone. */
12921 && type == TREE_TYPE (arg0))
12922 return pedantic_non_lvalue (arg0);
12924 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12925 over COND_EXPR in cases such as floating point comparisons. */
12926 if (integer_zerop (op1)
12927 && integer_onep (op2)
12928 && truth_value_p (TREE_CODE (arg0)))
12929 return pedantic_non_lvalue (fold_convert (type,
12930 invert_truthvalue (arg0)));
12932 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12933 if (TREE_CODE (arg0) == LT_EXPR
12934 && integer_zerop (TREE_OPERAND (arg0, 1))
12935 && integer_zerop (op2)
12936 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12938 /* sign_bit_p only checks ARG1 bits within A's precision.
12939 If <sign bit of A> has wider type than A, bits outside
12940 of A's precision in <sign bit of A> need to be checked.
12941 If they are all 0, this optimization needs to be done
12942 in unsigned A's type, if they are all 1 in signed A's type,
12943 otherwise this can't be done. */
12944 if (TYPE_PRECISION (TREE_TYPE (tem))
12945 < TYPE_PRECISION (TREE_TYPE (arg1))
12946 && TYPE_PRECISION (TREE_TYPE (tem))
12947 < TYPE_PRECISION (type))
12949 unsigned HOST_WIDE_INT mask_lo;
12950 HOST_WIDE_INT mask_hi;
12951 int inner_width, outer_width;
12952 tree tem_type;
12954 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12955 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12956 if (outer_width > TYPE_PRECISION (type))
12957 outer_width = TYPE_PRECISION (type);
12959 if (outer_width > HOST_BITS_PER_WIDE_INT)
12961 mask_hi = ((unsigned HOST_WIDE_INT) -1
12962 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12963 mask_lo = -1;
12965 else
12967 mask_hi = 0;
12968 mask_lo = ((unsigned HOST_WIDE_INT) -1
12969 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12971 if (inner_width > HOST_BITS_PER_WIDE_INT)
12973 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12974 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12975 mask_lo = 0;
12977 else
12978 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12979 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12981 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12982 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12984 tem_type = signed_type_for (TREE_TYPE (tem));
12985 tem = fold_convert (tem_type, tem);
12987 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12988 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12990 tem_type = unsigned_type_for (TREE_TYPE (tem));
12991 tem = fold_convert (tem_type, tem);
12993 else
12994 tem = NULL_TREE;
12997 if (tem)
12998 return fold_convert (type,
12999 fold_build2 (BIT_AND_EXPR,
13000 TREE_TYPE (tem), tem,
13001 fold_convert (TREE_TYPE (tem),
13002 arg1)));
13005 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13006 already handled above. */
13007 if (TREE_CODE (arg0) == BIT_AND_EXPR
13008 && integer_onep (TREE_OPERAND (arg0, 1))
13009 && integer_zerop (op2)
13010 && integer_pow2p (arg1))
13012 tree tem = TREE_OPERAND (arg0, 0);
13014 if (TREE_CODE (tem) == RSHIFT_EXPR
13015 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13016 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13017 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13018 return fold_build2 (BIT_AND_EXPR, type,
13019 TREE_OPERAND (tem, 0), arg1);
13022 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13023 is probably obsolete because the first operand should be a
13024 truth value (that's why we have the two cases above), but let's
13025 leave it in until we can confirm this for all front-ends. */
13026 if (integer_zerop (op2)
13027 && TREE_CODE (arg0) == NE_EXPR
13028 && integer_zerop (TREE_OPERAND (arg0, 1))
13029 && integer_pow2p (arg1)
13030 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13031 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13032 arg1, OEP_ONLY_CONST))
13033 return pedantic_non_lvalue (fold_convert (type,
13034 TREE_OPERAND (arg0, 0)));
13036 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13037 if (integer_zerop (op2)
13038 && truth_value_p (TREE_CODE (arg0))
13039 && truth_value_p (TREE_CODE (arg1)))
13040 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13041 fold_convert (type, arg0),
13042 fold_convert (type, arg1));
13044 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13045 if (integer_onep (op2)
13046 && truth_value_p (TREE_CODE (arg0))
13047 && truth_value_p (TREE_CODE (arg1)))
13049 /* Only perform transformation if ARG0 is easily inverted. */
13050 tem = fold_truth_not_expr (arg0);
13051 if (tem)
13052 return fold_build2 (TRUTH_ORIF_EXPR, type,
13053 fold_convert (type, tem),
13054 fold_convert (type, arg1));
13057 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13058 if (integer_zerop (arg1)
13059 && truth_value_p (TREE_CODE (arg0))
13060 && truth_value_p (TREE_CODE (op2)))
13062 /* Only perform transformation if ARG0 is easily inverted. */
13063 tem = fold_truth_not_expr (arg0);
13064 if (tem)
13065 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13066 fold_convert (type, tem),
13067 fold_convert (type, op2));
13070 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13071 if (integer_onep (arg1)
13072 && truth_value_p (TREE_CODE (arg0))
13073 && truth_value_p (TREE_CODE (op2)))
13074 return fold_build2 (TRUTH_ORIF_EXPR, type,
13075 fold_convert (type, arg0),
13076 fold_convert (type, op2));
13078 return NULL_TREE;
13080 case CALL_EXPR:
13081 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13082 of fold_ternary on them. */
13083 gcc_unreachable ();
13085 case BIT_FIELD_REF:
13086 if ((TREE_CODE (arg0) == VECTOR_CST
13087 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13088 && type == TREE_TYPE (TREE_TYPE (arg0)))
13090 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13091 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13093 if (width != 0
13094 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13095 && (idx % width) == 0
13096 && (idx = idx / width)
13097 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13099 tree elements = NULL_TREE;
13101 if (TREE_CODE (arg0) == VECTOR_CST)
13102 elements = TREE_VECTOR_CST_ELTS (arg0);
13103 else
13105 unsigned HOST_WIDE_INT idx;
13106 tree value;
13108 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13109 elements = tree_cons (NULL_TREE, value, elements);
13111 while (idx-- > 0 && elements)
13112 elements = TREE_CHAIN (elements);
13113 if (elements)
13114 return TREE_VALUE (elements);
13115 else
13116 return fold_convert (type, integer_zero_node);
13120 /* A bit-field-ref that referenced the full argument can be stripped. */
13121 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13122 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13123 && integer_zerop (op2))
13124 return fold_convert (type, arg0);
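/* Editor's note (illustrative): e.g. BIT_FIELD_REF <v, 32, 64> on a
   constant vector of 32-bit elements extracts element 64/32 == 2 as a
   scalar constant; the trailing case strips a ref that covers the
   whole of an integral argument.  */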
13130 } /* switch (code) */
13133 /* Perform constant folding and related simplification of EXPR.
13134 The related simplifications include x*1 => x, x*0 => 0, etc.,
13135 and application of the associative law.
13136 NOP_EXPR conversions may be removed freely (as long as we
13137 are careful not to change the type of the overall expression).
13138 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13139 but we can constant-fold them if they have constant operands. */
13141 #ifdef ENABLE_FOLD_CHECKING
13142 # define fold(x) fold_1 (x)
13143 static tree fold_1 (tree);
13144 static
13145 #endif
13146 tree
13147 fold (tree expr)
13149 const tree t = expr;
13150 enum tree_code code = TREE_CODE (t);
13151 enum tree_code_class kind = TREE_CODE_CLASS (code);
13152 tree tem;
13154 /* Return right away if a constant. */
13155 if (kind == tcc_constant)
13156 return t;
13158 /* CALL_EXPR-like objects with variable numbers of operands are
13159 treated specially. */
13160 if (kind == tcc_vl_exp)
13162 if (code == CALL_EXPR)
13164 tem = fold_call_expr (expr, false);
13165 return tem ? tem : expr;
13167 return expr;
13170 if (IS_EXPR_CODE_CLASS (kind))
13172 tree type = TREE_TYPE (t);
13173 tree op0, op1, op2;
13175 switch (TREE_CODE_LENGTH (code))
13177 case 1:
13178 op0 = TREE_OPERAND (t, 0);
13179 tem = fold_unary (code, type, op0);
13180 return tem ? tem : expr;
13181 case 2:
13182 op0 = TREE_OPERAND (t, 0);
13183 op1 = TREE_OPERAND (t, 1);
13184 tem = fold_binary (code, type, op0, op1);
13185 return tem ? tem : expr;
13186 case 3:
13187 op0 = TREE_OPERAND (t, 0);
13188 op1 = TREE_OPERAND (t, 1);
13189 op2 = TREE_OPERAND (t, 2);
13190 tem = fold_ternary (code, type, op0, op1, op2);
13191 return tem ? tem : expr;
13192 default:
13193 break;
13197 switch (code)
13199 case ARRAY_REF:
13201 tree op0 = TREE_OPERAND (t, 0);
13202 tree op1 = TREE_OPERAND (t, 1);
13204 if (TREE_CODE (op1) == INTEGER_CST
13205 && TREE_CODE (op0) == CONSTRUCTOR
13206 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13208 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13209 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13210 unsigned HOST_WIDE_INT begin = 0;
13212 /* Find a matching index by means of a binary search. */
13213 while (begin != end)
13215 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13216 tree index = VEC_index (constructor_elt, elts, middle)->index;
13218 if (TREE_CODE (index) == INTEGER_CST
13219 && tree_int_cst_lt (index, op1))
13220 begin = middle + 1;
13221 else if (TREE_CODE (index) == INTEGER_CST
13222 && tree_int_cst_lt (op1, index))
13223 end = middle;
13224 else if (TREE_CODE (index) == RANGE_EXPR
13225 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13226 begin = middle + 1;
13227 else if (TREE_CODE (index) == RANGE_EXPR
13228 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13229 end = middle;
13230 else
13231 return VEC_index (constructor_elt, elts, middle)->value;
13235 return t;
13238 case CONST_DECL:
13239 return fold (DECL_INITIAL (t));
13241 default:
13242 return t;
13243 } /* switch (code) */
13246 #ifdef ENABLE_FOLD_CHECKING
13247 #undef fold
13249 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13250 static void fold_check_failed (const_tree, const_tree);
13251 void print_fold_checksum (const_tree);
13253 /* When --enable-checking=fold, compute a digest of expr before
13254 and after actual fold call to see if fold did not accidentally
13255 change original expr. */
13257 tree
13258 fold (tree expr)
13260 tree ret;
13261 struct md5_ctx ctx;
13262 unsigned char checksum_before[16], checksum_after[16];
13263 htab_t ht;
13265 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13266 md5_init_ctx (&ctx);
13267 fold_checksum_tree (expr, &ctx, ht);
13268 md5_finish_ctx (&ctx, checksum_before);
13269 htab_empty (ht);
13271 ret = fold_1 (expr);
13273 md5_init_ctx (&ctx);
13274 fold_checksum_tree (expr, &ctx, ht);
13275 md5_finish_ctx (&ctx, checksum_after);
13276 htab_delete (ht);
13278 if (memcmp (checksum_before, checksum_after, 16))
13279 fold_check_failed (expr, ret);
13281 return ret;
13284 void
13285 print_fold_checksum (const_tree expr)
13287 struct md5_ctx ctx;
13288 unsigned char checksum[16], cnt;
13289 htab_t ht;
13291 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13292 md5_init_ctx (&ctx);
13293 fold_checksum_tree (expr, &ctx, ht);
13294 md5_finish_ctx (&ctx, checksum);
13295 htab_delete (ht);
13296 for (cnt = 0; cnt < 16; ++cnt)
13297 fprintf (stderr, "%02x", checksum[cnt]);
13298 putc ('\n', stderr);
13301 static void
13302 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13304 internal_error ("fold check: original tree changed by fold");
13307 static void
13308 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13310 const void **slot;
13311 enum tree_code code;
13312 struct tree_function_decl buf;
13313 int i, len;
13315 recursive_label:
13317 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13318 <= sizeof (struct tree_function_decl))
13319 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13322 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13323 if (*slot != NULL)
13324 return;
13325 *slot = expr;
13326 code = TREE_CODE (expr);
13327 if (TREE_CODE_CLASS (code) == tcc_declaration
13328 && DECL_ASSEMBLER_NAME_SET_P (expr))
13330 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13331 memcpy ((char *) &buf, expr, tree_size (expr));
13332 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13333 expr = (tree) &buf;
13335 else if (TREE_CODE_CLASS (code) == tcc_type
13336 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13337 || TYPE_CACHED_VALUES_P (expr)
13338 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13340 /* Allow these fields to be modified. */
13341 tree tmp;
13342 memcpy ((char *) &buf, expr, tree_size (expr));
13343 expr = tmp = (tree) &buf;
13344 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13345 TYPE_POINTER_TO (tmp) = NULL;
13346 TYPE_REFERENCE_TO (tmp) = NULL;
13347 if (TYPE_CACHED_VALUES_P (tmp))
13349 TYPE_CACHED_VALUES_P (tmp) = 0;
13350 TYPE_CACHED_VALUES (tmp) = NULL;
13353 md5_process_bytes (expr, tree_size (expr), ctx);
13354 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13355 if (TREE_CODE_CLASS (code) != tcc_type
13356 && TREE_CODE_CLASS (code) != tcc_declaration
13357 && code != TREE_LIST
13358 && code != SSA_NAME)
13359 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13360 switch (TREE_CODE_CLASS (code))
13362 case tcc_constant:
13363 switch (code)
13365 case STRING_CST:
13366 md5_process_bytes (TREE_STRING_POINTER (expr),
13367 TREE_STRING_LENGTH (expr), ctx);
13368 break;
13369 case COMPLEX_CST:
13370 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13371 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13372 break;
13373 case VECTOR_CST:
13374 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13375 break;
13376 default:
13377 break;
13379 break;
13380 case tcc_exceptional:
13381 switch (code)
13383 case TREE_LIST:
13384 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13385 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13386 expr = TREE_CHAIN (expr);
13387 goto recursive_label;
13388 break;
13389 case TREE_VEC:
13390 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13391 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13392 break;
13393 default:
13394 break;
13396 break;
13397 case tcc_expression:
13398 case tcc_reference:
13399 case tcc_comparison:
13400 case tcc_unary:
13401 case tcc_binary:
13402 case tcc_statement:
13403 case tcc_vl_exp:
13404 len = TREE_OPERAND_LENGTH (expr);
13405 for (i = 0; i < len; ++i)
13406 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13407 break;
13408 case tcc_declaration:
13409 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13410 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13411 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13413 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13414 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13415 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13416 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13417 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13419 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13420 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13422 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13424 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13425 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13426 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13428 break;
13429 case tcc_type:
13430 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13431 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13432 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13433 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13434 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13435 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13436 if (INTEGRAL_TYPE_P (expr)
13437 || SCALAR_FLOAT_TYPE_P (expr))
13439 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13440 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13442 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13443 if (TREE_CODE (expr) == RECORD_TYPE
13444 || TREE_CODE (expr) == UNION_TYPE
13445 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13446 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13447 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13448 break;
13449 default:
13450 break;
13454 /* Helper function for outputting the checksum of a tree T.  When
13455 debugging with gdb, you can "define mynext" to be "next" followed
13456 by "call debug_fold_checksum (op0)", then just trace down till the
13457 outputs differ.  */
13459 void
13460 debug_fold_checksum (const_tree t)
13462 int i;
13463 unsigned char checksum[16];
13464 struct md5_ctx ctx;
13465 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13467 md5_init_ctx (&ctx);
13468 fold_checksum_tree (t, &ctx, ht);
13469 md5_finish_ctx (&ctx, checksum);
13470 htab_delete (ht);
13472 for (i = 0; i < 16; i++)
13473 fprintf (stderr, "%d ", checksum[i]);
13475 fprintf (stderr, "\n");
13480 /* Fold a unary tree expression with code CODE of type TYPE with an
13481 operand OP0. Return a folded expression if successful. Otherwise,
13482 return a tree expression with code CODE of type TYPE with an
13483 operand OP0.  */
13485 tree
13486 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13488 tree tem;
13489 #ifdef ENABLE_FOLD_CHECKING
13490 unsigned char checksum_before[16], checksum_after[16];
13491 struct md5_ctx ctx;
13492 htab_t ht;
13494 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13495 md5_init_ctx (&ctx);
13496 fold_checksum_tree (op0, &ctx, ht);
13497 md5_finish_ctx (&ctx, checksum_before);
13498 htab_empty (ht);
13499 #endif
13501 tem = fold_unary (code, type, op0);
13502 if (!tem)
13503 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13505 #ifdef ENABLE_FOLD_CHECKING
13506 md5_init_ctx (&ctx);
13507 fold_checksum_tree (op0, &ctx, ht);
13508 md5_finish_ctx (&ctx, checksum_after);
13509 htab_delete (ht);
13511 if (memcmp (checksum_before, checksum_after, 16))
13512 fold_check_failed (op0, tem);
13513 #endif
13514 return tem;
13517 /* Fold a binary tree expression with code CODE of type TYPE with
13518 operands OP0 and OP1. Return a folded expression if successful.
13519 Otherwise, return a tree expression with code CODE of type TYPE
13520 with operands OP0 and OP1. */
13523 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13524 MEM_STAT_DECL)
13526 tree tem;
13527 #ifdef ENABLE_FOLD_CHECKING
13528 unsigned char checksum_before_op0[16],
13529 checksum_before_op1[16],
13530 checksum_after_op0[16],
13531 checksum_after_op1[16];
13532 struct md5_ctx ctx;
13535 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13536 md5_init_ctx (&ctx);
13537 fold_checksum_tree (op0, &ctx, ht);
13538 md5_finish_ctx (&ctx, checksum_before_op0);
13541 md5_init_ctx (&ctx);
13542 fold_checksum_tree (op1, &ctx, ht);
13543 md5_finish_ctx (&ctx, checksum_before_op1);
13547 tem = fold_binary (code, type, op0, op1);
13548 if (!tem)
13549 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13551 #ifdef ENABLE_FOLD_CHECKING
13552 md5_init_ctx (&ctx);
13553 fold_checksum_tree (op0, &ctx, ht);
13554 md5_finish_ctx (&ctx, checksum_after_op0);
13557 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13558 fold_check_failed (op0, tem);
13560 md5_init_ctx (&ctx);
13561 fold_checksum_tree (op1, &ctx, ht);
13562 md5_finish_ctx (&ctx, checksum_after_op1);
13565 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13566 fold_check_failed (op1, tem);
13571 /* Fold a ternary tree expression with code CODE of type TYPE with
13572 operands OP0, OP1, and OP2. Return a folded expression if
13573 successful. Otherwise, return a tree expression with code CODE of
13574 type TYPE with operands OP0, OP1, and OP2. */
13577 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13578 MEM_STAT_DECL)
13580 tree tem;
13581 #ifdef ENABLE_FOLD_CHECKING
13582 unsigned char checksum_before_op0[16],
13583 checksum_before_op1[16],
13584 checksum_before_op2[16],
13585 checksum_after_op0[16],
13586 checksum_after_op1[16],
13587 checksum_after_op2[16];
13588 struct md5_ctx ctx;
13591 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13592 md5_init_ctx (&ctx);
13593 fold_checksum_tree (op0, &ctx, ht);
13594 md5_finish_ctx (&ctx, checksum_before_op0);
13597 md5_init_ctx (&ctx);
13598 fold_checksum_tree (op1, &ctx, ht);
13599 md5_finish_ctx (&ctx, checksum_before_op1);
13602 md5_init_ctx (&ctx);
13603 fold_checksum_tree (op2, &ctx, ht);
13604 md5_finish_ctx (&ctx, checksum_before_op2);
13608 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13609 tem = fold_ternary (code, type, op0, op1, op2);
13610 if (!tem)
13611 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13613 #ifdef ENABLE_FOLD_CHECKING
13614 md5_init_ctx (&ctx);
13615 fold_checksum_tree (op0, &ctx, ht);
13616 md5_finish_ctx (&ctx, checksum_after_op0);
13619 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13620 fold_check_failed (op0, tem);
13622 md5_init_ctx (&ctx);
13623 fold_checksum_tree (op1, &ctx, ht);
13624 md5_finish_ctx (&ctx, checksum_after_op1);
13627 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13628 fold_check_failed (op1, tem);
13630 md5_init_ctx (&ctx);
13631 fold_checksum_tree (op2, &ctx, ht);
13632 md5_finish_ctx (&ctx, checksum_after_op2);
13635 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13636 fold_check_failed (op2, tem);
13641 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13642 arguments in ARGARRAY, and a null static chain.
13643 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13644 of type TYPE from the given operands as constructed by build_call_array. */
13647 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13650 #ifdef ENABLE_FOLD_CHECKING
13651 unsigned char checksum_before_fn[16],
13652 checksum_before_arglist[16],
13653 checksum_after_fn[16],
13654 checksum_after_arglist[16];
13655 struct md5_ctx ctx;
13659 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13660 md5_init_ctx (&ctx);
13661 fold_checksum_tree (fn, &ctx, ht);
13662 md5_finish_ctx (&ctx, checksum_before_fn);
13665 md5_init_ctx (&ctx);
13666 for (i = 0; i < nargs; i++)
13667 fold_checksum_tree (argarray[i], &ctx, ht);
13668 md5_finish_ctx (&ctx, checksum_before_arglist);
13672 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13674 #ifdef ENABLE_FOLD_CHECKING
13675 md5_init_ctx (&ctx);
13676 fold_checksum_tree (fn, &ctx, ht);
13677 md5_finish_ctx (&ctx, checksum_after_fn);
13680 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13681 fold_check_failed (fn, tem);
13683 md5_init_ctx (&ctx);
13684 for (i = 0; i < nargs; i++)
13685 fold_checksum_tree (argarray[i], &ctx, ht);
13686 md5_finish_ctx (&ctx, checksum_after_arglist);
13689 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13690 fold_check_failed (NULL_TREE, tem);
13695 /* Perform constant folding and related simplification of initializer
13696 expression EXPR. These behave identically to "fold_buildN" but ignore
13697 potential run-time traps and exceptions that fold must preserve. */
13699 #define START_FOLD_INIT \
13700 int saved_signaling_nans = flag_signaling_nans;\
13701 int saved_trapping_math = flag_trapping_math;\
13702 int saved_rounding_math = flag_rounding_math;\
13703 int saved_trapv = flag_trapv;\
13704 int saved_folding_initializer = folding_initializer;\
13705 flag_signaling_nans = 0;\
13706 flag_trapping_math = 0;\
13707 flag_rounding_math = 0;\
13708 flag_trapv = 0;
13709 folding_initializer = 1;
13711 #define END_FOLD_INIT \
13712 flag_signaling_nans = saved_signaling_nans;\
13713 flag_trapping_math = saved_trapping_math;\
13714 flag_rounding_math = saved_rounding_math;\
13715 flag_trapv = saved_trapv;\
13716 folding_initializer = saved_folding_initializer;
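/* Editor's note -- usage sketch (illustrative, the tree operands are
   hypothetical):
     tree t = fold_build2_initializer (PLUS_EXPR, dbl_type, a, b);
   folds like fold_build2, but with trapping math, signaling NaNs and
   -ftrapv temporarily disabled, since a static initializer cannot
   trap at run time anyway.  */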
13719 fold_build1_initializer (enum tree_code code, tree type, tree op)
13724 result = fold_build1 (code, type, op);
13731 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13736 result = fold_build2 (code, type, op0, op1);
13743 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13749 result = fold_build3 (code, type, op0, op1, op2);
13756 fold_build_call_array_initializer (tree type, tree fn,
13757 int nargs, tree *argarray)
13762 result = fold_build_call_array (type, fn, nargs, argarray);
13768 #undef START_FOLD_INIT
13769 #undef END_FOLD_INIT
13771 /* Determine if first argument is a multiple of second argument.  Return 0 if
13772 it is not, or we cannot easily determine it to be.
13774 An example of the sort of thing we care about (at this point; this routine
13775 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13776 fold cases do now) is discovering that
13778 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13780 is a multiple of
13782 SAVE_EXPR (J * 8)
13784 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13786 This code also handles discovering that
13788 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13790 is a multiple of 8 so we don't have to worry about dealing with a
13791 possible remainder.
13793 Note that we *look* inside a SAVE_EXPR only to determine how it was
13794 calculated; it is not safe for fold to do much of anything else with the
13795 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13796 at run time. For example, the latter example above *cannot* be implemented
13797 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13798 evaluation time of the original SAVE_EXPR is not necessarily the same at
13799 the time the new expression is evaluated. The only optimization of this
13800 sort that would be valid is changing
13802 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13804 to
13806 SAVE_EXPR (I) * SAVE_EXPR (J)
13808 (where the same SAVE_EXPR (J) is used in the original and the
13809 transformed version). */
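/* Editor's note -- illustrative calls (hypothetical trees):
     multiple_of_p (type, J * 8, 8)  returns 1
     multiple_of_p (type, I * J, 8)  returns 0  (cannot be determined)
   so a zero result only means "not provably a multiple".  */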
13812 multiple_of_p (tree type, const_tree top, const_tree bottom)
13814 if (operand_equal_p (top, bottom, 0))
13815 return 1;
13817 if (TREE_CODE (type) != INTEGER_TYPE)
13818 return 0;
13820 switch (TREE_CODE (top))
13822 case BIT_AND_EXPR:
13823 /* Bitwise and provides a power of two multiple. If the mask is
13824 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13825 if (!integer_pow2p (bottom))
13826 return 0;
13829 case MULT_EXPR:
13830 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13831 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13833 case PLUS_EXPR:
13834 case MINUS_EXPR:
13835 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13836 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13838 case LSHIFT_EXPR:
13839 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13843 op1 = TREE_OPERAND (top, 1);
13844 /* const_binop may not detect overflow correctly,
13845 so check for it explicitly here. */
13846 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13847 > TREE_INT_CST_LOW (op1)
13848 && TREE_INT_CST_HIGH (op1) == 0
13849 && 0 != (t1 = fold_convert (type,
13850 const_binop (LSHIFT_EXPR,
13853 && !TREE_OVERFLOW (t1))
13854 return multiple_of_p (type, t1, bottom);
13859 /* Can't handle conversions from non-integral or wider integral type. */
13860 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13861 || (TYPE_PRECISION (type)
13862 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13865 /* .. fall through ... */
13868 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13871 if (TREE_CODE (bottom) != INTEGER_CST
13872 || integer_zerop (bottom)
13873 || (TYPE_UNSIGNED (type)
13874 && (tree_int_cst_sgn (top) < 0
13875 || tree_int_cst_sgn (bottom) < 0)))
13877 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
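
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  multiple_of_p can prove that the constant 24 is a
   multiple of 8 in sizetype, so a caller such as round_up below can
   skip emitting any rounding code.  */

static int ATTRIBUTE_UNUSED
example_multiple_of_p (void)
{
  tree twenty_four = build_int_cst (sizetype, 24);
  tree eight = build_int_cst (sizetype, 8);
  return multiple_of_p (sizetype, twenty_four, eight);  /* Returns 1.  */
}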
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total number of bits is smaller than that
	 of the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (type);
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
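
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  pow (x, 2.0) is known non-negative even when nothing
   is known about x, because 2.0 is an even integer valued real.  ARG0
   stands for an arbitrary tree built by a caller.  */

static bool ATTRIBUTE_UNUSED
example_pow_nonnegative (tree arg0)
{
  tree two = build_real (double_type_node, dconst2);
  bool ignored = false;
  return tree_call_nonnegative_warnv_p (double_type_node,
					built_in_decls[BUILT_IN_POW],
					arg0, two, &ignored);
}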
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
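
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  An unsigned char value zero-extended to int is
   trivially non-negative, so the predicate answers true without
   consulting any signed-overflow assumption.  */

static bool ATTRIBUTE_UNUSED
example_nonnegative_extension (tree unsigned_char_expr)
{
  tree widened = fold_convert (integer_type_node, unsigned_char_expr);
  return tree_expr_nonnegative_p (widened);
}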
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* In the presence of negative values it is hard
	     to say anything.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of the operands must be positive and the other
	     non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
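
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  The address of an ordinary (non-weak) declaration is
   known to be nonzero, so a comparison such as "&v != 0" can fold to
   true; for a weak symbol the predicate must refuse, since the linker
   may resolve it to NULL.  */

static bool ATTRIBUTE_UNUSED
example_address_nonzero (tree var_decl)
{
  bool ignored = false;
  tree addr = build_fold_addr_expr (var_decl);
  return tree_single_nonzero_warnv_p (addr, &ignored);
}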
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case EXC_PTR_EXPR:
    case SSA_NAME:
    case FILTER_EXPR:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
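
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  When both factors are known nonzero, x * y is nonzero
   only under the assumption that signed multiplication does not wrap
   to zero, so the query answers true and records that assumption for a
   possible -Wstrict-overflow diagnostic.  */

static bool ATTRIBUTE_UNUSED
example_product_nonzero (tree x, tree y)
{
  tree prod = fold_build2 (MULT_EXPR, integer_type_node, x, y);
  return tree_expr_nonzero_p (prod);
}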
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
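
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  3 + 4 folds to the INTEGER_CST 7, while x + 4 for a
   non-constant x would yield NULL_TREE rather than a partially
   simplified tree.  */

static tree ATTRIBUTE_UNUSED
example_fold_binary_to_constant (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  tree four = build_int_cst (integer_type_node, 4);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				  three, four);
}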
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
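
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  Reading "abc"[1] through this routine yields the
   INTEGER_CST 'b' (98), letting a constant subscript into a string
   literal fold at compile time.  */

static tree ATTRIBUTE_UNUSED
example_read_from_string (void)
{
  tree str = build_string (4, "abc");   /* STRING_CST "abc\0".  */
  tree index_type = build_index_type (size_int (3));
  tree ref;

  TREE_TYPE (str) = build_array_type (char_type_node, index_type);
  ref = build4 (ARRAY_REF, char_type_node, str,
		build_int_cst (sizetype, 1), NULL_TREE, NULL_TREE);
  return fold_read_from_constant_string (ref);  /* INTEGER_CST 'b'.  */
}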
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
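
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  Negating the most negative INTEGER_CST of a signed
   type wraps back to itself, so neg_double reports overflow and the
   folded result carries TREE_OVERFLOW.  */

static tree ATTRIBUTE_UNUSED
example_negate_int_min (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  return fold_build1 (NEGATE_EXPR, integer_type_node, int_min);
}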
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
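
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  ~0 in a 32-bit unsigned type folds to the all-ones
   constant 0xffffffff: both halves of the double-word representation
   are complemented and then truncated to the type's precision by
   force_fit_type_double.  */

static tree ATTRIBUTE_UNUSED
example_fold_not_zero (void)
{
  tree zero = build_int_cst (unsigned_type_node, 0);
  return fold_build1 (BIT_NOT_EXPR, unsigned_type_node, zero);
}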
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
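
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  A NaN compares unequal to everything, including
   itself, so NaN == NaN folds to false; an ordering comparison such as
   NaN < NaN may only fold when -ftrapping-math is off, since LT on
   NaNs raises FE_INVALID.  */

static tree ATTRIBUTE_UNUSED
example_nan_compare (void)
{
  REAL_VALUE_TYPE nan;
  tree n;

  real_nan (&nan, "", 1, TYPE_MODE (double_type_node));
  n = build_real (double_type_node, nan);
  /* Folds to boolean_false_node.  */
  return fold_relational_const (EQ_EXPR, boolean_type_node, n, n);
}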
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return has no side effects, or whether the right-hand side of the modify
     expression inside the return has none.  If either has no side effects,
     we don't need to wrap the expression in a cleanup point expression.
     Note we don't check the left-hand side of the modify because it should
     always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);

	  if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
				part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
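
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  *(double *)&z for a complex double Z simplifies to
   __real__ z with no memory round-trip; Z is assumed to be a
   COMPLEX_TYPE variable built by a caller.  */

static tree ATTRIBUTE_UNUSED
example_fold_complex_indirection (tree z)
{
  tree addr = fold_convert (build_pointer_type (double_type_node),
			    build_fold_addr_expr (z));
  return fold_indirect_ref_1 (double_type_node, addr);
}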
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
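
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  For a power-of-two divisor, non-constant values are
   rounded with the classic mask trick (value + divisor - 1) & -divisor;
   a constant such as 37 folds directly, so round_up (37, 8) yields the
   INTEGER_CST 40.  */

static tree ATTRIBUTE_UNUSED
example_round_up (void)
{
  tree thirty_seven = build_int_cst (sizetype, 37);
  return round_up (thirty_seven, 8);   /* INTEGER_CST 40.  */
}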
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
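
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  For &a[3] and &a[1] over an int array the cores
   compare equal, so the byte difference folds to the constant
   2 * sizeof (int); A_REF3 and A_REF1 are assumed to be ADDR_EXPRs
   built by a caller.  */

static bool ATTRIBUTE_UNUSED
example_ptr_difference (tree a_ref3, tree a_ref1, HOST_WIDE_INT *diff)
{
  return ptr_difference_const (a_ref3, a_ref1, diff);
}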
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
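
/* Illustrative sketch only, not part of GCC; the example_* name is
   hypothetical.  In a context such as fabs (copysign (x, y)) the sign
   of copysign's result is irrelevant, so stripping the sign operation
   leaves just x, with y's side effects preserved by omit_one_operand.  */

static tree ATTRIBUTE_UNUSED
example_strip_copysign (tree copysign_call)
{
  tree stripped = fold_strip_sign_ops (copysign_call);
  return stripped ? stripped : copysign_call;
}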