/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
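
/* Worked example (assuming a 64-bit HOST_WIDE_INT): with
   a == b == 0x4000000000000000 (2^62), the sum wraps to
   0x8000000000000000.  Here a ^ b == 0, so ~(a ^ b) is all ones, and
   a ^ sum has the sign bit set; the macro therefore yields nonzero,
   flagging the overflow.  */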
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
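
/* Illustration (assuming a 64-bit HOST_WIDE_INT, so BASE == 2^32):
   LOWPART (0x123456789abcdef0) == 0x9abcdef0 and
   HIGHPART (0x123456789abcdef0) == 0x12345678, so the original value
   equals LOWPART + HIGHPART * BASE.  */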
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
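
/* encode and decode are exact inverses.  For example (assuming a 64-bit
   HOST_WIDE_INT), encode (w, 5, -1) yields
   w = { 5, 0, 0xffffffff, 0xffffffff }, and decode (w, &l, &h) restores
   l == 5, h == -1.  */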
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if:
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if:
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
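
/* Worked example (a sketch, for a signed 8-bit type and a 64-bit
   HOST_WIDE_INT): if T's low word is 0x1ff, the bits beyond the 8-bit
   precision are cleared first, leaving 0xff; bit 7 is then set and the
   type is signed, so the value is sign extended to high == -1 and an
   all-ones low word, i.e. the constant -1, and a new node is returned.  */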
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
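
/* The low-word carry is detected portably: unsigned addition wraps, so
   l == l1 + l2 is less than l1 exactly when a carry occurred.  E.g.
   l1 == ~(unsigned HOST_WIDE_INT) 0 and l2 == 1 give l == 0 < l1, so 1
   is carried into the high word.  */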
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
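
/* The only signed overflow case is negating the most negative value
   (low word 0, high word with only the sign bit set): it negates to
   itself, so *hv and h1 then share a set sign bit and (*hv & h1) < 0
   is true.  */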
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
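
/* Overflow test, in outline: the signed product fits in the low two
   words iff the high half of the 4-word product is a pure sign
   extension of it.  After correcting the top half for negative
   operands, a nonnegative result requires toplow == tophigh == 0, and a
   negative result (*hv < 0) requires both to be all ones; anything else
   means the product overflowed.  */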
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
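
/* Note the two-step shift "l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1"
   above when moving low-word bits into the high word: a single shift by
   HOST_BITS_PER_WIDE_INT - count would be undefined for count == 0.  */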
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          /* quo = quo + 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  return overflow;
}
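
/* Worked example of the rounding modes above, for num = -7, den = 2:
   TRUNC rounds toward zero:          quo = -3, rem = -1.
   FLOOR rounds toward -infinity:     quo = -4, rem =  1.
   CEIL rounds toward +infinity:      quo = -3, rem = -1.
   ROUND rounds to the closest value: quo = -4, rem =  1
     (the 2 * abs (rem) >= abs (den) test fires on the tie).
   In every mode num == quo * den + rem holds.  */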
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
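
/* For instance (schematically, with INTEGER_CSTs standing for the
   values): dividing 12 by 4 under EXACT_DIV_EXPR yields the constant 3,
   while dividing 13 by 4 leaves a nonzero remainder and yields
   NULL_TREE.  */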
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ASINH:
    case BUILT_IN_ASINHF:
    case BUILT_IN_ASINHL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_ATANH:
    case BUILT_IN_ATANHF:
    case BUILT_IN_ATANHL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
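
/* E.g. for a 32-bit signed type the only constant this rejects is
   -2147483648, whose low 32 bits are exactly 1 << 31; every other
   value negates without overflow.  */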
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
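
/* For example, negate_expr turns A - B into B - A, -(A + B) into
   (-B) - A when B is cheaply negatable, and f (x) into f (-x) for odd
   builtins such as sin; only when no rule applies does it fall back to
   wrapping T in the NEGATE_EXPR built above.  */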
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
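
/* Worked example: splitting A - 5 with CODE == PLUS_EXPR returns A and
   stores the literal 5 through *MINUS_LITP (it was subtracted), with
   *CONP left null; splitting 5 + A instead stores 5 through *LITP.  */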
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
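
/* For example (schematically), int_const_binop (PLUS_EXPR, INT_MAX, 1, 0)
   on a 32-bit signed type wraps to INT_MIN, and force_fit_type marks
   the resulting INTEGER_CST with TREE_OVERFLOW since signed overflow
   occurred.  */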
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return NULL_TREE;
}
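
/* The complex division above uses the textbook formula
     (a + b*i) / (c + d*i)
       = ((a*c + b*d) + (b*c - a*d)*i) / (c*c + d*d),
   with T1 and T2 the two numerators, MAGSQUARED the shared denominator,
   and the final divisions done with TRUNC_DIV_EXPR for integral complex
   types and RDIV_EXPR for floating-point ones.  */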
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
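
/* E.g. for sizetype constants 3 and 5 this computes 5 - 3 == 2 in the
   unsigned type (which cannot overflow), converts to ssizetype, and
   negates, yielding -2 without ever forming an out-of-range value.  */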
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
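
/* Following the Java-style saturation described above: converting the
   REAL_CST 1e30 to a 32-bit signed type yields 2147483647 (INT_MAX)
   with the overflow flags set, and a NaN converts to zero, likewise
   flagged.  */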
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                              type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE: case CHAR_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_RANGE_REF:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
      return true;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        return true;
      return false;
    }
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2339 /* Return a tree for the comparison which is the combination of
2340 doing the AND or OR (depending on CODE) of the two operations LCODE
2341 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2342 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2343 if this makes the transformation invalid. */
2346 combine_comparisons (enum tree_code code, enum tree_code lcode,
2347 enum tree_code rcode, tree truth_type,
2348 tree ll_arg, tree lr_arg)
2350 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2351 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2352 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2353 enum comparison_code compcode;
2357 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2358 compcode = lcompcode & rcompcode;
2361 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2362 compcode = lcompcode | rcompcode;
2371 /* Eliminate unordered comparisons, as well as LTGT and ORD
2372 which are not used unless the mode has NaNs. */
2373 compcode &= ~COMPCODE_UNORD;
2374 if (compcode == COMPCODE_LTGT)
2375 compcode = COMPCODE_NE;
2376 else if (compcode == COMPCODE_ORD)
2377 compcode = COMPCODE_TRUE;
2379 else if (flag_trapping_math)
2381 /* Check that the original operation and the optimized ones will trap
2382 under the same condition. */
2383 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2384 && (lcompcode != COMPCODE_EQ)
2385 && (lcompcode != COMPCODE_ORD);
2386 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2387 && (rcompcode != COMPCODE_EQ)
2388 && (rcompcode != COMPCODE_ORD);
2389 bool trap = (compcode & COMPCODE_UNORD) == 0
2390 && (compcode != COMPCODE_EQ)
2391 && (compcode != COMPCODE_ORD);
2393 /* In a short-circuited boolean expression the LHS might be
2394 such that the RHS, if evaluated, will never trap. For
2395 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2396 if neither x nor y is NaN. (This is a mixed blessing: for
2397 example, the expression above will never trap, hence
2398 optimizing it to x < y would be invalid). */
2399 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2400 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2403 /* If the comparison was short-circuited, and only the RHS
2404 trapped, we may now generate a spurious trap. */
2406 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2409 /* If we changed the conditions that cause a trap, we lose. */
2410 if ((ltrap || rtrap) != trap)
2414 if (compcode == COMPCODE_TRUE)
2415 return constant_boolean_node (true, truth_type);
2416 else if (compcode == COMPCODE_FALSE)
2417 return constant_boolean_node (false, truth_type);
2419 return fold_build2 (compcode_to_comparison (compcode),
2420 truth_type, ll_arg, lr_arg);
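/* A standalone sanity check, illustrative only, that the combination
   above is sound where no NaNs exist: over the integers,
   (x < y) || (x == y) agrees with x <= y, and (x < y) && (x == y) is
   identically false, which is what OR-ing and AND-ing the compcodes
   predicts.  */
#include <assert.h>

static void
demo_combine_comparisons (void)
{
  int x, y;
  for (x = -2; x <= 2; x++)
    for (y = -2; y <= 2; y++)
      {
	assert (((x < y) || (x == y)) == (x <= y));
	assert (((x < y) && (x == y)) == 0);
      }
}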
2423 /* Return nonzero if CODE is a tree code that represents a truth value. */
2426 truth_value_p (enum tree_code code)
2428 return (TREE_CODE_CLASS (code) == tcc_comparison
2429 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2430 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2431 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2434 /* Return nonzero if two operands (typically of the same tree node)
2435 are necessarily equal. If either argument has side-effects this
2436 function returns zero. FLAGS modifies behavior as follows:
2438 If OEP_ONLY_CONST is set, only return nonzero for constants.
2439 This function tests whether the operands are indistinguishable;
2440 it does not test whether they are equal using C's == operation.
2441 The distinction is important for IEEE floating point, because
2442 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2443 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2445 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2446 even though it may hold multiple values during a function.
2447 This is because a GCC tree node guarantees that nothing else is
2448 executed between the evaluation of its "operands" (which may often
2449 be evaluated in arbitrary order). Hence if the operands themselves
2450 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2451 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2452 unset means assuming isochronic (or instantaneous) tree equivalence.
2453 Unless comparing arbitrary expression trees, such as from different
2454 statements, this flag can usually be left unset.
2456 If OEP_PURE_SAME is set, then pure functions with identical arguments
2457 are considered the same. It is used when the caller has other ways
2458 to ensure that global memory is unchanged in between. */
2461 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2463 /* If either is ERROR_MARK, they aren't equal. */
2464 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2467 /* If the two types differ in signedness, we can't consider
2468 them equal. We must check this before the STRIP_NOPS calls
2469 because they may change the signedness of the arguments. */
2470 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2476 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2477 /* This is needed for conversions and for COMPONENT_REF.
2478 Might as well play it safe and always test this. */
2479 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2480 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2481 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2484 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2485 We don't care about side effects in that case because the SAVE_EXPR
2486 takes care of that for us. In all other cases, two expressions are
2487 equal if they have no side effects. If we have two identical
2488 expressions with side effects that should be treated the same due
2489 to the only side effects being identical SAVE_EXPR's, that will
2490 be detected in the recursive calls below. */
2491 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2492 && (TREE_CODE (arg0) == SAVE_EXPR
2493 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2496 /* Next handle constant cases, those for which we can return 1 even
2497 if ONLY_CONST is set. */
2498 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2499 switch (TREE_CODE (arg0))
2502 return (! TREE_CONSTANT_OVERFLOW (arg0)
2503 && ! TREE_CONSTANT_OVERFLOW (arg1)
2504 && tree_int_cst_equal (arg0, arg1));
2507 return (! TREE_CONSTANT_OVERFLOW (arg0)
2508 && ! TREE_CONSTANT_OVERFLOW (arg1)
2509 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2510 TREE_REAL_CST (arg1)));
2516 if (TREE_CONSTANT_OVERFLOW (arg0)
2517 || TREE_CONSTANT_OVERFLOW (arg1))
2520 v1 = TREE_VECTOR_CST_ELTS (arg0);
2521 v2 = TREE_VECTOR_CST_ELTS (arg1);
2524 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2527 v1 = TREE_CHAIN (v1);
2528 v2 = TREE_CHAIN (v2);
2535 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2537 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2541 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2542 && ! memcmp (TREE_STRING_POINTER (arg0),
2543 TREE_STRING_POINTER (arg1),
2544 TREE_STRING_LENGTH (arg0)));
2547 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2553 if (flags & OEP_ONLY_CONST)
2556 /* Define macros to test an operand from arg0 and arg1 for equality and a
2557 variant that allows null and views null as being different from any
2558 non-null value. In the latter case, if either is null, then both
2559 must be; otherwise, do the normal comparison. */
2560 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2561 TREE_OPERAND (arg1, N), flags)
2563 #define OP_SAME_WITH_NULL(N) \
2564 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2565 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2567 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2570 /* Two conversions are equal only if signedness and modes match. */
2571 switch (TREE_CODE (arg0))
2576 case FIX_TRUNC_EXPR:
2577 case FIX_FLOOR_EXPR:
2578 case FIX_ROUND_EXPR:
2579 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2580 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2590 case tcc_comparison:
2592 if (OP_SAME (0) && OP_SAME (1))
2595 /* For commutative ops, allow the other order. */
2596 return (commutative_tree_code (TREE_CODE (arg0))
2597 && operand_equal_p (TREE_OPERAND (arg0, 0),
2598 TREE_OPERAND (arg1, 1), flags)
2599 && operand_equal_p (TREE_OPERAND (arg0, 1),
2600 TREE_OPERAND (arg1, 0), flags));
2603 /* If either of the pointer (or reference) expressions we are
2604 dereferencing contain a side effect, these cannot be equal. */
2605 if (TREE_SIDE_EFFECTS (arg0)
2606 || TREE_SIDE_EFFECTS (arg1))
2609 switch (TREE_CODE (arg0))
2612 case ALIGN_INDIRECT_REF:
2613 case MISALIGNED_INDIRECT_REF:
2619 case ARRAY_RANGE_REF:
2620 /* Operands 2 and 3 may be null. */
2623 && OP_SAME_WITH_NULL (2)
2624 && OP_SAME_WITH_NULL (3));
2627 /* Handle operand 2 the same as for ARRAY_REF. */
2628 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2631 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2637 case tcc_expression:
2638 switch (TREE_CODE (arg0))
2641 case TRUTH_NOT_EXPR:
2644 case TRUTH_ANDIF_EXPR:
2645 case TRUTH_ORIF_EXPR:
2646 return OP_SAME (0) && OP_SAME (1);
2648 case TRUTH_AND_EXPR:
2650 case TRUTH_XOR_EXPR:
2651 if (OP_SAME (0) && OP_SAME (1))
2654 /* Otherwise take into account this is a commutative operation. */
2655 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags));
2661 /* If the CALL_EXPRs call different functions, then they
2662 clearly cannot be equal. */
2667 unsigned int cef = call_expr_flags (arg0);
2668 if (flags & OEP_PURE_SAME)
2669 cef &= ECF_CONST | ECF_PURE;
2676 /* Now see if all the arguments are the same. operand_equal_p
2677 does not handle TREE_LIST, so we walk the operands here
2678 feeding them to operand_equal_p. */
2679 arg0 = TREE_OPERAND (arg0, 1);
2680 arg1 = TREE_OPERAND (arg1, 1);
2681 while (arg0 && arg1)
2683 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2687 arg0 = TREE_CHAIN (arg0);
2688 arg1 = TREE_CHAIN (arg1);
2691 /* If we get here and both argument lists are exhausted
2692 then the CALL_EXPRs are equal. */
2693 return ! (arg0 || arg1);
2699 case tcc_declaration:
2700 /* Consider __builtin_sqrt equal to sqrt. */
2701 return (TREE_CODE (arg0) == FUNCTION_DECL
2702 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2703 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2704 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2711 #undef OP_SAME_WITH_NULL
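/* A standalone illustration, not GCC code, of the IEEE point made in
   the comment before operand_equal_p: -0.0 and 0.0 compare equal with
   C's ==, yet they are distinguishable values, so == is the wrong test
   for operand identity.  Assumes C99 signbit and default IEEE
   arithmetic.  */
#include <assert.h>
#include <math.h>

static void
demo_signed_zero (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);                       /* == cannot tell them apart.  */
  assert (signbit (pz) != signbit (nz));   /* But the values differ...  */
  assert (1.0 / pz != 1.0 / nz);           /* ...+inf versus -inf.  */
}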
2714 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2715 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2717 When in doubt, return 0. */
2720 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2722 int unsignedp1, unsignedpo;
2723 tree primarg0, primarg1, primother;
2724 unsigned int correct_width;
2726 if (operand_equal_p (arg0, arg1, 0))
2729 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2730 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2733 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2734 and see if the inner values are the same. This removes any
2735 signedness comparison, which doesn't matter here. */
2736 primarg0 = arg0, primarg1 = arg1;
2737 STRIP_NOPS (primarg0);
2738 STRIP_NOPS (primarg1);
2739 if (operand_equal_p (primarg0, primarg1, 0))
2742 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2743 actual comparison operand, ARG0.
2745 First throw away any conversions to wider types
2746 already present in the operands. */
2748 primarg1 = get_narrower (arg1, &unsignedp1);
2749 primother = get_narrower (other, &unsignedpo);
2751 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2752 if (unsignedp1 == unsignedpo
2753 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2754 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2756 tree type = TREE_TYPE (arg0);
2758 /* Make sure shorter operand is extended the right way
2759 to match the longer operand. */
2760 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2761 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2763 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2770 /* See if ARG is an expression that is either a comparison or is performing
2771 arithmetic on comparisons. The comparisons must only be comparing
2772 two different values, which will be stored in *CVAL1 and *CVAL2; if
2773 they are nonzero it means that some operands have already been found.
2774 No variables may be used anywhere else in the expression except in the
2775 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2776 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2778 If this is true, return 1. Otherwise, return zero. */
2781 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2783 enum tree_code code = TREE_CODE (arg);
2784 enum tree_code_class class = TREE_CODE_CLASS (code);
2786 /* We can handle some of the tcc_expression cases here. */
2787 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2789 else if (class == tcc_expression
2790 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2791 || code == COMPOUND_EXPR))
2794 else if (class == tcc_expression && code == SAVE_EXPR
2795 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2797 /* If we've already found a CVAL1 or CVAL2, this expression is
2798 too complex to handle. */
2799 if (*cval1 || *cval2)
2809 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2812 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2813 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2814 cval1, cval2, save_p));
2819 case tcc_expression:
2820 if (code == COND_EXPR)
2821 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2822 cval1, cval2, save_p)
2823 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2824 cval1, cval2, save_p)
2825 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2826 cval1, cval2, save_p));
2829 case tcc_comparison:
2830 /* First see if we can handle the first operand, then the second. For
2831 the second operand, we know *CVAL1 can't be zero. It must be that
2832 one side of the comparison is each of the values; test for the
2833 case where this isn't true by failing if the two operands are the same. */
2836 if (operand_equal_p (TREE_OPERAND (arg, 0),
2837 TREE_OPERAND (arg, 1), 0))
2841 *cval1 = TREE_OPERAND (arg, 0);
2842 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2844 else if (*cval2 == 0)
2845 *cval2 = TREE_OPERAND (arg, 0);
2846 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2851 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2853 else if (*cval2 == 0)
2854 *cval2 = TREE_OPERAND (arg, 1);
2855 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2867 /* ARG is a tree that is known to contain just arithmetic operations and
2868 comparisons. Evaluate the operations in the tree substituting NEW0 for
2869 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2873 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2875 tree type = TREE_TYPE (arg);
2876 enum tree_code code = TREE_CODE (arg);
2877 enum tree_code_class class = TREE_CODE_CLASS (code);
2879 /* We can handle some of the tcc_expression cases here. */
2880 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2882 else if (class == tcc_expression
2883 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2889 return fold_build1 (code, type,
2890 eval_subst (TREE_OPERAND (arg, 0),
2891 old0, new0, old1, new1));
2894 return fold_build2 (code, type,
2895 eval_subst (TREE_OPERAND (arg, 0),
2896 old0, new0, old1, new1),
2897 eval_subst (TREE_OPERAND (arg, 1),
2898 old0, new0, old1, new1));
2900 case tcc_expression:
2904 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2907 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2910 return fold_build3 (code, type,
2911 eval_subst (TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1),
2915 eval_subst (TREE_OPERAND (arg, 2),
2916 old0, new0, old1, new1));
2920 /* Fall through - ??? */
2922 case tcc_comparison:
2924 tree arg0 = TREE_OPERAND (arg, 0);
2925 tree arg1 = TREE_OPERAND (arg, 1);
2927 /* We need to check both for exact equality and tree equality. The
2928 former will be true if the operand has a side-effect. In that
2929 case, we know the operand occurred exactly once. */
2931 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2933 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2936 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2938 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2941 return fold_build2 (code, type, arg0, arg1);
2949 /* Return a tree for the case when the result of an expression is RESULT
2950 converted to TYPE and OMITTED was previously an operand of the expression
2951 but is now not needed (e.g., we folded OMITTED * 0).
2953 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2954 the conversion of RESULT to TYPE. */
2957 omit_one_operand (tree type, tree result, tree omitted)
2959 tree t = fold_convert (type, result);
2961 if (TREE_SIDE_EFFECTS (omitted))
2962 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2964 return non_lvalue (t);
2967 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2970 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2972 tree t = fold_convert (type, result);
2974 if (TREE_SIDE_EFFECTS (omitted))
2975 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2977 return pedantic_non_lvalue (t);
2980 /* Return a tree for the case when the result of an expression is RESULT
2981 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2982 of the expression but are now not needed.
2984 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2985 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2986 evaluated before OMITTED2. Otherwise, if neither has side effects,
2987 just do the conversion of RESULT to TYPE. */
2990 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2992 tree t = fold_convert (type, result);
2994 if (TREE_SIDE_EFFECTS (omitted2))
2995 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2996 if (TREE_SIDE_EFFECTS (omitted1))
2997 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2999 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
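/* Illustrative sketch of why OMITTED must still be evaluated when it
   has side effects: folding "demo_bump () * 0" straight to 0 would drop
   the call.  The COMPOUND_EXPR built above corresponds to the
   source-level comma operator used here.  The demo_bump counter is
   invented for the demo.  */
#include <assert.h>

static int demo_counter;

static int
demo_bump (void)
{
  return demo_counter++;
}

static void
demo_omit_one_operand (void)
{
  /* What the fold of "demo_bump () * 0" must denote: evaluate the
     omitted operand, then yield the constant.  */
  int r = (demo_bump (), 0);
  assert (r == 0 && demo_counter == 1);
}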
3003 /* Return a simplified tree node for the truth-negation of ARG. This
3004 never alters ARG itself. We assume that ARG is an operation that
3005 returns a truth value (0 or 1).
3007 FIXME: one would think we would fold the result, but it causes
3008 problems with the dominator optimizer. */
3010 invert_truthvalue (tree arg)
3012 tree type = TREE_TYPE (arg);
3013 enum tree_code code = TREE_CODE (arg);
3015 if (code == ERROR_MARK)
3018 /* If this is a comparison, we can simply invert it, except for
3019 floating-point non-equality comparisons, in which case we just
3020 enclose a TRUTH_NOT_EXPR around what we have. */
3022 if (TREE_CODE_CLASS (code) == tcc_comparison)
3024 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3025 if (FLOAT_TYPE_P (op_type)
3026 && flag_trapping_math
3027 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3028 && code != NE_EXPR && code != EQ_EXPR)
3029 return build1 (TRUTH_NOT_EXPR, type, arg);
3032 code = invert_tree_comparison (code,
3033 HONOR_NANS (TYPE_MODE (op_type)));
3034 if (code == ERROR_MARK)
3035 return build1 (TRUTH_NOT_EXPR, type, arg);
3037 return build2 (code, type,
3038 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3045 return constant_boolean_node (integer_zerop (arg), type);
3047 case TRUTH_AND_EXPR:
3048 return build2 (TRUTH_OR_EXPR, type,
3049 invert_truthvalue (TREE_OPERAND (arg, 0)),
3050 invert_truthvalue (TREE_OPERAND (arg, 1)));
3053 return build2 (TRUTH_AND_EXPR, type,
3054 invert_truthvalue (TREE_OPERAND (arg, 0)),
3055 invert_truthvalue (TREE_OPERAND (arg, 1)));
3057 case TRUTH_XOR_EXPR:
3058 /* Here we can invert either operand. We invert the first operand
3059 unless the second operand is a TRUTH_NOT_EXPR in which case our
3060 result is the XOR of the first operand with the inside of the
3061 negation of the second operand. */
3063 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3064 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3067 return build2 (TRUTH_XOR_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)),
3069 TREE_OPERAND (arg, 1));
3071 case TRUTH_ANDIF_EXPR:
3072 return build2 (TRUTH_ORIF_EXPR, type,
3073 invert_truthvalue (TREE_OPERAND (arg, 0)),
3074 invert_truthvalue (TREE_OPERAND (arg, 1)));
3076 case TRUTH_ORIF_EXPR:
3077 return build2 (TRUTH_ANDIF_EXPR, type,
3078 invert_truthvalue (TREE_OPERAND (arg, 0)),
3079 invert_truthvalue (TREE_OPERAND (arg, 1)));
3081 case TRUTH_NOT_EXPR:
3082 return TREE_OPERAND (arg, 0);
3086 tree arg1 = TREE_OPERAND (arg, 1);
3087 tree arg2 = TREE_OPERAND (arg, 2);
3088 /* A COND_EXPR may have a throw as one operand, which
3089 then has void type. Just leave void operands as they are. */
3091 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3092 VOID_TYPE_P (TREE_TYPE (arg1))
3093 ? arg1 : invert_truthvalue (arg1),
3094 VOID_TYPE_P (TREE_TYPE (arg2))
3095 ? arg2 : invert_truthvalue (arg2));
3099 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3100 invert_truthvalue (TREE_OPERAND (arg, 1)));
3102 case NON_LVALUE_EXPR:
3103 return invert_truthvalue (TREE_OPERAND (arg, 0));
3106 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3111 return build1 (TREE_CODE (arg), type,
3112 invert_truthvalue (TREE_OPERAND (arg, 0)));
3115 if (!integer_onep (TREE_OPERAND (arg, 1)))
3117 return build2 (EQ_EXPR, type, arg,
3118 fold_convert (type, integer_zero_node));
3121 return build1 (TRUTH_NOT_EXPR, type, arg);
3123 case CLEANUP_POINT_EXPR:
3124 return build1 (CLEANUP_POINT_EXPR, type,
3125 invert_truthvalue (TREE_OPERAND (arg, 0)));
3130 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3131 return build1 (TRUTH_NOT_EXPR, type, arg);
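/* An exhaustive check, for illustration, of the truth-table identities
   the cases above rely on: De Morgan for AND/OR, and the fact that
   negating an XOR may negate either operand.  */
#include <assert.h>

static void
demo_invert_truthvalue (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	assert ((!(a && b)) == (!a || !b));
	assert ((!(a || b)) == (!a && !b));
	assert ((!(a ^ b)) == ((!a) ^ b));
      }
}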
3134 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3135 operands are another bit-wise operation with a common input. If so,
3136 distribute the bit operations to save an operation and possibly two if
3137 constants are involved. For example, convert
3138 (A | B) & (A | C) into A | (B & C)
3139 Further simplification will occur if B and C are constants.
3141 If this optimization cannot be done, 0 will be returned. */
3144 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3149 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3150 || TREE_CODE (arg0) == code
3151 || (TREE_CODE (arg0) != BIT_AND_EXPR
3152 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3155 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3157 common = TREE_OPERAND (arg0, 0);
3158 left = TREE_OPERAND (arg0, 1);
3159 right = TREE_OPERAND (arg1, 1);
3161 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3163 common = TREE_OPERAND (arg0, 0);
3164 left = TREE_OPERAND (arg0, 1);
3165 right = TREE_OPERAND (arg1, 0);
3167 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3169 common = TREE_OPERAND (arg0, 1);
3170 left = TREE_OPERAND (arg0, 0);
3171 right = TREE_OPERAND (arg1, 1);
3173 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3175 common = TREE_OPERAND (arg0, 1);
3176 left = TREE_OPERAND (arg0, 0);
3177 right = TREE_OPERAND (arg1, 0);
3182 return fold_build2 (TREE_CODE (arg0), type, common,
3183 fold_build2 (code, type, left, right));
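/* A small exhaustive check, illustrative only, of the distribution the
   function above performs, together with its dual:
   (A | B) & (A | C) == A | (B & C) and (A & B) | (A & C) == A & (B | C).  */
#include <assert.h>

static void
demo_distribute_bit_expr (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
	{
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
}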
3186 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3187 with code CODE. This optimization is unsafe. */
3189 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3191 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3192 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3194 /* (A / C) +- (B / C) -> (A +- B) / C. */
3196 && operand_equal_p (TREE_OPERAND (arg0, 1),
3197 TREE_OPERAND (arg1, 1), 0))
3198 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3199 fold_build2 (code, type,
3200 TREE_OPERAND (arg0, 0),
3201 TREE_OPERAND (arg1, 0)),
3202 TREE_OPERAND (arg0, 1));
3204 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3205 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3206 TREE_OPERAND (arg1, 0), 0)
3207 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3208 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3210 REAL_VALUE_TYPE r0, r1;
3211 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3212 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3214 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3216 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3217 real_arithmetic (&r0, code, &r0, &r1);
3218 return fold_build2 (MULT_EXPR, type,
3219 TREE_OPERAND (arg0, 0),
3220 build_real (type, r0));
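/* A concrete illustration of why the transformation above is unsafe:
   A/C + B/C rounds each quotient separately, while (A + B)/C rounds
   once, so the two forms can disagree in the last bits.  The classic
   0.1 + 0.2 != 0.3 case shows the difference under IEEE double
   arithmetic.  */
#include <assert.h>

static void
demo_distribute_real_division (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  double lhs = a / c + b / c;   /* 0.1 + 0.2 == 0.30000000000000004...  */
  double rhs = (a + b) / c;     /* 3.0 / 10.0 == 0.29999999999999998...  */
  assert (lhs != rhs);
}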
3226 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3227 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3230 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3237 tree size = TYPE_SIZE (TREE_TYPE (inner));
3238 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3239 || POINTER_TYPE_P (TREE_TYPE (inner)))
3240 && host_integerp (size, 0)
3241 && tree_low_cst (size, 0) == bitsize)
3242 return fold_convert (type, inner);
3245 result = build3 (BIT_FIELD_REF, type, inner,
3246 size_int (bitsize), bitsize_int (bitpos));
3248 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3253 /* Optimize a bit-field compare.
3255 There are two cases: First is a compare against a constant and the
3256 second is a comparison of two items where the fields are at the same
3257 bit position relative to the start of a chunk (byte, halfword, word)
3258 large enough to contain it. In these cases we can avoid the shift
3259 implicit in bitfield extractions.
3261 For constants, we emit a compare of the shifted constant with the
3262 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3263 compared. For two fields at the same position, we do the ANDs with the
3264 similar mask and compare the result of the ANDs.
3266 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3267 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3268 are the left and right operands of the comparison, respectively.
3270 If the optimization described above can be done, we return the resulting
3271 tree. Otherwise we return zero. */
3274 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3277 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3278 tree type = TREE_TYPE (lhs);
3279 tree signed_type, unsigned_type;
3280 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3281 enum machine_mode lmode, rmode, nmode;
3282 int lunsignedp, runsignedp;
3283 int lvolatilep = 0, rvolatilep = 0;
3284 tree linner, rinner = NULL_TREE;
3288 /* Get all the information about the extractions being done. If the bit size
3289 is the same as the size of the underlying object, we aren't doing an
3290 extraction at all and so can do nothing. We also don't want to
3291 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3292 then will no longer be able to replace it. */
3293 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3294 &lunsignedp, &lvolatilep, false);
3295 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3296 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3301 /* If this is not a constant, we can only do something if bit positions,
3302 sizes, and signedness are the same. */
3303 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3304 &runsignedp, &rvolatilep, false);
3306 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3307 || lunsignedp != runsignedp || offset != 0
3308 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3312 /* See if we can find a mode to refer to this field. We should be able to,
3313 but fail if we can't. */
3314 nmode = get_best_mode (lbitsize, lbitpos,
3315 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3316 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3317 TYPE_ALIGN (TREE_TYPE (rinner))),
3318 word_mode, lvolatilep || rvolatilep);
3319 if (nmode == VOIDmode)
3322 /* Set signed and unsigned types of the precision of this mode for the
3324 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3325 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3327 /* Compute the bit position and size for the new reference and our offset
3328 within it. If the new reference is the same size as the original, we
3329 won't optimize anything, so return zero. */
3330 nbitsize = GET_MODE_BITSIZE (nmode);
3331 nbitpos = lbitpos & ~ (nbitsize - 1);
3333 if (nbitsize == lbitsize)
3336 if (BYTES_BIG_ENDIAN)
3337 lbitpos = nbitsize - lbitsize - lbitpos;
3339 /* Make the mask to be used against the extracted field. */
3340 mask = build_int_cst (unsigned_type, -1);
3341 mask = force_fit_type (mask, 0, false, false);
3342 mask = fold_convert (unsigned_type, mask);
3343 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3344 mask = const_binop (RSHIFT_EXPR, mask,
3345 size_int (nbitsize - lbitsize - lbitpos), 0);
3348 /* If not comparing with constant, just rework the comparison
3350 return build2 (code, compare_type,
3351 build2 (BIT_AND_EXPR, unsigned_type,
3352 make_bit_field_ref (linner, unsigned_type,
3353 nbitsize, nbitpos, 1),
3355 build2 (BIT_AND_EXPR, unsigned_type,
3356 make_bit_field_ref (rinner, unsigned_type,
3357 nbitsize, nbitpos, 1),
3360 /* Otherwise, we are handling the constant case. See if the constant is too
3361 big for the field. Warn and return a tree for 0 (false) if so. We do
3362 this not only for its own sake, but to avoid having to test for this
3363 error case below. If we didn't, we might generate wrong code.
3365 For unsigned fields, the constant shifted right by the field length should
3366 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3371 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3372 fold_convert (unsigned_type, rhs),
3373 size_int (lbitsize), 0)))
3375 warning (0, "comparison is always %d due to width of bit-field",
3377 return constant_boolean_node (code == NE_EXPR, compare_type);
3382 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3383 size_int (lbitsize - 1), 0);
3384 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3386 warning (0, "comparison is always %d due to width of bit-field",
3388 return constant_boolean_node (code == NE_EXPR, compare_type);
3392 /* Single-bit compares should always be against zero. */
3393 if (lbitsize == 1 && ! integer_zerop (rhs))
3395 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3396 rhs = fold_convert (type, integer_zero_node);
3399 /* Make a new bitfield reference, shift the constant over the
3400 appropriate number of bits and mask it with the computed mask
3401 (in case this was a signed field). If we changed it, make a new one. */
3402 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3405 TREE_SIDE_EFFECTS (lhs) = 1;
3406 TREE_THIS_VOLATILE (lhs) = 1;
3409 rhs = const_binop (BIT_AND_EXPR,
3410 const_binop (LSHIFT_EXPR,
3411 fold_convert (unsigned_type, rhs),
3412 size_int (lbitpos), 0),
3415 return build2 (code, compare_type,
3416 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
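/* Illustrative sketch of the mask arithmetic above, with uint32_t as
   the mode-sized word (nbitsize == 32) and little-endian bit numbering
   (no BYTES_BIG_ENDIAN adjustment): the field is tested in place by
   masking the containing word and shifting the constant up, instead of
   shifting the extracted field down.  */
#include <assert.h>
#include <stdint.h>

static int
demo_bit_field_eq (uint32_t word, unsigned lbitpos, unsigned lbitsize,
		   uint32_t rhs)
{
  unsigned nbitsize = 32;
  uint32_t mask = (uint32_t) -1;
  mask <<= nbitsize - lbitsize;            /* Keep lbitsize one bits...  */
  mask >>= nbitsize - lbitsize - lbitpos;  /* ...positioned at lbitpos.  */
  return (word & mask) == (rhs << lbitpos);
}

static void
demo_optimize_bit_field_compare (void)
{
  /* A 4-bit field at bit position 8, holding the value 5.  */
  uint32_t word = 0x00000500;
  assert (demo_bit_field_eq (word, 8, 4, 5));
  assert (! demo_bit_field_eq (word, 8, 4, 6));
}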
3420 /* Subroutine for fold_truthop: decode a field reference.
3422 If EXP is a comparison reference, we return the innermost reference.
3424 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3425 set to the starting bit number.
3427 If the innermost field can be completely contained in a mode-sized
3428 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3430 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3431 otherwise it is not changed.
3433 *PUNSIGNEDP is set to the signedness of the field.
3435 *PMASK is set to the mask used. This is either contained in a
3436 BIT_AND_EXPR or derived from the width of the field.
3438 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3440 Return 0 if this is not a component reference or is one that we can't
3441 do anything with. */
3444 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3445 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3446 int *punsignedp, int *pvolatilep,
3447 tree *pmask, tree *pand_mask)
3449 tree outer_type = 0;
3451 tree mask, inner, offset;
3453 unsigned int precision;
3455 /* All the optimizations using this function assume integer fields.
3456 There are problems with FP fields since the type_for_size call
3457 below can fail for, e.g., XFmode. */
3458 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3461 /* We are interested in the bare arrangement of bits, so strip everything
3462 that doesn't affect the machine mode. However, record the type of the
3463 outermost expression if it may matter below. */
3464 if (TREE_CODE (exp) == NOP_EXPR
3465 || TREE_CODE (exp) == CONVERT_EXPR
3466 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3467 outer_type = TREE_TYPE (exp);
3470 if (TREE_CODE (exp) == BIT_AND_EXPR)
3472 and_mask = TREE_OPERAND (exp, 1);
3473 exp = TREE_OPERAND (exp, 0);
3474 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3475 if (TREE_CODE (and_mask) != INTEGER_CST)
3479 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3480 punsignedp, pvolatilep, false);
3481 if ((inner == exp && and_mask == 0)
3482 || *pbitsize < 0 || offset != 0
3483 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3486 /* If the number of bits in the reference is the same as the bitsize of
3487 the outer type, then the outer type gives the signedness. Otherwise
3488 (in case of a small bitfield) the signedness is unchanged. */
3489 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3490 *punsignedp = TYPE_UNSIGNED (outer_type);
3492 /* Compute the mask to access the bitfield. */
3493 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3494 precision = TYPE_PRECISION (unsigned_type);
3496 mask = build_int_cst (unsigned_type, -1);
3497 mask = force_fit_type (mask, 0, false, false);
3499 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3500 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3502 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3504 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3505 fold_convert (unsigned_type, and_mask), mask);
3508 *pand_mask = and_mask;
3512 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3516 all_ones_mask_p (tree mask, int size)
3518 tree type = TREE_TYPE (mask);
3519 unsigned int precision = TYPE_PRECISION (type);
3522 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3523 tmask = force_fit_type (tmask, 0, false, false);
3526 tree_int_cst_equal (mask,
3527 const_binop (RSHIFT_EXPR,
3528 const_binop (LSHIFT_EXPR, tmask,
3529 size_int (precision - size),
3531 size_int (precision - size), 0));
3534 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3535 represents the sign bit of EXP's type. If EXP represents a sign
3536 or zero extension, also test VAL against the unextended type.
3537 The return value is the (sub)expression whose sign bit is VAL,
3538 or NULL_TREE otherwise. */
3541 sign_bit_p (tree exp, tree val)
3543 unsigned HOST_WIDE_INT mask_lo, lo;
3544 HOST_WIDE_INT mask_hi, hi;
3548 /* Tree EXP must have an integral type. */
3549 t = TREE_TYPE (exp);
3550 if (! INTEGRAL_TYPE_P (t))
3553 /* Tree VAL must be an integer constant. */
3554 if (TREE_CODE (val) != INTEGER_CST
3555 || TREE_CONSTANT_OVERFLOW (val))
3558 width = TYPE_PRECISION (t);
3559 if (width > HOST_BITS_PER_WIDE_INT)
3561 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3564 mask_hi = ((unsigned HOST_WIDE_INT) -1
3565 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3571 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3574 mask_lo = ((unsigned HOST_WIDE_INT) -1
3575 >> (HOST_BITS_PER_WIDE_INT - width));
3578 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3579 treat VAL as if it were unsigned. */
3580 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3581 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3584 /* Handle extension from a narrower type. */
3585 if (TREE_CODE (exp) == NOP_EXPR
3586 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3587 return sign_bit_p (TREE_OPERAND (exp, 0), val);
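/* A standalone picture of the two-word arithmetic above, with uint32_t
   standing in for HOST_WIDE_INT (so HOST_BITS_PER_WIDE_INT is 32): for
   a 44-bit type the sign-bit constant has its only set bit in the high
   word, and the high mask keeps just the 12 significant bits there.  */
#include <assert.h>
#include <stdint.h>

static void
demo_sign_bit (void)
{
  unsigned width = 44, bits = 32;
  uint32_t hi = (uint32_t) 1 << (width - bits - 1);
  uint32_t mask_hi = (uint32_t) -1 >> (2 * bits - width);

  assert (hi == 0x800);        /* Bit 43 overall is bit 11 of the high word.  */
  assert (mask_hi == 0xfff);   /* The low 12 bits of the high word.  */
}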
3592 /* Subroutine for fold_truthop: determine if an operand is simple enough
3593 to be evaluated unconditionally. */
3596 simple_operand_p (tree exp)
3598 /* Strip any conversions that don't change the machine mode. */
3601 return (CONSTANT_CLASS_P (exp)
3602 || TREE_CODE (exp) == SSA_NAME
3604 && ! TREE_ADDRESSABLE (exp)
3605 && ! TREE_THIS_VOLATILE (exp)
3606 && ! DECL_NONLOCAL (exp)
3607 /* Don't regard global variables as simple. They may be
3608 allocated in ways unknown to the compiler (shared memory,
3609 #pragma weak, etc). */
3610 && ! TREE_PUBLIC (exp)
3611 && ! DECL_EXTERNAL (exp)
3612 /* Loading a static variable is unduly expensive, but global
3613 registers aren't expensive. */
3614 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3617 /* The following functions are subroutines to fold_range_test and allow it to
3618 try to change a logical combination of comparisons into a range test.
3621 X == 2 || X == 3 || X == 4 || X == 5
3625 (unsigned) (X - 2) <= 3
3627 We describe each set of comparisons as being either inside or outside
3628 a range, using a variable named like IN_P, and then describe the
3629 range with a lower and upper bound. If one of the bounds is omitted,
3630 it represents either the highest or lowest value of the type.
3632 In the comments below, we represent a range by two numbers in brackets
3633 preceded by a "+" to designate being inside that range, or a "-" to
3634 designate being outside that range, so the condition can be inverted by
3635 flipping the prefix. An omitted bound is represented by a "-". For
3636 example, "- [-, 10]" means being outside the range starting at the lowest
3637 possible value and ending at 10, in other words, being greater than 10.
3638 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3641 We set up things so that the missing bounds are handled in a consistent
3642 manner so neither a missing bound nor "true" and "false" need to be
3643 handled using a special case. */
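/* An exhaustive check, for illustration, of the transformation quoted
   in the comment above: the chain of equality tests collapses into a
   single unsigned range test.  */
#include <assert.h>

static void
demo_range_test (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      int chained = (x == 2 || x == 3 || x == 4 || x == 5);
      int ranged = ((unsigned) (x - 2) <= 3);
      assert (chained == ranged);
    }
}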
3645 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3646 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3647 and UPPER1_P are nonzero if the respective argument is an upper bound
3648 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3649 must be specified for a comparison. ARG1 will be converted to ARG0's
3650 type if both are specified. */
3653 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3654 tree arg1, int upper1_p)
3660 /* If neither arg represents infinity, do the normal operation.
3661 Else, if not a comparison, return infinity. Else handle the special
3662 comparison rules. Note that most of the cases below won't occur, but
3663 are handled for consistency. */
3665 if (arg0 != 0 && arg1 != 0)
3667 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3668 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3670 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3673 if (TREE_CODE_CLASS (code) != tcc_comparison)
3676 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3677 for neither. In real maths, we cannot assume open ended ranges are
3678 the same. But, this is computer arithmetic, where numbers are finite.
3679 We can therefore make the transformation of any unbounded range with
3680 the value Z, Z being greater than any representable number. This permits
3681 us to treat unbounded ranges as equal. */
3682 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3683 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3687 result = sgn0 == sgn1;
3690 result = sgn0 != sgn1;
3693 result = sgn0 < sgn1;
3696 result = sgn0 <= sgn1;
3699 result = sgn0 > sgn1;
3702 result = sgn0 >= sgn1;
3708 return constant_boolean_node (result, type);
3711 /* Given EXP, a logical expression, set the range it is testing into
3712 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3713 actually being tested. *PLOW and *PHIGH will be made of the same type
3714 as the returned expression. If EXP is not a comparison, we will most
3715 likely not be returning a useful value and range. */
3718 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3720 enum tree_code code;
3721 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3722 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3724 tree low, high, n_low, n_high;
3726 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3727 and see if we can refine the range. Some of the cases below may not
3728 happen, but it doesn't seem worth worrying about this. We "continue"
3729 the outer loop when we've changed something; otherwise we "break"
3730 the switch, which will "break" the while. */
3733 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3737 code = TREE_CODE (exp);
3738 exp_type = TREE_TYPE (exp);
3740 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3742 if (TREE_CODE_LENGTH (code) > 0)
3743 arg0 = TREE_OPERAND (exp, 0);
3744 if (TREE_CODE_CLASS (code) == tcc_comparison
3745 || TREE_CODE_CLASS (code) == tcc_unary
3746 || TREE_CODE_CLASS (code) == tcc_binary)
3747 arg0_type = TREE_TYPE (arg0);
3748 if (TREE_CODE_CLASS (code) == tcc_binary
3749 || TREE_CODE_CLASS (code) == tcc_comparison
3750 || (TREE_CODE_CLASS (code) == tcc_expression
3751 && TREE_CODE_LENGTH (code) > 1))
3752 arg1 = TREE_OPERAND (exp, 1);
3757 case TRUTH_NOT_EXPR:
3758 in_p = ! in_p, exp = arg0;
3761 case EQ_EXPR: case NE_EXPR:
3762 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3763 /* We can only do something if the range is testing for zero
3764 and if the second operand is an integer constant. Note that
3765 saying something is "in" the range we make is done by
3766 complementing IN_P since it will set in the initial case of
3767 being not equal to zero; "out" is leaving it alone. */
3768 if (low == 0 || high == 0
3769 || ! integer_zerop (low) || ! integer_zerop (high)
3770 || TREE_CODE (arg1) != INTEGER_CST)
3775 case NE_EXPR: /* - [c, c] */
3778 case EQ_EXPR: /* + [c, c] */
3779 in_p = ! in_p, low = high = arg1;
3781 case GT_EXPR: /* - [-, c] */
3782 low = 0, high = arg1;
3784 case GE_EXPR: /* + [c, -] */
3785 in_p = ! in_p, low = arg1, high = 0;
3787 case LT_EXPR: /* - [c, -] */
3788 low = arg1, high = 0;
3790 case LE_EXPR: /* + [-, c] */
3791 in_p = ! in_p, low = 0, high = arg1;
3797 /* If this is an unsigned comparison, we also know that EXP is
3798 greater than or equal to zero. We base the range tests we make
3799 on that fact, so we record it here so we can parse existing
3800 range tests. We test arg0_type since often the return type
3801 of, e.g. EQ_EXPR, is boolean. */
3802 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3804 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3806 fold_convert (arg0_type, integer_zero_node),
3810 in_p = n_in_p, low = n_low, high = n_high;
3812 /* If the high bound is missing, but we have a nonzero low
3813 bound, reverse the range so it goes from zero to the low bound
3815 if (high == 0 && low && ! integer_zerop (low))
3818 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3819 integer_one_node, 0);
3820 low = fold_convert (arg0_type, integer_zero_node);
3828 /* (-x) IN [a,b] -> x in [-b, -a] */
3829 n_low = range_binop (MINUS_EXPR, exp_type,
3830 fold_convert (exp_type, integer_zero_node),
3832 n_high = range_binop (MINUS_EXPR, exp_type,
3833 fold_convert (exp_type, integer_zero_node),
3835 low = n_low, high = n_high;
3841 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3842 fold_convert (exp_type, integer_one_node));
3845 case PLUS_EXPR: case MINUS_EXPR:
3846 if (TREE_CODE (arg1) != INTEGER_CST)
3849 /* If EXP is signed, any overflow in the computation is undefined,
3850 so we don't worry about it so long as our computations on
3851 the bounds don't overflow. For unsigned, overflow is defined
3852 and this is exactly the right thing. */
3853 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3854 arg0_type, low, 0, arg1, 0);
3855 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3856 arg0_type, high, 1, arg1, 0);
3857 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3858 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3861 /* Check for an unsigned range which has wrapped around the maximum
3862 value thus making n_high < n_low, and normalize it. */
3863 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3865 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3866 integer_one_node, 0);
3867 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3868 integer_one_node, 0);
3870 /* If the range is of the form +/- [ x+1, x ], we won't
3871 be able to normalize it. But then, it represents the
3872 whole range or the empty set, so make it
3874 if (tree_int_cst_equal (n_low, low)
3875 && tree_int_cst_equal (n_high, high))
3881 low = n_low, high = n_high;
3886 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3887 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3890 if (! INTEGRAL_TYPE_P (arg0_type)
3891 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3892 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3895 n_low = low, n_high = high;
3898 n_low = fold_convert (arg0_type, n_low);
3901 n_high = fold_convert (arg0_type, n_high);
3904 /* If we're converting arg0 from an unsigned type, to exp,
3905 a signed type, we will be doing the comparison as unsigned.
3906 The tests above have already verified that LOW and HIGH are both positive.
3909 So we have to ensure that we will handle large unsigned
3910 values the same way that the current signed bounds treat negative values. */
3913 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3916 tree equiv_type = lang_hooks.types.type_for_mode
3917 (TYPE_MODE (arg0_type), 1);
3919 /* A range without an upper bound is, naturally, unbounded.
3920 Since convert would have cropped a very large value, use
3921 the max value for the destination type. */
3923 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3924 : TYPE_MAX_VALUE (arg0_type);
3926 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3927 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3928 fold_convert (arg0_type,
3930 fold_convert (arg0_type,
3933 /* If the low bound is specified, "and" the range with the
3934 range for which the original unsigned value will be positive. */
3938 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3939 1, n_low, n_high, 1,
3940 fold_convert (arg0_type,
3945 in_p = (n_in_p == in_p);
3949 /* Otherwise, "or" the range with the range of the input
3950 that will be interpreted as negative. */
3951 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3952 0, n_low, n_high, 1,
3953 fold_convert (arg0_type,
3958 in_p = (in_p != n_in_p);
3963 low = n_low, high = n_high;
3973 /* If EXP is a constant, we can evaluate whether this is true or false. */
3974 if (TREE_CODE (exp) == INTEGER_CST)
3976 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3978 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3984 *pin_p = in_p, *plow = low, *phigh = high;
3988 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3989 type, TYPE, return an expression to test if EXP is in (or out of, depending
3990 on IN_P) the range. Return 0 if the test couldn't be created. */
3993 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3995 tree etype = TREE_TYPE (exp);
3998 #ifdef HAVE_canonicalize_funcptr_for_compare
3999 /* Disable this optimization for function pointer expressions
4000 on targets that require function pointer canonicalization. */
4001 if (HAVE_canonicalize_funcptr_for_compare
4002 && TREE_CODE (etype) == POINTER_TYPE
4003 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4009 value = build_range_check (type, exp, 1, low, high);
4011 return invert_truthvalue (value);
4016 if (low == 0 && high == 0)
4017 return fold_convert (type, integer_one_node);
4020 return fold_build2 (LE_EXPR, type, exp,
4021 fold_convert (etype, high));
4024 return fold_build2 (GE_EXPR, type, exp,
4025 fold_convert (etype, low));
4027 if (operand_equal_p (low, high, 0))
4028 return fold_build2 (EQ_EXPR, type, exp,
4029 fold_convert (etype, low));
4031 if (integer_zerop (low))
4033 if (! TYPE_UNSIGNED (etype))
4035 etype = lang_hooks.types.unsigned_type (etype);
4036 high = fold_convert (etype, high);
4037 exp = fold_convert (etype, exp);
4039 return build_range_check (type, exp, 1, 0, high);
4042 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4043 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4045 unsigned HOST_WIDE_INT lo;
4049 prec = TYPE_PRECISION (etype);
4050 if (prec <= HOST_BITS_PER_WIDE_INT)
4053 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4057 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4058 lo = (unsigned HOST_WIDE_INT) -1;
4061 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4063 if (TYPE_UNSIGNED (etype))
4065 etype = lang_hooks.types.signed_type (etype);
4066 exp = fold_convert (etype, exp);
4068 return fold_build2 (GT_EXPR, type, exp,
4069 fold_convert (etype, integer_zero_node));
4073 value = const_binop (MINUS_EXPR, high, low, 0);
4074 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4075 && ! TYPE_UNSIGNED (etype))
4077 tree utype, minv, maxv;
4079 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4080 for the type in question, as we rely on this here. */
4081 switch (TREE_CODE (etype))
4086 /* There is no requirement that LOW be within the range of ETYPE
4087 if the latter is a subtype. It must, however, be within the base
4088 type of ETYPE. So be sure we do the subtraction in that type. */
4089 if (TREE_TYPE (etype))
4090 etype = TREE_TYPE (etype);
4091 utype = lang_hooks.types.unsigned_type (etype);
4092 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4093 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4094 integer_one_node, 1);
4095 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4096 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4100 high = fold_convert (etype, high);
4101 low = fold_convert (etype, low);
4102 exp = fold_convert (etype, exp);
4103 value = const_binop (MINUS_EXPR, high, low, 0);
4111 if (value != 0 && ! TREE_OVERFLOW (value))
4113 /* There is no requirement that LOW be within the range of ETYPE
4114 if the latter is a subtype. It must, however, be within the base
4115 type of ETYPE. So be sure we do the subtraction in that type. */
4116 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4118 etype = TREE_TYPE (etype);
4119 exp = fold_convert (etype, exp);
4120 low = fold_convert (etype, low);
4121 value = fold_convert (etype, value);
4124 return build_range_check (type,
4125 fold_build2 (MINUS_EXPR, etype, exp, low),
4126 1, build_int_cst (etype, 0), value);
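/* A standalone check, illustrative only, of the (c >= 1) && (c <= 127)
   -> (signed char) c > 0 rewrite noted above, over every 8-bit value.
   It assumes the modular conversion to signed char that GCC
   implements.  */
#include <assert.h>

static void
demo_build_range_check (void)
{
  int i;
  for (i = 0; i < 256; i++)
    {
      unsigned char c = (unsigned char) i;
      int in_range = (c >= 1 && c <= 127);
      int as_signed = ((signed char) c > 0);
      assert (in_range == as_signed);
    }
}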
4132 /* Given two ranges, see if we can merge them into one. Return 1 if we
4133 can, 0 if we can't. Set the output range into the specified parameters. */
4136 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4137 tree high0, int in1_p, tree low1, tree high1)
4145 int lowequal = ((low0 == 0 && low1 == 0)
4146 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4147 low0, 0, low1, 0)));
4148 int highequal = ((high0 == 0 && high1 == 0)
4149 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4150 high0, 1, high1, 1)));
4152 /* Make range 0 be the range that starts first, or ends last if they
4153 start at the same value. Swap them if it isn't. */
4154 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4157 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4158 high1, 1, high0, 1))))
4160 temp = in0_p, in0_p = in1_p, in1_p = temp;
4161 tem = low0, low0 = low1, low1 = tem;
4162 tem = high0, high0 = high1, high1 = tem;
4165 /* Now flag two cases, whether the ranges are disjoint or whether the
4166 second range is totally subsumed in the first. Note that the tests
4167 below are simplified by the ones above. */
4168 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4169 high0, 1, low1, 0));
4170 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4171 high1, 1, high0, 1));
4173 /* We now have four cases, depending on whether we are including or
4174 excluding the two ranges. */
4177 /* If they don't overlap, the result is false. If the second range
4178 is a subset it is the result. Otherwise, the range is from the start
4179 of the second to the end of the first. */
4181 in_p = 0, low = high = 0;
4183 in_p = 1, low = low1, high = high1;
4185 in_p = 1, low = low1, high = high0;
4188 else if (in0_p && ! in1_p)
4190 /* If they don't overlap, the result is the first range. If they are
4191 equal, the result is false. If the second range is a subset of the
4192 first, and the ranges begin at the same place, we go from just after
4193 the end of the first range to the end of the second. If the second
4194 range is not a subset of the first, or if it is a subset and both
4195 ranges end at the same place, the range starts at the start of the
4196 first range and ends just before the second range.
4197 Otherwise, we can't describe this as a single range. */
4199 in_p = 1, low = low0, high = high0;
4200 else if (lowequal && highequal)
4201 in_p = 0, low = high = 0;
4202 else if (subset && lowequal)
4204 in_p = 1, high = high0;
4205 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4206 integer_one_node, 0);
4208 else if (! subset || highequal)
4210 in_p = 1, low = low0;
4211 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4212 integer_one_node, 0);
4218 else if (! in0_p && in1_p)
4220 /* If they don't overlap, the result is the second range. If the second
4221 is a subset of the first, the result is false. Otherwise,
4222 the range starts just after the first range and ends at the
4223 end of the second. */
4225 in_p = 1, low = low1, high = high1;
4226 else if (subset || highequal)
4227 in_p = 0, low = high = 0;
4230 in_p = 1, high = high1;
4231 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4232 integer_one_node, 0);
4238 /* The case where we are excluding both ranges. Here the complex case
4239 is if they don't overlap. In that case, the only time we have a
4240 range is if they are adjacent. If the second is a subset of the
4241 first, the result is the first. Otherwise, the range to exclude
4242 starts at the beginning of the first range and ends at the end of the second. */
4246 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4247 range_binop (PLUS_EXPR, NULL_TREE,
4249 integer_one_node, 1),
4251 in_p = 0, low = low0, high = high1;
4254 /* Canonicalize - [min, x] into - [-, x]. */
4255 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4256 switch (TREE_CODE (TREE_TYPE (low0)))
4259 if (TYPE_PRECISION (TREE_TYPE (low0))
4260 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4265 if (tree_int_cst_equal (low0,
4266 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4270 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4271 && integer_zerop (low0))
4278 /* Canonicalize - [x, max] into - [x, -]. */
4279 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4280 switch (TREE_CODE (TREE_TYPE (high1)))
4283 if (TYPE_PRECISION (TREE_TYPE (high1))
4284 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4289 if (tree_int_cst_equal (high1,
4290 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4294 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4295 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4297 integer_one_node, 1)))
4304 /* The ranges might also be adjacent between the maximum and
4305 minimum values of the given type. For
4306 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4307 return + [x + 1, y - 1]. */
4308 if (low0 == 0 && high1 == 0)
4310 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4311 integer_one_node, 1);
4312 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4313 integer_one_node, 0);
4314 if (low == 0 || high == 0)
4324 in_p = 0, low = low0, high = high0;
4326 in_p = 0, low = low0, high = high1;
4329 *pin_p = in_p, *plow = low, *phigh = high;
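/* A quick check, for illustration, of the in0_p && in1_p case above:
   ANDing the two "in" ranges + [2, 5] and + [4, 10] yields their
   intersection, the single range + [4, 5] ("from the start of the
   second to the end of the first").  */
#include <assert.h>

static void
demo_merge_ranges (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    {
      int both = (x >= 2 && x <= 5) && (x >= 4 && x <= 10);
      int merged = (x >= 4 && x <= 5);
      assert (both == merged);
    }
}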
4334 /* Subroutine of fold, looking inside expressions of the form
4335 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4336 of the COND_EXPR. This function is being used also to optimize
4337 A op B ? C : A, by reversing the comparison first.
4339 Return a folded expression whose code is not a COND_EXPR
4340 anymore, or NULL_TREE if no folding opportunity is found. */
4343 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4345 enum tree_code comp_code = TREE_CODE (arg0);
4346 tree arg00 = TREE_OPERAND (arg0, 0);
4347 tree arg01 = TREE_OPERAND (arg0, 1);
4348 tree arg1_type = TREE_TYPE (arg1);
4354 /* If we have A op 0 ? A : -A, consider applying the following
4357 A == 0? A : -A same as -A
4358 A != 0? A : -A same as A
4359 A >= 0? A : -A same as abs (A)
4360 A > 0? A : -A same as abs (A)
4361 A <= 0? A : -A same as -abs (A)
4362 A < 0? A : -A same as -abs (A)
4364 None of these transformations work for modes with signed
4365 zeros. If A is +/-0, the first two transformations will
4366 change the sign of the result (from +0 to -0, or vice
4367 versa). The last four will fix the sign of the result,
4368 even though the original expressions could be positive or
4369 negative, depending on the sign of A.
4371 Note that all these transformations are correct if A is
4372 NaN, since the two alternatives (A and -A) are also NaNs. */
4373 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4374 ? real_zerop (arg01)
4375 : integer_zerop (arg01))
4376 && ((TREE_CODE (arg2) == NEGATE_EXPR
4377 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4378 /* In the case that A is of the form X-Y, '-A' (arg2) may
4379 have already been folded to Y-X, check for that. */
4380 || (TREE_CODE (arg1) == MINUS_EXPR
4381 && TREE_CODE (arg2) == MINUS_EXPR
4382 && operand_equal_p (TREE_OPERAND (arg1, 0),
4383 TREE_OPERAND (arg2, 1), 0)
4384 && operand_equal_p (TREE_OPERAND (arg1, 1),
4385 TREE_OPERAND (arg2, 0), 0))))
4390 tem = fold_convert (arg1_type, arg1);
4391 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4394 return pedantic_non_lvalue (fold_convert (type, arg1));
4397 if (flag_trapping_math)
4402 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4403 arg1 = fold_convert (lang_hooks.types.signed_type
4404 (TREE_TYPE (arg1)), arg1);
4405 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4406 return pedantic_non_lvalue (fold_convert (type, tem));
4409 if (flag_trapping_math)
4413 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4414 arg1 = fold_convert (lang_hooks.types.signed_type
4415 (TREE_TYPE (arg1)), arg1);
4416 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4417 return negate_expr (fold_convert (type, tem));
4419 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4423 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4424 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4425 both transformations are correct when A is NaN: A != 0
4426 is then true, and A == 0 is false. */
4428 if (integer_zerop (arg01) && integer_zerop (arg2))
4430 if (comp_code == NE_EXPR)
4431 return pedantic_non_lvalue (fold_convert (type, arg1));
4432 else if (comp_code == EQ_EXPR)
4433 return fold_convert (type, integer_zero_node);
4436 /* Try some transformations of A op B ? A : B.
4438 A == B? A : B same as B
4439 A != B? A : B same as A
4440 A >= B? A : B same as max (A, B)
4441 A > B? A : B same as max (B, A)
4442 A <= B? A : B same as min (A, B)
4443 A < B? A : B same as min (B, A)
4445 As above, these transformations don't work in the presence
4446 of signed zeros. For example, if A and B are zeros of
4447 opposite sign, the first two transformations will change
4448 the sign of the result. In the last four, the original
4449 expressions give different results for (A=+0, B=-0) and
4450 (A=-0, B=+0), but the transformed expressions do not.
4452 The first two transformations are correct if either A or B
4453 is a NaN. In the first transformation, the condition will
4454 be false, and B will indeed be chosen. In the case of the
4455 second transformation, the condition A != B will be true,
4456 and A will be chosen.
4458 The conversions to max() and min() are not correct if B is
4459 a number and A is not. The conditions in the original
4460 expressions will be false, so all four give B. The min()
4461 and max() versions would give a NaN instead. */
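
/* Illustrative sketch, not part of the compiler: `example_fold_min' is
   hypothetical user code.  Integer operands have no signed zeros or
   NaNs, so the conditional below can be folded directly to
   MIN_EXPR <a, b>.  */
static int
example_fold_min (int a, int b)
{
  return a < b ? a : b;		/* folded to min (a, b) */
}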
4462 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4463 /* Avoid these transformations if the COND_EXPR may be used
4464 as an lvalue in the C++ front-end. PR c++/19199. */
4466 || strcmp (lang_hooks.name, "GNU C++") != 0
4467 || ! maybe_lvalue_p (arg1)
4468 || ! maybe_lvalue_p (arg2)))
4470 tree comp_op0 = arg00;
4471 tree comp_op1 = arg01;
4472 tree comp_type = TREE_TYPE (comp_op0);
4474 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4475 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4485 return pedantic_non_lvalue (fold_convert (type, arg2));
4487 return pedantic_non_lvalue (fold_convert (type, arg1));
4492 /* In C++ a ?: expression can be an lvalue, so put the
4493 operand which will be used if they are equal first,
4494 so that we can convert this back to the
4495 corresponding COND_EXPR. */
4496 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4498 comp_op0 = fold_convert (comp_type, comp_op0);
4499 comp_op1 = fold_convert (comp_type, comp_op1);
4500 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4501 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4502 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4503 return pedantic_non_lvalue (fold_convert (type, tem));
4510 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4512 comp_op0 = fold_convert (comp_type, comp_op0);
4513 comp_op1 = fold_convert (comp_type, comp_op1);
4514 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4515 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4516 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4517 return pedantic_non_lvalue (fold_convert (type, tem));
4521 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4522 return pedantic_non_lvalue (fold_convert (type, arg2));
4525 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4526 return pedantic_non_lvalue (fold_convert (type, arg1));
4529 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4534 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4535 we might still be able to simplify this. For example,
4536 if C1 is one less or one more than C2, this might have started
4537 out as a MIN or MAX and been transformed by this function.
4538 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
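
/* Illustrative sketch, not part of the compiler: `example_rebuilt_min'
   is hypothetical user code for the C1 == C2 + 1 case below.  Since
   a < 5 selects only values no greater than 4, the whole conditional
   is MIN_EXPR <a, 4>.  */
static int
example_rebuilt_min (int a)
{
  return a < 5 ? a : 4;		/* folded back to min (a, 4) */
}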
4540 if (INTEGRAL_TYPE_P (type)
4541 && TREE_CODE (arg01) == INTEGER_CST
4542 && TREE_CODE (arg2) == INTEGER_CST)
4546 /* We can replace A with C1 in this case. */
4547 arg1 = fold_convert (type, arg01);
4548 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4551 /* If C1 is C2 + 1, this is min(A, C2). */
4552 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4554 && operand_equal_p (arg01,
4555 const_binop (PLUS_EXPR, arg2,
4556 integer_one_node, 0),
4558 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4563 /* If C1 is C2 - 1, this is min(A, C2). */
4564 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4566 && operand_equal_p (arg01,
4567 const_binop (MINUS_EXPR, arg2,
4568 integer_one_node, 0),
4570 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4575 /* If C1 is C2 - 1, this is max(A, C2). */
4576 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4578 && operand_equal_p (arg01,
4579 const_binop (MINUS_EXPR, arg2,
4580 integer_one_node, 0),
4582 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4587 /* If C1 is C2 + 1, this is max(A, C2). */
4588 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4590 && operand_equal_p (arg01,
4591 const_binop (PLUS_EXPR, arg2,
4592 integer_one_node, 0),
4594 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4608 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4609 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4612 /* EXP is some logical combination of boolean tests. See if we can
4613 merge it into some range test. Return the new tree if so. */
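
/* Illustrative sketch, not part of the compiler: `example_is_digit' is
   hypothetical user code.  The two comparisons below merge into a
   single range check, roughly (unsigned char) (ch - '0') <= 9, so only
   one test is needed.  */
static int
example_is_digit (int ch)
{
  return ch >= '0' && ch <= '9';	/* merged into one range test */
}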
4616 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4618 int or_op = (code == TRUTH_ORIF_EXPR
4619 || code == TRUTH_OR_EXPR);
4620 int in0_p, in1_p, in_p;
4621 tree low0, low1, low, high0, high1, high;
4622 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4623 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4626 /* If this is an OR operation, invert both sides; we will invert
4627 again at the end. */
4629 in0_p = ! in0_p, in1_p = ! in1_p;
4631 /* If both expressions are the same, if we can merge the ranges, and we
4632 can build the range test, return it or it inverted. If one of the
4633 ranges is always true or always false, consider it to be the same
4634 expression as the other. */
4635 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4636 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4638 && 0 != (tem = (build_range_check (type,
4640 : rhs != 0 ? rhs : integer_zero_node,
4642 return or_op ? invert_truthvalue (tem) : tem;
4644 /* On machines where branches are expensive, if this is a
4645 short-circuited branch and the underlying object on both sides
4646 is the same, make a non-short-circuit operation. */
4647 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4648 && lhs != 0 && rhs != 0
4649 && (code == TRUTH_ANDIF_EXPR
4650 || code == TRUTH_ORIF_EXPR)
4651 && operand_equal_p (lhs, rhs, 0))
4653 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4654 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4655 which cases we can't do this. */
4656 if (simple_operand_p (lhs))
4657 return build2 (code == TRUTH_ANDIF_EXPR
4658 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4661 else if (lang_hooks.decls.global_bindings_p () == 0
4662 && ! CONTAINS_PLACEHOLDER_P (lhs))
4664 tree common = save_expr (lhs);
4666 if (0 != (lhs = build_range_check (type, common,
4667 or_op ? ! in0_p : in0_p,
4669 && (0 != (rhs = build_range_check (type, common,
4670 or_op ? ! in1_p : in1_p,
4672 return build2 (code == TRUTH_ANDIF_EXPR
4673 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4681 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4682 bit value. Arrange things so the extra bits will be set to zero if and
4683 only if C is sign-extended to its full width. If MASK is nonzero,
4684 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4687 unextend (tree c, int p, int unsignedp, tree mask)
4689 tree type = TREE_TYPE (c);
4690 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4693 if (p == modesize || unsignedp)
4696 /* We work by getting just the sign bit into the low-order bit, then
4697 into the high-order bit, then sign-extend. We then XOR that value
4699 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4700 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4702 /* We must use a signed type in order to get an arithmetic right shift.
4703 However, we must also avoid introducing accidental overflows, so that
4704 a subsequent call to integer_zerop will work. Hence we must
4705 do the type conversion here. At this point, the constant is either
4706 zero or one, and the conversion to a signed type can never overflow.
4707 We could get an overflow if this conversion is done anywhere else. */
4708 if (TYPE_UNSIGNED (type))
4709 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4711 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4712 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4714 temp = const_binop (BIT_AND_EXPR, temp,
4715 fold_convert (TREE_TYPE (c), mask), 0);
4716 /* If necessary, convert the type back to match the type of C. */
4717 if (TYPE_UNSIGNED (type))
4718 temp = fold_convert (type, temp);
4720 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
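
/* Illustrative sketch, not part of the compiler: the shift-and-XOR
   game above is related to the classic sign-extension idiom shown in
   the hypothetical helper below.  For a value V already masked to its
   low P bits, XORing in the sign bit M and subtracting M reproduces
   the sign-extended value.  */
static unsigned int
example_sign_extend (unsigned int v, int p)
{
  unsigned int m = 1u << (p - 1);	/* sign bit of a P-bit field */
  return (v ^ m) - m;			/* high bits set iff sign bit set */
}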
4723 /* Find ways of folding logical expressions of LHS and RHS:
4724 Try to merge two comparisons to the same innermost item.
4725 Look for range tests like "ch >= '0' && ch <= '9'".
4726 Look for combinations of simple terms on machines with expensive branches
4727 and evaluate the RHS unconditionally.
4729 For example, if we have p->a == 2 && p->b == 4 and we can make an
4730 object large enough to span both A and B, we can do this with a comparison
4731 against the object ANDed with a mask.
4733 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4734 operations to do this with one comparison.
4736 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4737 function and the one above.
4739 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4740 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4742 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4745 We return the simplified tree or 0 if no optimization is possible. */
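
/* Illustrative sketch, not part of the compiler: `struct example_pair'
   and `example_both_equal' are hypothetical user code.  When the
   target allows it, the two bit-field tests below may be merged into a
   single load, mask and compare of the byte containing both fields
   (the exact mask and value depend on endianness).  */
struct example_pair { unsigned int a : 4; unsigned int b : 4; };
static int
example_both_equal (struct example_pair *p)
{
  return p->a == 2 && p->b == 4;	/* one masked compare after merging */
}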
4748 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4750 /* If this is the "or" of two comparisons, we can do something if
4751 the comparisons are NE_EXPR. If this is the "and", we can do something
4752 if the comparisons are EQ_EXPR. I.e.,
4753 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4755 WANTED_CODE is this operation code. For single bit fields, we can
4756 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4757 comparison for one-bit fields. */
4759 enum tree_code wanted_code;
4760 enum tree_code lcode, rcode;
4761 tree ll_arg, lr_arg, rl_arg, rr_arg;
4762 tree ll_inner, lr_inner, rl_inner, rr_inner;
4763 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4764 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4765 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4766 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4767 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4768 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4769 enum machine_mode lnmode, rnmode;
4770 tree ll_mask, lr_mask, rl_mask, rr_mask;
4771 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4772 tree l_const, r_const;
4773 tree lntype, rntype, result;
4774 int first_bit, end_bit;
4777 /* Start by getting the comparison codes. Fail if anything is volatile.
4778 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4779 it were surrounded with a NE_EXPR. */
4781 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4784 lcode = TREE_CODE (lhs);
4785 rcode = TREE_CODE (rhs);
4787 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4789 lhs = build2 (NE_EXPR, truth_type, lhs,
4790 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4794 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4796 rhs = build2 (NE_EXPR, truth_type, rhs,
4797 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4801 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4802 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4805 ll_arg = TREE_OPERAND (lhs, 0);
4806 lr_arg = TREE_OPERAND (lhs, 1);
4807 rl_arg = TREE_OPERAND (rhs, 0);
4808 rr_arg = TREE_OPERAND (rhs, 1);
4810 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4811 if (simple_operand_p (ll_arg)
4812 && simple_operand_p (lr_arg))
4815 if (operand_equal_p (ll_arg, rl_arg, 0)
4816 && operand_equal_p (lr_arg, rr_arg, 0))
4818 result = combine_comparisons (code, lcode, rcode,
4819 truth_type, ll_arg, lr_arg);
4823 else if (operand_equal_p (ll_arg, rr_arg, 0)
4824 && operand_equal_p (lr_arg, rl_arg, 0))
4826 result = combine_comparisons (code, lcode,
4827 swap_tree_comparison (rcode),
4828 truth_type, ll_arg, lr_arg);
4834 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4835 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4837 /* If the RHS can be evaluated unconditionally and its operands are
4838 simple, it wins to evaluate the RHS unconditionally on machines
4839 with expensive branches. In this case, this isn't a comparison
4840 that can be merged. Avoid doing this if the RHS is a floating-point
4841 comparison since those can trap. */
4843 if (BRANCH_COST >= 2
4844 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4845 && simple_operand_p (rl_arg)
4846 && simple_operand_p (rr_arg))
4848 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4849 if (code == TRUTH_OR_EXPR
4850 && lcode == NE_EXPR && integer_zerop (lr_arg)
4851 && rcode == NE_EXPR && integer_zerop (rr_arg)
4852 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4853 return build2 (NE_EXPR, truth_type,
4854 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4856 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4858 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4859 if (code == TRUTH_AND_EXPR
4860 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4861 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4862 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4863 return build2 (EQ_EXPR, truth_type,
4864 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4866 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4868 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4869 return build2 (code, truth_type, lhs, rhs);
4872 /* See if the comparisons can be merged. Then get all the parameters for
4875 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4876 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4880 ll_inner = decode_field_reference (ll_arg,
4881 &ll_bitsize, &ll_bitpos, &ll_mode,
4882 &ll_unsignedp, &volatilep, &ll_mask,
4884 lr_inner = decode_field_reference (lr_arg,
4885 &lr_bitsize, &lr_bitpos, &lr_mode,
4886 &lr_unsignedp, &volatilep, &lr_mask,
4888 rl_inner = decode_field_reference (rl_arg,
4889 &rl_bitsize, &rl_bitpos, &rl_mode,
4890 &rl_unsignedp, &volatilep, &rl_mask,
4892 rr_inner = decode_field_reference (rr_arg,
4893 &rr_bitsize, &rr_bitpos, &rr_mode,
4894 &rr_unsignedp, &volatilep, &rr_mask,
4897 /* The inner operation on the lhs of each comparison must be the
4898 same if we are to be able to do anything. Then see if we have
4899 constants. If not, the same must be true for the rhs. */
4901 if (volatilep || ll_inner == 0 || rl_inner == 0
4902 || ! operand_equal_p (ll_inner, rl_inner, 0))
4905 if (TREE_CODE (lr_arg) == INTEGER_CST
4906 && TREE_CODE (rr_arg) == INTEGER_CST)
4907 l_const = lr_arg, r_const = rr_arg;
4908 else if (lr_inner == 0 || rr_inner == 0
4909 || ! operand_equal_p (lr_inner, rr_inner, 0))
4912 l_const = r_const = 0;
4914 /* If either comparison code is not correct for our logical operation,
4915 fail. However, we can convert a one-bit comparison against zero into
4916 the opposite comparison against that bit being set in the field. */
4918 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4919 if (lcode != wanted_code)
4921 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4923 /* Make the left operand unsigned, since we are only interested
4924 in the value of one bit. Otherwise we are doing the wrong thing below. */
4933 /* This is analogous to the code for l_const above. */
4934 if (rcode != wanted_code)
4936 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4945 /* After this point all optimizations will generate bit-field
4946 references, which we might not want. */
4947 if (! lang_hooks.can_use_bit_fields_p ())
4950 /* See if we can find a mode that contains both fields being compared on
4951 the left. If we can't, fail. Otherwise, update all constants and masks
4952 to be relative to a field of that size. */
4953 first_bit = MIN (ll_bitpos, rl_bitpos);
4954 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4955 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4956 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4958 if (lnmode == VOIDmode)
4961 lnbitsize = GET_MODE_BITSIZE (lnmode);
4962 lnbitpos = first_bit & ~ (lnbitsize - 1);
4963 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4964 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4966 if (BYTES_BIG_ENDIAN)
4968 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4969 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4972 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4973 size_int (xll_bitpos), 0);
4974 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4975 size_int (xrl_bitpos), 0);
4979 l_const = fold_convert (lntype, l_const);
4980 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4981 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4982 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4983 fold_build1 (BIT_NOT_EXPR,
4987 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4989 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4994 r_const = fold_convert (lntype, r_const);
4995 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4996 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4997 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4998 fold_build1 (BIT_NOT_EXPR,
5002 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5004 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5008 /* If the right sides are not constant, do the same for it. Also,
5009 disallow this optimization if a size or signedness mismatch occurs
5010 between the left and right sides. */
5013 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5014 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5015 /* Make sure the two fields on the right
5016 correspond to the left without being swapped. */
5017 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5020 first_bit = MIN (lr_bitpos, rr_bitpos);
5021 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5022 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5023 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5025 if (rnmode == VOIDmode)
5028 rnbitsize = GET_MODE_BITSIZE (rnmode);
5029 rnbitpos = first_bit & ~ (rnbitsize - 1);
5030 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5031 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5033 if (BYTES_BIG_ENDIAN)
5035 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5036 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5039 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5040 size_int (xlr_bitpos), 0);
5041 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5042 size_int (xrr_bitpos), 0);
5044 /* Make a mask that corresponds to both fields being compared.
5045 Do this for both items being compared. If the operands are the
5046 same size and the bits being compared are in the same position
5047 then we can do this by masking both and comparing the masked
5049 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5050 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5051 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5053 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5054 ll_unsignedp || rl_unsignedp);
5055 if (! all_ones_mask_p (ll_mask, lnbitsize))
5056 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5058 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5059 lr_unsignedp || rr_unsignedp);
5060 if (! all_ones_mask_p (lr_mask, rnbitsize))
5061 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5063 return build2 (wanted_code, truth_type, lhs, rhs);
5066 /* There is still another way we can do something: If both pairs of
5067 fields being compared are adjacent, we may be able to make a wider
5068 field containing them both.
5070 Note that we still must mask the lhs/rhs expressions. Furthermore,
5071 the mask must be shifted to account for the shift done by
5072 make_bit_field_ref. */
5073 if ((ll_bitsize + ll_bitpos == rl_bitpos
5074 && lr_bitsize + lr_bitpos == rr_bitpos)
5075 || (ll_bitpos == rl_bitpos + rl_bitsize
5076 && lr_bitpos == rr_bitpos + rr_bitsize))
5080 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5081 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5082 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5083 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5085 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5086 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5087 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5088 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5090 /* Convert to the smaller type before masking out unwanted bits. */
5092 if (lntype != rntype)
5094 if (lnbitsize > rnbitsize)
5096 lhs = fold_convert (rntype, lhs);
5097 ll_mask = fold_convert (rntype, ll_mask);
5100 else if (lnbitsize < rnbitsize)
5102 rhs = fold_convert (lntype, rhs);
5103 lr_mask = fold_convert (lntype, lr_mask);
5108 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5109 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5111 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5112 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5114 return build2 (wanted_code, truth_type, lhs, rhs);
5120 /* Handle the case of comparisons with constants. If there is something in
5121 common between the masks, those bits of the constants must be the same.
5122 If not, the condition is always false. Test for this to avoid generating
5123 incorrect code below. */
5124 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5125 if (! integer_zerop (result)
5126 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5127 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5129 if (wanted_code == NE_EXPR)
5131 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5132 return constant_boolean_node (true, truth_type);
5136 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5137 return constant_boolean_node (false, truth_type);
5141 /* Construct the expression we will return. First get the component
5142 reference we will make. Unless the mask is all ones the width of
5143 that field, perform the mask operation. Then compare with the merged constant. */
5145 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5146 ll_unsignedp || rl_unsignedp);
5148 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5149 if (! all_ones_mask_p (ll_mask, lnbitsize))
5150 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5152 return build2 (wanted_code, truth_type, result,
5153 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5156 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5160 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5163 enum tree_code op_code;
5164 tree comp_const = op1;
5166 int consts_equal, consts_lt;
5169 STRIP_SIGN_NOPS (arg0);
5171 op_code = TREE_CODE (arg0);
5172 minmax_const = TREE_OPERAND (arg0, 1);
5173 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5174 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5175 inner = TREE_OPERAND (arg0, 0);
5177 /* If something does not permit us to optimize, return the original tree. */
5178 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5179 || TREE_CODE (comp_const) != INTEGER_CST
5180 || TREE_CONSTANT_OVERFLOW (comp_const)
5181 || TREE_CODE (minmax_const) != INTEGER_CST
5182 || TREE_CONSTANT_OVERFLOW (minmax_const))
5185 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5186 and GT_EXPR, doing the rest with recursive calls using logical
5190 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5192 /* FIXME: We should be able to invert code without building a
5193 scratch tree node, but doing so would require us to
5194 duplicate a part of invert_truthvalue here. */
5195 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5196 tem = optimize_minmax_comparison (TREE_CODE (tem),
5198 TREE_OPERAND (tem, 0),
5199 TREE_OPERAND (tem, 1));
5200 return invert_truthvalue (tem);
5205 fold_build2 (TRUTH_ORIF_EXPR, type,
5206 optimize_minmax_comparison
5207 (EQ_EXPR, type, arg0, comp_const),
5208 optimize_minmax_comparison
5209 (GT_EXPR, type, arg0, comp_const));
5212 if (op_code == MAX_EXPR && consts_equal)
5213 /* MAX (X, 0) == 0 -> X <= 0 */
5214 return fold_build2 (LE_EXPR, type, inner, comp_const);
5216 else if (op_code == MAX_EXPR && consts_lt)
5217 /* MAX (X, 0) == 5 -> X == 5 */
5218 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5220 else if (op_code == MAX_EXPR)
5221 /* MAX (X, 0) == -1 -> false */
5222 return omit_one_operand (type, integer_zero_node, inner);
5224 else if (consts_equal)
5225 /* MIN (X, 0) == 0 -> X >= 0 */
5226 return fold_build2 (GE_EXPR, type, inner, comp_const);
5229 /* MIN (X, 0) == 5 -> false */
5230 return omit_one_operand (type, integer_zero_node, inner);
5233 /* MIN (X, 0) == -1 -> X == -1 */
5234 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5237 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5238 /* MAX (X, 0) > 0 -> X > 0
5239 MAX (X, 0) > 5 -> X > 5 */
5240 return fold_build2 (GT_EXPR, type, inner, comp_const);
5242 else if (op_code == MAX_EXPR)
5243 /* MAX (X, 0) > -1 -> true */
5244 return omit_one_operand (type, integer_one_node, inner);
5246 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5247 /* MIN (X, 0) > 0 -> false
5248 MIN (X, 0) > 5 -> false */
5249 return omit_one_operand (type, integer_zero_node, inner);
5252 /* MIN (X, 0) > -1 -> X > -1 */
5253 return fold_build2 (GT_EXPR, type, inner, comp_const);
5260 /* T is an integer expression that is being multiplied, divided, or taken a
5261 modulus (CODE says which and what kind of divide or modulus) by a
5262 constant C. See if we can eliminate that operation by folding it with
5263 other operations already in T. WIDE_TYPE, if non-null, is a type that
5264 should be used for the computation if wider than our type.
5266 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5267 (X * 2) + (Y * 4). We must, however, be assured that either the original
5268 expression would not overflow or that overflow is undefined for the type
5269 in the language in question.
5271 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5272 the machine has a multiply-accumulate insn or that this is part of an
5273 addressing calculation.
5275 If we return a non-null expression, it is an equivalent form of the
5276 original computation, but need not be in the original type. */
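
/* Illustrative sketch, not part of the compiler: `example_muldiv' is
   hypothetical user code for the example above.  Because signed
   overflow is undefined in C, the division may be distributed over the
   sum, giving x * 2 + y * 4 with no division at all.  */
static int
example_muldiv (int x, int y)
{
  return (x * 8 + y * 16) / 4;	/* folded to x * 2 + y * 4 */
}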
5279 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5281 /* To avoid exponential search depth, refuse to allow recursion past
5282 three levels. Beyond that (1) it's highly unlikely that we'll find
5283 something interesting and (2) we've probably processed it before
5284 when we built the inner expression. */
5293 ret = extract_muldiv_1 (t, c, code, wide_type);
5300 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5302 tree type = TREE_TYPE (t);
5303 enum tree_code tcode = TREE_CODE (t);
5304 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5305 > GET_MODE_SIZE (TYPE_MODE (type)))
5306 ? wide_type : type);
5308 int same_p = tcode == code;
5309 tree op0 = NULL_TREE, op1 = NULL_TREE;
5311 /* Don't deal with constants of zero here; they confuse the code below. */
5312 if (integer_zerop (c))
5315 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5316 op0 = TREE_OPERAND (t, 0);
5318 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5319 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5321 /* Note that we need not handle conditional operations here since fold
5322 already handles those cases. So just do arithmetic here. */
5326 /* For a constant, we can always simplify if we are a multiply
5327 or (for divide and modulus) if it is a multiple of our constant. */
5328 if (code == MULT_EXPR
5329 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5330 return const_binop (code, fold_convert (ctype, t),
5331 fold_convert (ctype, c), 0);
5334 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5335 /* If op0 is an expression ... */
5336 if ((COMPARISON_CLASS_P (op0)
5337 || UNARY_CLASS_P (op0)
5338 || BINARY_CLASS_P (op0)
5339 || EXPRESSION_CLASS_P (op0))
5340 /* ... and is unsigned, and its type is smaller than ctype,
5341 then we cannot pass through as widening. */
5342 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5343 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5344 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5345 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5346 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5347 /* ... or this is a truncation (t is narrower than op0),
5348 then we cannot pass through this narrowing. */
5349 || (GET_MODE_SIZE (TYPE_MODE (type))
5350 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5351 /* ... or signedness changes for division or modulus,
5352 then we cannot pass through this conversion. */
5353 || (code != MULT_EXPR
5354 && (TYPE_UNSIGNED (ctype)
5355 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5358 /* Pass the constant down and see if we can make a simplification. If
5359 we can, replace this expression with the inner simplification for
5360 possible later conversion to our or some other type. */
5361 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5362 && TREE_CODE (t2) == INTEGER_CST
5363 && ! TREE_CONSTANT_OVERFLOW (t2)
5364 && (0 != (t1 = extract_muldiv (op0, t2, code,
5366 ? ctype : NULL_TREE))))
5371 /* If widening the type changes it from signed to unsigned, then we
5372 must avoid building ABS_EXPR itself as unsigned. */
5373 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5375 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5376 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5378 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5379 return fold_convert (ctype, t1);
5385 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5386 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5389 case MIN_EXPR: case MAX_EXPR:
5390 /* If widening the type changes the signedness, then we can't perform
5391 this optimization as that changes the result. */
5392 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5395 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5396 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5397 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5399 if (tree_int_cst_sgn (c) < 0)
5400 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5402 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5403 fold_convert (ctype, t2));
5407 case LSHIFT_EXPR: case RSHIFT_EXPR:
5408 /* If the second operand is constant, this is a multiplication
5409 or floor division by a power of two, so we can treat it that
5410 way unless the multiplier or divisor overflows. Signed
5411 left-shift overflow is implementation-defined rather than
5412 undefined in C90, so do not convert signed left shift into multiplication. */
5414 if (TREE_CODE (op1) == INTEGER_CST
5415 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5416 /* const_binop may not detect overflow correctly,
5417 so check for it explicitly here. */
5418 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5419 && TREE_INT_CST_HIGH (op1) == 0
5420 && 0 != (t1 = fold_convert (ctype,
5421 const_binop (LSHIFT_EXPR,
5424 && ! TREE_OVERFLOW (t1))
5425 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5426 ? MULT_EXPR : FLOOR_DIV_EXPR,
5427 ctype, fold_convert (ctype, op0), t1),
5428 c, code, wide_type);
5431 case PLUS_EXPR: case MINUS_EXPR:
5432 /* See if we can eliminate the operation on both sides. If we can, we
5433 can return a new PLUS or MINUS. If we can't, the only remaining
5434 cases where we can do anything are if the second operand is a
5436 t1 = extract_muldiv (op0, c, code, wide_type);
5437 t2 = extract_muldiv (op1, c, code, wide_type);
5438 if (t1 != 0 && t2 != 0
5439 && (code == MULT_EXPR
5440 /* If not multiplication, we can only do this if both operands
5441 are divisible by c. */
5442 || (multiple_of_p (ctype, op0, c)
5443 && multiple_of_p (ctype, op1, c))))
5444 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5445 fold_convert (ctype, t2));
5447 /* If this was a subtraction, negate OP1 and set it to be an addition.
5448 This simplifies the logic below. */
5449 if (tcode == MINUS_EXPR)
5450 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5452 if (TREE_CODE (op1) != INTEGER_CST)
5455 /* If either OP1 or C are negative, this optimization is not safe for
5456 some of the division and remainder types while for others we need
5457 to change the code. */
5458 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5460 if (code == CEIL_DIV_EXPR)
5461 code = FLOOR_DIV_EXPR;
5462 else if (code == FLOOR_DIV_EXPR)
5463 code = CEIL_DIV_EXPR;
5464 else if (code != MULT_EXPR
5465 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5469 /* If it's a multiply or a division/modulus operation of a multiple
5470 of our constant, do the operation and verify it doesn't overflow. */
5471 if (code == MULT_EXPR
5472 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5474 op1 = const_binop (code, fold_convert (ctype, op1),
5475 fold_convert (ctype, c), 0);
5476 /* We allow the constant to overflow with wrapping semantics. */
5478 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5484 /* If we have an unsigned type that is not a sizetype, we cannot widen
5485 the operation since it will change the result if the original
5486 computation overflowed. */
5487 if (TYPE_UNSIGNED (ctype)
5488 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5492 /* If we were able to eliminate our operation from the first side,
5493 apply our operation to the second side and reform the PLUS. */
5494 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5495 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5497 /* The last case is if we are a multiply. In that case, we can
5498 apply the distributive law to commute the multiply and addition
5499 if the multiplication of the constants doesn't overflow. */
5500 if (code == MULT_EXPR)
5501 return fold_build2 (tcode, ctype,
5502 fold_build2 (code, ctype,
5503 fold_convert (ctype, op0),
5504 fold_convert (ctype, c)),
5510 /* We have a special case here if we are doing something like
5511 (C * 8) % 4 since we know that's zero. */
5512 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5513 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5514 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5515 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5516 return omit_one_operand (type, integer_zero_node, op0);
5518 /* ... fall through ... */
5520 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5521 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5522 /* If we can extract our operation from the LHS, do so and return a
5523 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5524 do something only if the second operand is a constant. */
5526 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5527 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5528 fold_convert (ctype, op1));
5529 else if (tcode == MULT_EXPR && code == MULT_EXPR
5530 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5531 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5532 fold_convert (ctype, t1));
5533 else if (TREE_CODE (op1) != INTEGER_CST)
5536 /* If these are the same operation types, we can associate them
5537 assuming no overflow. */
5539 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5540 fold_convert (ctype, c), 0))
5541 && ! TREE_OVERFLOW (t1))
5542 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5544 /* If these operations "cancel" each other, we have the main
5545 optimizations of this pass, which occur when either constant is a
5546 multiple of the other, in which case we replace this with either an
5547 operation of CODE or TCODE.
5549 If we have an unsigned type that is not a sizetype, we cannot do
5550 this since it will change the result if the original computation overflowed. */
5552 if ((! TYPE_UNSIGNED (ctype)
5553 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5555 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5556 || (tcode == MULT_EXPR
5557 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5558 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5560 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5561 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5562 fold_convert (ctype,
5563 const_binop (TRUNC_DIV_EXPR,
5565 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5566 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5567 fold_convert (ctype,
5568 const_binop (TRUNC_DIV_EXPR,
5580 /* Return a node which has the indicated constant VALUE (either 0 or
5581 1), and is of the indicated TYPE. */
5584 constant_boolean_node (int value, tree type)
5586 if (type == integer_type_node)
5587 return value ? integer_one_node : integer_zero_node;
5588 else if (type == boolean_type_node)
5589 return value ? boolean_true_node : boolean_false_node;
5591 return build_int_cst (type, value);
5595 /* Return true if expr looks like an ARRAY_REF and set base and
5596 offset to the appropriate trees. If there is no offset,
5597 offset is set to NULL_TREE. Base will be canonicalized to
5598 something you can get the element type from using
5599 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5600 in bytes to the base. */
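
/* Illustrative sketch, not part of the compiler: `example_array' and
   `example_elt_addr' are hypothetical user code.  The ADDR_EXPR of an
   ARRAY_REF below decomposes into base `example_array' and byte offset
   i * sizeof (int); a bare pointer would instead yield a null offset.  */
static int example_array[10];
static int *
example_elt_addr (int i)
{
  return &example_array[i];
}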
5603 extract_array_ref (tree expr, tree *base, tree *offset)
5605 /* One canonical form is a PLUS_EXPR with the first
5606 argument being an ADDR_EXPR with a possible NOP_EXPR
5608 if (TREE_CODE (expr) == PLUS_EXPR)
5610 tree op0 = TREE_OPERAND (expr, 0);
5611 tree inner_base, dummy1;
5612 /* Strip NOP_EXPRs here because the C front ends and/or
5613 folders may present us with (int *)&x.a + 4B. */
5615 if (extract_array_ref (op0, &inner_base, &dummy1))
5618 if (dummy1 == NULL_TREE)
5619 *offset = TREE_OPERAND (expr, 1);
5621 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5622 dummy1, TREE_OPERAND (expr, 1));
5626 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5627 which we transform into an ADDR_EXPR with appropriate
5628 offset. For other arguments to the ADDR_EXPR we assume
5629 zero offset and as such do not care about the ADDR_EXPR
5630 type and strip possible nops from it. */
5631 else if (TREE_CODE (expr) == ADDR_EXPR)
5633 tree op0 = TREE_OPERAND (expr, 0);
5634 if (TREE_CODE (op0) == ARRAY_REF)
5636 tree idx = TREE_OPERAND (op0, 1);
5637 *base = TREE_OPERAND (op0, 0);
5638 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5639 array_ref_element_size (op0));
5643 /* Handle array-to-pointer decay as &a. */
5644 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5645 *base = TREE_OPERAND (expr, 0);
5648 *offset = NULL_TREE;
5652 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5653 else if (SSA_VAR_P (expr)
5654 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5657 *offset = NULL_TREE;
5665 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5666 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5667 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5668 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5669 COND is the first argument to CODE; otherwise (as in the example
5670 given here), it is the second argument. TYPE is the type of the
5671 original expression. Return NULL_TREE if no simplification is possible. */
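
/* Illustrative sketch, not part of the compiler: `example_push_const'
   is hypothetical user code.  The constant operand is pushed into both
   arms, so the expression below becomes b ? x + 1 : y + 1, where each
   arm may then simplify further.  */
static int
example_push_const (int b, int x, int y)
{
  return (b ? x : y) + 1;	/* folded to b ? x + 1 : y + 1 */
}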
5675 fold_binary_op_with_conditional_arg (enum tree_code code,
5676 tree type, tree op0, tree op1,
5677 tree cond, tree arg, int cond_first_p)
5679 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5680 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5681 tree test, true_value, false_value;
5682 tree lhs = NULL_TREE;
5683 tree rhs = NULL_TREE;
5685 /* This transformation is only worthwhile if we don't have to wrap
5686 arg in a SAVE_EXPR, and the operation can be simplified on at least
5687 one of the branches once it is pushed inside the COND_EXPR. */
5688 if (!TREE_CONSTANT (arg))
5691 if (TREE_CODE (cond) == COND_EXPR)
5693 test = TREE_OPERAND (cond, 0);
5694 true_value = TREE_OPERAND (cond, 1);
5695 false_value = TREE_OPERAND (cond, 2);
5696 /* If this operand throws an exception, then it does not make
5697 sense to try to perform a logical or arithmetic operation involving it. */
5699 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5701 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5706 tree testtype = TREE_TYPE (cond);
5708 true_value = constant_boolean_node (true, testtype);
5709 false_value = constant_boolean_node (false, testtype);
5712 arg = fold_convert (arg_type, arg);
5715 true_value = fold_convert (cond_type, true_value);
5717 lhs = fold_build2 (code, type, true_value, arg);
5719 lhs = fold_build2 (code, type, arg, true_value);
5723 false_value = fold_convert (cond_type, false_value);
5725 rhs = fold_build2 (code, type, false_value, arg);
5727 rhs = fold_build2 (code, type, arg, false_value);
5730 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5731 return fold_convert (type, test);
5735 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5737 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5738 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5739 ADDEND is the same as X.
5741 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5742 and finite. The problematic cases are when X is zero, and its mode
5743 has signed zeros. In the case of rounding towards -infinity,
5744 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5745 modes, X + 0 is not the same as X because -0 + 0 is +0. */
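
/* Illustrative sketch, not part of the compiler: `example_add_zero' is
   hypothetical user code.  The addition below folds to plain `x' only
   when signed zeros can be ignored, since -0.0 + 0.0 is +0.0 under the
   default rounding mode; likewise x - 0.0 is unsafe when rounding
   toward -infinity, where 0.0 - 0.0 is -0.0.  */
static double
example_add_zero (double x)
{
  return x + 0.0;		/* folded to x only without signed zeros */
}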
5748 fold_real_zero_addition_p (tree type, tree addend, int negate)
5750 if (!real_zerop (addend))
5753 /* Don't allow the fold with -fsignaling-nans. */
5754 if (HONOR_SNANS (TYPE_MODE (type)))
5757 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5758 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5761 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5762 if (TREE_CODE (addend) == REAL_CST
5763 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5766 /* The mode has signed zeros, and we have to honor their sign.
5767 In this situation, there is only one case we can return true for.
5768 X - 0 is the same as X unless rounding towards -infinity is
5770 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5773 /* Subroutine of fold() that checks comparisons of built-in math
5774 functions against real constants.
5776 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5777 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5778 is the type of the result and ARG0 and ARG1 are the operands of the
5779 comparison. ARG1 must be a TREE_REAL_CST.
5781 The function returns the constant folded tree if a simplification
5782 can be made, and NULL_TREE otherwise. */
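
/* Illustrative sketch, not part of the compiler: `example_sqrt_cmp' is
   hypothetical user code.  With NaNs honored, the comparison below
   folds to x >= 0.0 && x < 9.0; if NaNs can be ignored it becomes
   simply x < 9.0.  */
static int
example_sqrt_cmp (double x)
{
  return __builtin_sqrt (x) < 3.0;
}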
5785 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5786 tree type, tree arg0, tree arg1)
5790 if (BUILTIN_SQRT_P (fcode))
5792 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5793 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5795 c = TREE_REAL_CST (arg1);
5796 if (REAL_VALUE_NEGATIVE (c))
5798 /* sqrt(x) < y is always false, if y is negative. */
5799 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5800 return omit_one_operand (type, integer_zero_node, arg);
5802 /* sqrt(x) > y is always true, if y is negative and we
5803 don't care about NaNs, i.e. negative values of x. */
5804 if (code == NE_EXPR || !HONOR_NANS (mode))
5805 return omit_one_operand (type, integer_one_node, arg);
5807 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5808 return fold_build2 (GE_EXPR, type, arg,
5809 build_real (TREE_TYPE (arg), dconst0));
5811 else if (code == GT_EXPR || code == GE_EXPR)
5815 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5816 real_convert (&c2, mode, &c2);
5818 if (REAL_VALUE_ISINF (c2))
5820 /* sqrt(x) > y is x == +Inf, when y is very large. */
5821 if (HONOR_INFINITIES (mode))
5822 return fold_build2 (EQ_EXPR, type, arg,
5823 build_real (TREE_TYPE (arg), c2));
5825 /* sqrt(x) > y is always false, when y is very large
5826 and we don't care about infinities. */
5827 return omit_one_operand (type, integer_zero_node, arg);
5830 /* sqrt(x) > c is the same as x > c*c. */
5831 return fold_build2 (code, type, arg,
5832 build_real (TREE_TYPE (arg), c2));
5834 else if (code == LT_EXPR || code == LE_EXPR)
5838 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5839 real_convert (&c2, mode, &c2);
5841 if (REAL_VALUE_ISINF (c2))
5843 /* sqrt(x) < y is always true, when y is a very large
5844 value and we don't care about NaNs or Infinities. */
5845 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5846 return omit_one_operand (type, integer_one_node, arg);
5848 /* sqrt(x) < y is x != +Inf when y is very large and we
5849 don't care about NaNs. */
5850 if (! HONOR_NANS (mode))
5851 return fold_build2 (NE_EXPR, type, arg,
5852 build_real (TREE_TYPE (arg), c2));
5854 /* sqrt(x) < y is x >= 0 when y is very large and we
5855 don't care about Infinities. */
5856 if (! HONOR_INFINITIES (mode))
5857 return fold_build2 (GE_EXPR, type, arg,
5858 build_real (TREE_TYPE (arg), dconst0));
5860 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5861 if (lang_hooks.decls.global_bindings_p () != 0
5862 || CONTAINS_PLACEHOLDER_P (arg))
5865 arg = save_expr (arg);
5866 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5867 fold_build2 (GE_EXPR, type, arg,
5868 build_real (TREE_TYPE (arg),
5870 fold_build2 (NE_EXPR, type, arg,
5871 build_real (TREE_TYPE (arg),
5875 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5876 if (! HONOR_NANS (mode))
5877 return fold_build2 (code, type, arg,
5878 build_real (TREE_TYPE (arg), c2));
5880 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5881 if (lang_hooks.decls.global_bindings_p () == 0
5882 && ! CONTAINS_PLACEHOLDER_P (arg))
5884 arg = save_expr (arg);
5885 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5886 fold_build2 (GE_EXPR, type, arg,
5887 build_real (TREE_TYPE (arg),
5889 fold_build2 (code, type, arg,
5890 build_real (TREE_TYPE (arg),
5899 /* Subroutine of fold() that optimizes comparisons against Infinities,
5900 either +Inf or -Inf.
5902 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5903 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5904 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5906 The function returns the constant folded tree if a simplification
5907 can be made, and NULL_TREE otherwise. */
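
/* Illustrative sketch, not part of the compiler: `example_le_inf' is
   hypothetical user code.  With NaNs honored, x <= +Inf folds to the
   self-comparison x == x, which is false exactly for NaNs; if NaNs can
   be ignored it folds to constant true.  */
static int
example_le_inf (double x)
{
  return x <= __builtin_inf ();	/* folded to x == x */
}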
5910 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5912 enum machine_mode mode;
5913 REAL_VALUE_TYPE max;
5917 mode = TYPE_MODE (TREE_TYPE (arg0));
5919 /* For negative infinity swap the sense of the comparison. */
5920 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5922 code = swap_tree_comparison (code);
5927 /* x > +Inf is always false, if we ignore sNaNs. */
5928 if (HONOR_SNANS (mode))
5930 return omit_one_operand (type, integer_zero_node, arg0);
5933 /* x <= +Inf is always true, if we don't care about NaNs. */
5934 if (! HONOR_NANS (mode))
5935 return omit_one_operand (type, integer_one_node, arg0);
5937 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5938 if (lang_hooks.decls.global_bindings_p () == 0
5939 && ! CONTAINS_PLACEHOLDER_P (arg0))
5941 arg0 = save_expr (arg0);
5942 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5948 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5949 real_maxval (&max, neg, mode);
5950 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5951 arg0, build_real (TREE_TYPE (arg0), max));
5954 /* x < +Inf is always equal to x <= DBL_MAX. */
5955 real_maxval (&max, neg, mode);
5956 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5957 arg0, build_real (TREE_TYPE (arg0), max));
5960 /* x != +Inf is always equal to !(x > DBL_MAX). */
5961 real_maxval (&max, neg, mode);
5962 if (! HONOR_NANS (mode))
5963 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5964 arg0, build_real (TREE_TYPE (arg0), max));
5966 /* The transformation below creates non-gimple code and thus is
5967 not appropriate if we are in gimple form. */
5971 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5972 arg0, build_real (TREE_TYPE (arg0), max));
5973 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5982 /* Subroutine of fold() that optimizes comparisons of a division by
5983 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
5986 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5987 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5988 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5990 The function returns the constant folded tree if a simplification
5991 can be made, and NULL_TREE otherwise. */
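
/* Illustrative sketch, not part of the compiler: `example_div_cmp' is
   hypothetical user code.  For unsigned x, x / 4 == 2 holds exactly
   when 8 <= x && x <= 11, so the division folds to a single range
   check, roughly x - 8 <= 3 in unsigned arithmetic.  */
static int
example_div_cmp (unsigned int x)
{
  return x / 4 == 2;		/* folded to a range check on [8, 11] */
}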
5994 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5996 tree prod, tmp, hi, lo;
5997 tree arg00 = TREE_OPERAND (arg0, 0);
5998 tree arg01 = TREE_OPERAND (arg0, 1);
5999 unsigned HOST_WIDE_INT lpart;
6000 HOST_WIDE_INT hpart;
6003 /* We have to do this the hard way to detect unsigned overflow.
6004 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6005 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6006 TREE_INT_CST_HIGH (arg01),
6007 TREE_INT_CST_LOW (arg1),
6008 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6009 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6010 prod = force_fit_type (prod, -1, overflow, false);
6012 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6014 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6017 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6018 overflow = add_double (TREE_INT_CST_LOW (prod),
6019 TREE_INT_CST_HIGH (prod),
6020 TREE_INT_CST_LOW (tmp),
6021 TREE_INT_CST_HIGH (tmp),
6023 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6024 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6025 TREE_CONSTANT_OVERFLOW (prod));
6027 else if (tree_int_cst_sgn (arg01) >= 0)
6029 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6030 switch (tree_int_cst_sgn (arg1))
6033 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6038 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6043 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6053 /* A negative divisor reverses the relational operators. */
6054 code = swap_tree_comparison (code);
6056 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6057 switch (tree_int_cst_sgn (arg1))
6060 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6065 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6070 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6082 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6083 return omit_one_operand (type, integer_zero_node, arg00);
6084 if (TREE_OVERFLOW (hi))
6085 return fold_build2 (GE_EXPR, type, arg00, lo);
6086 if (TREE_OVERFLOW (lo))
6087 return fold_build2 (LE_EXPR, type, arg00, hi);
6088 return build_range_check (type, arg00, 1, lo, hi);
6091 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6092 return omit_one_operand (type, integer_one_node, arg00);
6093 if (TREE_OVERFLOW (hi))
6094 return fold_build2 (LT_EXPR, type, arg00, lo);
6095 if (TREE_OVERFLOW (lo))
6096 return fold_build2 (GT_EXPR, type, arg00, hi);
6097 return build_range_check (type, arg00, 0, lo, hi);
6100 if (TREE_OVERFLOW (lo))
6101 return omit_one_operand (type, integer_zero_node, arg00);
6102 return fold_build2 (LT_EXPR, type, arg00, lo);
6105 if (TREE_OVERFLOW (hi))
6106 return omit_one_operand (type, integer_one_node, arg00);
6107 return fold_build2 (LE_EXPR, type, arg00, hi);
6110 if (TREE_OVERFLOW (hi))
6111 return omit_one_operand (type, integer_zero_node, arg00);
6112 return fold_build2 (GT_EXPR, type, arg00, hi);
6115 if (TREE_OVERFLOW (lo))
6116 return omit_one_operand (type, integer_one_node, arg00);
6117 return fold_build2 (GE_EXPR, type, arg00, lo);
6127 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6128 equality/inequality test, then return a simplified form of the test
6129 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6133 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6136 /* If this is testing a single bit, we can optimize the test. */
6137 if ((code == NE_EXPR || code == EQ_EXPR)
6138 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6139 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6141 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6142 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6143 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6145 if (arg00 != NULL_TREE
6146 /* This is only a win if casting to a signed type is cheap,
6147 i.e. when arg00's type is not a partial mode. */
6148 && TYPE_PRECISION (TREE_TYPE (arg00))
6149 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6151 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6152 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6153 result_type, fold_convert (stype, arg00),
6154 fold_convert (stype, integer_zero_node));
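
/* Illustrative sketch, not part of the compiler: `example_sign_bit' is
   hypothetical user code.  The mask below is the sign bit of a 32-bit
   int, so the test folds to a signed comparison against zero.  */
static int
example_sign_bit (int x)
{
  return (x & 0x80000000u) != 0;	/* folded to x < 0 */
}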
6161 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6162 equality/inequality test, then return a simplified form of
6163 the test using shifts and logical operations. Otherwise return
6164 NULL. TYPE is the desired result type. */
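
/* Illustrative sketch, not part of the compiler: `example_bit3' is
   hypothetical user code.  Testing a single non-sign bit folds into a
   shift and mask as described above.  */
static int
example_bit3 (int x)
{
  return (x & 8) != 0;		/* folded to (x >> 3) & 1 */
}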
6167 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6170 /* If this is testing a single bit, we can optimize the test. */
6171 if ((code == NE_EXPR || code == EQ_EXPR)
6172 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6173 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6175 tree inner = TREE_OPERAND (arg0, 0);
6176 tree type = TREE_TYPE (arg0);
6177 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6178 enum machine_mode operand_mode = TYPE_MODE (type);
6180 tree signed_type, unsigned_type, intermediate_type;
6183 /* First, see if we can fold the single bit test into a sign-bit test. */
6185 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6190 /* Otherwise we have (A & C) != 0 where C is a single bit,
6191 convert that into ((A >> C2) & 1), where C2 = log2(C).
6192 Similarly for (A & C) == 0. */
6194 /* If INNER is a right shift of a constant and it plus BITNUM does
6195 not overflow, adjust BITNUM and INNER. */
6196 if (TREE_CODE (inner) == RSHIFT_EXPR
6197 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6198 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6199 && bitnum < TYPE_PRECISION (type)
6200 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6201 bitnum - TYPE_PRECISION (type)))
6203 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6204 inner = TREE_OPERAND (inner, 0);
6207 /* If we are going to be able to omit the AND below, we must do our
6208 operations as unsigned. If we must use the AND, we have a choice.
6209 Normally unsigned is faster, but for some machines signed is. */
6210 #ifdef LOAD_EXTEND_OP
6211 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6212 && !flag_syntax_only) ? 0 : 1;
6217 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6218 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6219 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6220 inner = fold_convert (intermediate_type, inner);
6223 inner = build2 (RSHIFT_EXPR, intermediate_type,
6224 inner, size_int (bitnum));
6226 if (code == EQ_EXPR)
6227 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6228 inner, integer_one_node);
6230 /* Put the AND last so it can combine with more things. */
6231 inner = build2 (BIT_AND_EXPR, intermediate_type,
6232 inner, integer_one_node);
6234 /* Make sure to return the proper type. */
6235 inner = fold_convert (result_type, inner);
6242 /* Check whether we are allowed to reorder operands arg0 and arg1,
6243 such that the evaluation of arg1 occurs before arg0. */
6246 reorder_operands_p (tree arg0, tree arg1)
6248 if (! flag_evaluation_order)
6250 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6252 return ! TREE_SIDE_EFFECTS (arg0)
6253 && ! TREE_SIDE_EFFECTS (arg1);
6256 /* Test whether it is preferable to swap two operands, ARG0 and
6257 ARG1, for example because ARG0 is an integer constant and ARG1
6258 isn't. If REORDER is true, only recommend swapping if we can
6259 evaluate the operands in reverse order. */
6262 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6264 STRIP_SIGN_NOPS (arg0);
6265 STRIP_SIGN_NOPS (arg1);
6267 if (TREE_CODE (arg1) == INTEGER_CST)
6269 if (TREE_CODE (arg0) == INTEGER_CST)
6272 if (TREE_CODE (arg1) == REAL_CST)
6274 if (TREE_CODE (arg0) == REAL_CST)
6277 if (TREE_CODE (arg1) == COMPLEX_CST)
6279 if (TREE_CODE (arg0) == COMPLEX_CST)
6282 if (TREE_CONSTANT (arg1))
6284 if (TREE_CONSTANT (arg0))
6290 if (reorder && flag_evaluation_order
6291 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6299 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6300 for commutative and comparison operators. Ensuring a canonical
6301 form allows the optimizers to find additional redundancies without
6302 having to explicitly check for both orderings. */
6303 if (TREE_CODE (arg0) == SSA_NAME
6304 && TREE_CODE (arg1) == SSA_NAME
6305 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
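/* Illustrative example (editorial addition): given SSA names a_1 and
   a_2, both "a_2 + a_1" and "a_1 + a_2" are canonicalized to
   "a_1 + a_2" (lower SSA version first), so later passes see a single
   spelling of the expression.  */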
6311 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6312 ARG0 is extended to a wider type. */
6315 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6317 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6319 tree shorter_type, outer_type;
6323 if (arg0_unw == arg0)
6325 shorter_type = TREE_TYPE (arg0_unw);
6327 #ifdef HAVE_canonicalize_funcptr_for_compare
6328 /* Disable this optimization if we're casting a function pointer
6329 type on targets that require function pointer canonicalization. */
6330 if (HAVE_canonicalize_funcptr_for_compare
6331 && TREE_CODE (shorter_type) == POINTER_TYPE
6332 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6336 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6339 arg1_unw = get_unwidened (arg1, shorter_type);
6341 /* If possible, express the comparison in the shorter mode. */
6342 if ((code == EQ_EXPR || code == NE_EXPR
6343 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6344 && (TREE_TYPE (arg1_unw) == shorter_type
6345 || (TREE_CODE (arg1_unw) == INTEGER_CST
6346 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6347 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6348 && int_fits_type_p (arg1_unw, shorter_type))))
6349 return fold_build2 (code, type, arg0_unw,
6350 fold_convert (shorter_type, arg1_unw));
6352 if (TREE_CODE (arg1_unw) != INTEGER_CST
6353 || TREE_CODE (shorter_type) != INTEGER_TYPE
6354 || !int_fits_type_p (arg1_unw, shorter_type))
6357 /* If we are comparing with an integer that does not fit into the range
6358 of the shorter type, the result is known. */
6359 outer_type = TREE_TYPE (arg1_unw);
6360 min = lower_bound_in_type (outer_type, shorter_type);
6361 max = upper_bound_in_type (outer_type, shorter_type);
6363 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6365 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6372 return omit_one_operand (type, integer_zero_node, arg0);
6377 return omit_one_operand (type, integer_one_node, arg0);
6383 return omit_one_operand (type, integer_one_node, arg0);
6385 return omit_one_operand (type, integer_zero_node, arg0);
6390 return omit_one_operand (type, integer_zero_node, arg0);
6392 return omit_one_operand (type, integer_one_node, arg0);
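/* Illustrative example (editorial addition): for a signed char C,

     (int) c == 1000   folds to   0
     (int) c != 1000   folds to   1

   because 1000 lies outside [-128, 127], the range of the unwidened
   type (assuming an 8-bit signed char).  */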
6401 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6402 ARG0 just the signedness is changed. */
6405 fold_sign_changed_comparison (enum tree_code code, tree type,
6406 tree arg0, tree arg1)
6408 tree arg0_inner, tmp;
6409 tree inner_type, outer_type;
6411 if (TREE_CODE (arg0) != NOP_EXPR
6412 && TREE_CODE (arg0) != CONVERT_EXPR)
6415 outer_type = TREE_TYPE (arg0);
6416 arg0_inner = TREE_OPERAND (arg0, 0);
6417 inner_type = TREE_TYPE (arg0_inner);
6419 #ifdef HAVE_canonicalize_funcptr_for_compare
6420 /* Disable this optimization if we're casting a function pointer
6421 type on targets that require function pointer canonicalization. */
6422 if (HAVE_canonicalize_funcptr_for_compare
6423 && TREE_CODE (inner_type) == POINTER_TYPE
6424 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6428 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6431 if (TREE_CODE (arg1) != INTEGER_CST
6432 && !((TREE_CODE (arg1) == NOP_EXPR
6433 || TREE_CODE (arg1) == CONVERT_EXPR)
6434 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6437 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6442 if (TREE_CODE (arg1) == INTEGER_CST)
6444 tmp = build_int_cst_wide (inner_type,
6445 TREE_INT_CST_LOW (arg1),
6446 TREE_INT_CST_HIGH (arg1));
6447 arg1 = force_fit_type (tmp, 0,
6448 TREE_OVERFLOW (arg1),
6449 TREE_CONSTANT_OVERFLOW (arg1));
6452 arg1 = fold_convert (inner_type, arg1);
6454 return fold_build2 (code, type, arg0_inner, arg1);
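/* Illustrative example (editorial addition): for unsigned int U and
   32-bit int, "(int) u == 5" becomes "u == 5u": only the signedness
   of the cast changes, so the constant is re-expressed in the inner
   type and the conversion dropped.  */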
6457 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is the
6458 step of the array. Reconstructs s and delta in the case of s * delta
6459 being an integer constant (and thus already folded).
6460 ADDR is the address. MULT is the multiplicative expression.
6461 If the function succeeds, the new address expression is returned. Otherwise
6462 NULL_TREE is returned. */
6465 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6467 tree s, delta, step;
6468 tree ref = TREE_OPERAND (addr, 0), pref;
6472 /* Canonicalize op1 into a possibly non-constant delta
6473 and an INTEGER_CST s. */
6474 if (TREE_CODE (op1) == MULT_EXPR)
6476 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6481 if (TREE_CODE (arg0) == INTEGER_CST)
6486 else if (TREE_CODE (arg1) == INTEGER_CST)
6494 else if (TREE_CODE (op1) == INTEGER_CST)
6501 /* Pretend the expression is delta * 1. */
6503 s = integer_one_node;
6506 for (;; ref = TREE_OPERAND (ref, 0))
6508 if (TREE_CODE (ref) == ARRAY_REF)
6510 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6514 step = array_ref_element_size (ref);
6515 if (TREE_CODE (step) != INTEGER_CST)
6520 if (! tree_int_cst_equal (step, s))
6525 /* See if delta is a multiple of step. */
6526 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6535 if (!handled_component_p (ref))
6539 /* We found a suitable array reference. Copy everything up to it,
6540 and replace the index. */
6542 pref = TREE_OPERAND (addr, 0);
6543 ret = copy_node (pref);
6548 pref = TREE_OPERAND (pref, 0);
6549 TREE_OPERAND (pos, 0) = copy_node (pref);
6550 pos = TREE_OPERAND (pos, 0);
6553 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6554 fold_convert (itype,
6555 TREE_OPERAND (pos, 1)),
6556 fold_convert (itype, delta));
6558 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
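/* Illustrative example (editorial addition): for "int a[10];" with a
   4-byte element step,

     &a[i] + 4 * d   becomes   &a[i + d]
     &a[i] + 8       becomes   &a[i + 2]   (8 = step * 2)

   the second form using the constant-delta case above.  */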
6562 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6563 means A >= Y && A != MAX, but in this case we know that
6564 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6567 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6569 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6571 if (TREE_CODE (bound) == LT_EXPR)
6572 a = TREE_OPERAND (bound, 0);
6573 else if (TREE_CODE (bound) == GT_EXPR)
6574 a = TREE_OPERAND (bound, 1);
6578 typea = TREE_TYPE (a);
6579 if (!INTEGRAL_TYPE_P (typea)
6580 && !POINTER_TYPE_P (typea))
6583 if (TREE_CODE (ineq) == LT_EXPR)
6585 a1 = TREE_OPERAND (ineq, 1);
6586 y = TREE_OPERAND (ineq, 0);
6588 else if (TREE_CODE (ineq) == GT_EXPR)
6590 a1 = TREE_OPERAND (ineq, 0);
6591 y = TREE_OPERAND (ineq, 1);
6596 if (TREE_TYPE (a1) != typea)
6599 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6600 if (!integer_onep (diff))
6603 return fold_build2 (GE_EXPR, type, a, y);
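/* Illustrative example (editorial addition): in "a < n && a + 1 > m"
   the bound a < n guarantees a + 1 does not wrap, so the second test
   is rewritten as the non-sharp inequality "a >= m".  */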
6606 /* Fold a sum or difference in which at least one operand is a multiplication.
6607 Returns the folded tree or NULL if no simplification could be made. */
6610 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6612 tree arg00, arg01, arg10, arg11;
6613 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6615 /* (A * C) +- (B * C) -> (A+-B) * C.
6616 (A * C) +- A -> A * (C+-1).
6617 We are most concerned about the case where C is a constant,
6618 but other combinations show up during loop reduction. Since
6619 it is not difficult, try all four possibilities. */
6621 if (TREE_CODE (arg0) == MULT_EXPR)
6623 arg00 = TREE_OPERAND (arg0, 0);
6624 arg01 = TREE_OPERAND (arg0, 1);
6629 if (!FLOAT_TYPE_P (type))
6630 arg01 = build_int_cst (type, 1);
6632 arg01 = build_real (type, dconst1);
6634 if (TREE_CODE (arg1) == MULT_EXPR)
6636 arg10 = TREE_OPERAND (arg1, 0);
6637 arg11 = TREE_OPERAND (arg1, 1);
6642 if (!FLOAT_TYPE_P (type))
6643 arg11 = build_int_cst (type, 1);
6645 arg11 = build_real (type, dconst1);
6649 if (operand_equal_p (arg01, arg11, 0))
6650 same = arg01, alt0 = arg00, alt1 = arg10;
6651 else if (operand_equal_p (arg00, arg10, 0))
6652 same = arg00, alt0 = arg01, alt1 = arg11;
6653 else if (operand_equal_p (arg00, arg11, 0))
6654 same = arg00, alt0 = arg01, alt1 = arg10;
6655 else if (operand_equal_p (arg01, arg10, 0))
6656 same = arg01, alt0 = arg00, alt1 = arg11;
6658 /* No identical multiplicands; see if we can find a common
6659 power-of-two factor in non-power-of-two multiplies. This
6660 can help in multi-dimensional array access. */
6661 else if (host_integerp (arg01, 0)
6662 && host_integerp (arg11, 0))
6664 HOST_WIDE_INT int01, int11, tmp;
6667 int01 = TREE_INT_CST_LOW (arg01);
6668 int11 = TREE_INT_CST_LOW (arg11);
6670 /* Move min of absolute values to int11. */
6671 if ((int01 >= 0 ? int01 : -int01)
6672 < (int11 >= 0 ? int11 : -int11))
6674 tmp = int01, int01 = int11, int11 = tmp;
6675 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6682 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6684 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6685 build_int_cst (TREE_TYPE (arg00),
6690 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6695 return fold_build2 (MULT_EXPR, type,
6696 fold_build2 (code, type,
6697 fold_convert (type, alt0),
6698 fold_convert (type, alt1)),
6699 fold_convert (type, same));
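/* Illustrative examples (editorial addition):

     x * 3 + x * 5   ->  (3 + 5) * x        (identical multiplicand)
     a * c - b * c   ->  (a - b) * c        (identical multiplier)
     x * 4 + x       ->  (4 + 1) * x        (bare operand acts as x * 1)
     i * 12 + j * 4  ->  (i * 3 + j) * 4    (common power-of-two factor)  */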
6704 /* Fold a unary expression of code CODE and type TYPE with operand
6705 OP0. Return the folded expression if folding is successful.
6706 Otherwise, return NULL_TREE. */
6709 fold_unary (enum tree_code code, tree type, tree op0)
6713 enum tree_code_class kind = TREE_CODE_CLASS (code);
6715 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6716 && TREE_CODE_LENGTH (code) == 1);
6721 if (code == NOP_EXPR || code == CONVERT_EXPR
6722 || code == FLOAT_EXPR || code == ABS_EXPR)
6724 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6726 STRIP_SIGN_NOPS (arg0);
6730 /* Strip any conversions that don't change the mode. This
6731 is safe for every expression, except for a comparison
6732 expression because its signedness is derived from its operands.
6735 Note that this is done as an internal manipulation within
6736 the constant folder, in order to find the simplest
6737 representation of the arguments so that their form can be
6738 studied. In any case, the appropriate type conversions
6739 should be put back in the tree that will get out of the
6745 if (TREE_CODE_CLASS (code) == tcc_unary)
6747 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6748 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6749 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6750 else if (TREE_CODE (arg0) == COND_EXPR)
6752 tree arg01 = TREE_OPERAND (arg0, 1);
6753 tree arg02 = TREE_OPERAND (arg0, 2);
6754 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6755 arg01 = fold_build1 (code, type, arg01);
6756 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6757 arg02 = fold_build1 (code, type, arg02);
6758 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6761 /* If this was a conversion, and all we did was to move it
6762 inside the COND_EXPR, bring it back out. But leave it if
6763 it is a conversion from integer to integer and the
6764 result precision is no wider than a word since such a
6765 conversion is cheap and may be optimized away by combine,
6766 while it couldn't if it were outside the COND_EXPR. Then return
6767 so we don't get into an infinite recursion loop taking the
6768 conversion out and then back in. */
6770 if ((code == NOP_EXPR || code == CONVERT_EXPR
6771 || code == NON_LVALUE_EXPR)
6772 && TREE_CODE (tem) == COND_EXPR
6773 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6774 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6775 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6776 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6777 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6778 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6779 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6781 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6782 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6783 || flag_syntax_only))
6784 tem = build1 (code, type,
6786 TREE_TYPE (TREE_OPERAND
6787 (TREE_OPERAND (tem, 1), 0)),
6788 TREE_OPERAND (tem, 0),
6789 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6790 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6793 else if (COMPARISON_CLASS_P (arg0))
6795 if (TREE_CODE (type) == BOOLEAN_TYPE)
6797 arg0 = copy_node (arg0);
6798 TREE_TYPE (arg0) = type;
6801 else if (TREE_CODE (type) != INTEGER_TYPE)
6802 return fold_build3 (COND_EXPR, type, arg0,
6803 fold_build1 (code, type,
6805 fold_build1 (code, type,
6806 integer_zero_node));
6815 case FIX_TRUNC_EXPR:
6817 case FIX_FLOOR_EXPR:
6818 case FIX_ROUND_EXPR:
6819 if (TREE_TYPE (op0) == type)
6822 /* If we have (type) (a CMP b) and type is an integral type, return
6823 a new expression involving the new type. */
6824 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6825 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6826 TREE_OPERAND (op0, 1));
6828 /* Handle cases of two conversions in a row. */
6829 if (TREE_CODE (op0) == NOP_EXPR
6830 || TREE_CODE (op0) == CONVERT_EXPR)
6832 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6833 tree inter_type = TREE_TYPE (op0);
6834 int inside_int = INTEGRAL_TYPE_P (inside_type);
6835 int inside_ptr = POINTER_TYPE_P (inside_type);
6836 int inside_float = FLOAT_TYPE_P (inside_type);
6837 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6838 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6839 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6840 int inter_int = INTEGRAL_TYPE_P (inter_type);
6841 int inter_ptr = POINTER_TYPE_P (inter_type);
6842 int inter_float = FLOAT_TYPE_P (inter_type);
6843 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6844 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6845 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6846 int final_int = INTEGRAL_TYPE_P (type);
6847 int final_ptr = POINTER_TYPE_P (type);
6848 int final_float = FLOAT_TYPE_P (type);
6849 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6850 unsigned int final_prec = TYPE_PRECISION (type);
6851 int final_unsignedp = TYPE_UNSIGNED (type);
6853 /* In addition to the cases of two conversions in a row
6854 handled below, if we are converting something to its own
6855 type via an object of identical or wider precision, neither
6856 conversion is needed. */
6857 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6858 && ((inter_int && final_int) || (inter_float && final_float))
6859 && inter_prec >= final_prec)
6860 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6862 /* Likewise, if the intermediate and final types are either both
6863 float or both integer, we don't need the middle conversion if
6864 it is wider than the final type and doesn't change the signedness
6865 (for integers). Avoid this if the final type is a pointer
6866 since then we sometimes need the inner conversion. Likewise if
6867 the outer has a precision not equal to the size of its mode. */
6868 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6869 || (inter_float && inside_float)
6870 || (inter_vec && inside_vec))
6871 && inter_prec >= inside_prec
6872 && (inter_float || inter_vec
6873 || inter_unsignedp == inside_unsignedp)
6874 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6875 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6877 && (! final_vec || inter_prec == inside_prec))
6878 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6880 /* If we have a sign-extension of a zero-extended value, we can
6881 replace that by a single zero-extension. */
6882 if (inside_int && inter_int && final_int
6883 && inside_prec < inter_prec && inter_prec < final_prec
6884 && inside_unsignedp && !inter_unsignedp)
6885 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6887 /* Two conversions in a row are not needed unless:
6888 - some conversion is floating-point (overstrict for now), or
6889 - some conversion is a vector (overstrict for now), or
6890 - the intermediate type is narrower than both initial and final, or
6892 - the intermediate type and innermost type differ in signedness,
6893 and the outermost type is wider than the intermediate, or
6894 - the initial type is a pointer type and the precisions of the
6895 intermediate and final types differ, or
6896 - the final type is a pointer type and the precisions of the
6897 initial and intermediate types differ. */
6898 if (! inside_float && ! inter_float && ! final_float
6899 && ! inside_vec && ! inter_vec && ! final_vec
6900 && (inter_prec > inside_prec || inter_prec > final_prec)
6901 && ! (inside_int && inter_int
6902 && inter_unsignedp != inside_unsignedp
6903 && inter_prec < final_prec)
6904 && ((inter_unsignedp && inter_prec > inside_prec)
6905 == (final_unsignedp && final_prec > inter_prec))
6906 && ! (inside_ptr && inter_prec != final_prec)
6907 && ! (final_ptr && inside_prec != inter_prec)
6908 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6909 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6911 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
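/* Illustrative example (editorial addition): by the sign-extension-of-
   zero-extension rule above,

     (int) (short) (unsigned char) x   becomes   (int) (unsigned char) x

   the value is already zero-extended, so the intermediate sign-
   extending conversion is redundant.  */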
6914 /* Handle (T *)&A.B.C for A being of type T and B and C
6915 living at offset zero. This occurs frequently in
6916 C++ upcasting and then accessing the base. */
6917 if (TREE_CODE (op0) == ADDR_EXPR
6918 && POINTER_TYPE_P (type)
6919 && handled_component_p (TREE_OPERAND (op0, 0)))
6921 HOST_WIDE_INT bitsize, bitpos;
6923 enum machine_mode mode;
6924 int unsignedp, volatilep;
6925 tree base = TREE_OPERAND (op0, 0);
6926 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6927 &mode, &unsignedp, &volatilep, false);
6928 /* If the reference was to a (constant) zero offset, we can use
6929 the address of the base if it has the same base type
6930 as the result type. */
6931 if (! offset && bitpos == 0
6932 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6933 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6934 return fold_convert (type, build_fold_addr_expr (base));
6937 if (TREE_CODE (op0) == MODIFY_EXPR
6938 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6939 /* Detect assigning a bitfield. */
6940 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6941 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6943 /* Don't leave an assignment inside a conversion
6944 unless assigning a bitfield. */
6945 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6946 /* First do the assignment, then return converted constant. */
6947 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6948 TREE_NO_WARNING (tem) = 1;
6949 TREE_USED (tem) = 1;
6953 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6954 constant (if x has signed type, the sign bit cannot be set
6955 in c). This folds extension into the BIT_AND_EXPR. */
6956 if (INTEGRAL_TYPE_P (type)
6957 && TREE_CODE (type) != BOOLEAN_TYPE
6958 && TREE_CODE (op0) == BIT_AND_EXPR
6959 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6962 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6965 if (TYPE_UNSIGNED (TREE_TYPE (and))
6966 || (TYPE_PRECISION (type)
6967 <= TYPE_PRECISION (TREE_TYPE (and))))
6969 else if (TYPE_PRECISION (TREE_TYPE (and1))
6970 <= HOST_BITS_PER_WIDE_INT
6971 && host_integerp (and1, 1))
6973 unsigned HOST_WIDE_INT cst;
6975 cst = tree_low_cst (and1, 1);
6976 cst &= (HOST_WIDE_INT) -1
6977 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6978 change = (cst == 0);
6979 #ifdef LOAD_EXTEND_OP
6981 && !flag_syntax_only
6982 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6985 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6986 and0 = fold_convert (uns, and0);
6987 and1 = fold_convert (uns, and1);
6993 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6994 TREE_INT_CST_HIGH (and1));
6995 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6996 TREE_CONSTANT_OVERFLOW (and1));
6997 return fold_build2 (BIT_AND_EXPR, type,
6998 fold_convert (type, and0), tem);
7002 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7003 T2 being pointers to types of the same size. */
7004 if (POINTER_TYPE_P (type)
7005 && BINARY_CLASS_P (arg0)
7006 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7007 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7009 tree arg00 = TREE_OPERAND (arg0, 0);
7011 tree t1 = TREE_TYPE (arg00);
7012 tree tt0 = TREE_TYPE (t0);
7013 tree tt1 = TREE_TYPE (t1);
7014 tree s0 = TYPE_SIZE (tt0);
7015 tree s1 = TYPE_SIZE (tt1);
7017 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7018 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7019 TREE_OPERAND (arg0, 1));
7022 tem = fold_convert_const (code, type, arg0);
7023 return tem ? tem : NULL_TREE;
7025 case VIEW_CONVERT_EXPR:
7026 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7027 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7031 if (negate_expr_p (arg0))
7032 return fold_convert (type, negate_expr (arg0));
7033 /* Convert - (~A) to A + 1. */
7034 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7035 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7036 build_int_cst (type, 1));
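/* Illustrative example (editorial addition): in two's complement
   -x == ~x + 1, so -(~a) == a + 1; e.g. a == 5 gives ~a == -6 and
   -(-6) == 6 == 5 + 1.  */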
7040 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7041 return fold_abs_const (arg0, type);
7042 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7043 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7044 /* Convert fabs((double)float) into (double)fabsf(float). */
7045 else if (TREE_CODE (arg0) == NOP_EXPR
7046 && TREE_CODE (type) == REAL_TYPE)
7048 tree targ0 = strip_float_extensions (arg0);
7050 return fold_convert (type, fold_build1 (ABS_EXPR,
7054 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7055 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7058 /* Strip sign ops from argument. */
7059 if (TREE_CODE (type) == REAL_TYPE)
7061 tem = fold_strip_sign_ops (arg0);
7063 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7068 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7069 return fold_convert (type, arg0);
7070 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7071 return build2 (COMPLEX_EXPR, type,
7072 TREE_OPERAND (arg0, 0),
7073 negate_expr (TREE_OPERAND (arg0, 1)));
7074 else if (TREE_CODE (arg0) == COMPLEX_CST)
7075 return build_complex (type, TREE_REALPART (arg0),
7076 negate_expr (TREE_IMAGPART (arg0)));
7077 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7078 return fold_build2 (TREE_CODE (arg0), type,
7079 fold_build1 (CONJ_EXPR, type,
7080 TREE_OPERAND (arg0, 0)),
7081 fold_build1 (CONJ_EXPR, type,
7082 TREE_OPERAND (arg0, 1)));
7083 else if (TREE_CODE (arg0) == CONJ_EXPR)
7084 return TREE_OPERAND (arg0, 0);
7088 if (TREE_CODE (arg0) == INTEGER_CST)
7089 return fold_not_const (arg0, type);
7090 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7091 return TREE_OPERAND (arg0, 0);
7092 /* Convert ~ (-A) to A - 1. */
7093 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7094 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7095 build_int_cst (type, 1));
7096 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7097 else if (INTEGRAL_TYPE_P (type)
7098 && ((TREE_CODE (arg0) == MINUS_EXPR
7099 && integer_onep (TREE_OPERAND (arg0, 1)))
7100 || (TREE_CODE (arg0) == PLUS_EXPR
7101 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7102 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7103 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7104 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7105 && (tem = fold_unary (BIT_NOT_EXPR, type,
7107 TREE_OPERAND (arg0, 0)))))
7108 return fold_build2 (BIT_XOR_EXPR, type, tem,
7109 fold_convert (type, TREE_OPERAND (arg0, 1)));
7110 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7111 && (tem = fold_unary (BIT_NOT_EXPR, type,
7113 TREE_OPERAND (arg0, 1)))))
7114 return fold_build2 (BIT_XOR_EXPR, type,
7115 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
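/* Illustrative examples (editorial addition) of the BIT_NOT_EXPR
   folds above, all two's complement identities (~x == -x - 1):

     ~(-a)      ->  a - 1
     ~(a - 1)   ->  -a
     ~(~b ^ c)  ->  b ^ c     (the inner ~b simplifies to b)  */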
7119 case TRUTH_NOT_EXPR:
7120 /* The argument to invert_truthvalue must have Boolean type. */
7121 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7122 arg0 = fold_convert (boolean_type_node, arg0);
7124 /* Note that the operand of this must be an int
7125 and its values must be 0 or 1.
7126 ("true" is a fixed value perhaps depending on the language,
7127 but we don't handle values other than 1 correctly yet.) */
7128 tem = invert_truthvalue (arg0);
7129 /* Avoid infinite recursion. */
7130 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7132 return fold_convert (type, tem);
7135 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7137 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7138 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7139 TREE_OPERAND (arg0, 1));
7140 else if (TREE_CODE (arg0) == COMPLEX_CST)
7141 return TREE_REALPART (arg0);
7142 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7143 return fold_build2 (TREE_CODE (arg0), type,
7144 fold_build1 (REALPART_EXPR, type,
7145 TREE_OPERAND (arg0, 0)),
7146 fold_build1 (REALPART_EXPR, type,
7147 TREE_OPERAND (arg0, 1)));
7151 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7152 return fold_convert (type, integer_zero_node);
7153 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7154 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7155 TREE_OPERAND (arg0, 0));
7156 else if (TREE_CODE (arg0) == COMPLEX_CST)
7157 return TREE_IMAGPART (arg0);
7158 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7159 return fold_build2 (TREE_CODE (arg0), type,
7160 fold_build1 (IMAGPART_EXPR, type,
7161 TREE_OPERAND (arg0, 0)),
7162 fold_build1 (IMAGPART_EXPR, type,
7163 TREE_OPERAND (arg0, 1)));
7168 } /* switch (code) */
7171 /* Fold a binary expression of code CODE and type TYPE with operands
7172 OP0 and OP1. Return the folded expression if folding is
7173 successful. Otherwise, return NULL_TREE. */
7176 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7178 tree t1 = NULL_TREE;
7180 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7181 enum tree_code_class kind = TREE_CODE_CLASS (code);
7183 /* WINS will be nonzero when the switch is done
7184 if all operands are constant. */
7187 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7188 && TREE_CODE_LENGTH (code) == 2);
7197 /* Strip any conversions that don't change the mode. This is
7198 safe for every expression, except for a comparison expression
7199 because its signedness is derived from its operands. So, in
7200 the latter case, only strip conversions that don't change the signedness.
7203 Note that this is done as an internal manipulation within the
7204 constant folder, in order to find the simplest representation
7205 of the arguments so that their form can be studied. In any
7206 case, the appropriate type conversions should be put back in
7207 the tree that will get out of the constant folder. */
7208 if (kind == tcc_comparison)
7209 STRIP_SIGN_NOPS (arg0);
7213 if (TREE_CODE (arg0) == COMPLEX_CST)
7214 subop = TREE_REALPART (arg0);
7218 if (TREE_CODE (subop) != INTEGER_CST
7219 && TREE_CODE (subop) != REAL_CST)
7220 /* Note that TREE_CONSTANT isn't enough:
7221 static var addresses are constant but we can't
7222 do arithmetic on them. */
7230 /* Strip any conversions that don't change the mode. This is
7231 safe for every expression, except for a comparison expression
7232 because its signedness is derived from its operands. So, in
7233 the latter case, only strip conversions that don't change the signedness.
7236 Note that this is done as an internal manipulation within the
7237 constant folder, in order to find the simplest representation
7238 of the arguments so that their form can be studied. In any
7239 case, the appropriate type conversions should be put back in
7240 the tree that will get out of the constant folder. */
7241 if (kind == tcc_comparison)
7242 STRIP_SIGN_NOPS (arg1);
7246 if (TREE_CODE (arg1) == COMPLEX_CST)
7247 subop = TREE_REALPART (arg1);
7251 if (TREE_CODE (subop) != INTEGER_CST
7252 && TREE_CODE (subop) != REAL_CST)
7253 /* Note that TREE_CONSTANT isn't enough:
7254 static var addresses are constant but we can't
7255 do arithmetic on them. */
7259 /* If this is a commutative operation, and ARG0 is a constant, move it
7260 to ARG1 to reduce the number of tests below. */
7261 if (commutative_tree_code (code)
7262 && tree_swap_operands_p (arg0, arg1, true))
7263 return fold_build2 (code, type, op1, op0);
7265 /* Now WINS is set as described above,
7266 ARG0 is the first operand of EXPR,
7267 and ARG1 is the second operand (if it has more than one operand).
7269 First check for cases where an arithmetic operation is applied to a
7270 compound, conditional, or comparison operation. Push the arithmetic
7271 operation inside the compound or conditional to see if any folding
7272 can then be done. Convert comparison to conditional for this purpose.
7273 This also optimizes non-constant cases that used to be done in expand_expr.
7276 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7277 one of the operands is a comparison and the other is a comparison, a
7278 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7279 code below would make the expression more complex. Change it to a
7280 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7281 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7283 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7284 || code == EQ_EXPR || code == NE_EXPR)
7285 && ((truth_value_p (TREE_CODE (arg0))
7286 && (truth_value_p (TREE_CODE (arg1))
7287 || (TREE_CODE (arg1) == BIT_AND_EXPR
7288 && integer_onep (TREE_OPERAND (arg1, 1)))))
7289 || (truth_value_p (TREE_CODE (arg1))
7290 && (truth_value_p (TREE_CODE (arg0))
7291 || (TREE_CODE (arg0) == BIT_AND_EXPR
7292 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7294 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7295 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7298 fold_convert (boolean_type_node, arg0),
7299 fold_convert (boolean_type_node, arg1));
7301 if (code == EQ_EXPR)
7302 tem = invert_truthvalue (tem);
7304 return fold_convert (type, tem);
7307 if (TREE_CODE_CLASS (code) == tcc_binary
7308 || TREE_CODE_CLASS (code) == tcc_comparison)
7310 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7311 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7312 fold_build2 (code, type,
7313 TREE_OPERAND (arg0, 1), op1));
7314 if (TREE_CODE (arg1) == COMPOUND_EXPR
7315 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7316 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7317 fold_build2 (code, type,
7318 op0, TREE_OPERAND (arg1, 1)));
7320 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7322 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7324 /*cond_first_p=*/1);
7325 if (tem != NULL_TREE)
7329 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7331 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7333 /*cond_first_p=*/0);
7334 if (tem != NULL_TREE)
7342 /* A + (-B) -> A - B */
7343 if (TREE_CODE (arg1) == NEGATE_EXPR)
7344 return fold_build2 (MINUS_EXPR, type,
7345 fold_convert (type, arg0),
7346 fold_convert (type, TREE_OPERAND (arg1, 0)));
7347 /* (-A) + B -> B - A */
7348 if (TREE_CODE (arg0) == NEGATE_EXPR
7349 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7350 return fold_build2 (MINUS_EXPR, type,
7351 fold_convert (type, arg1),
7352 fold_convert (type, TREE_OPERAND (arg0, 0)));
7353 /* Convert ~A + 1 to -A. */
7354 if (INTEGRAL_TYPE_P (type)
7355 && TREE_CODE (arg0) == BIT_NOT_EXPR
7356 && integer_onep (arg1))
7357 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7359 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
7361 if ((TREE_CODE (arg0) == MULT_EXPR
7362 || TREE_CODE (arg1) == MULT_EXPR)
7363 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7365 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7370 if (! FLOAT_TYPE_P (type))
7372 if (integer_zerop (arg1))
7373 return non_lvalue (fold_convert (type, arg0));
7375 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7376 with a constant, and the two constants have no bits in common,
7377 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.
7379 if (TREE_CODE (arg0) == BIT_AND_EXPR
7380 && TREE_CODE (arg1) == BIT_AND_EXPR
7381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7382 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7383 && integer_zerop (const_binop (BIT_AND_EXPR,
7384 TREE_OPERAND (arg0, 1),
7385 TREE_OPERAND (arg1, 1), 0)))
7387 code = BIT_IOR_EXPR;
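/* Illustrative example (editorial addition): the masks 0xf0 and 0x0f
   have no bits in common, so

     (x & 0xf0) + (y & 0x0f)

   can never carry and is treated as (x & 0xf0) | (y & 0x0f).  */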
7391 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7392 (plus (plus (mult) (mult)) (foo)) so that we can
7393 take advantage of the factoring cases below. */
7394 if (((TREE_CODE (arg0) == PLUS_EXPR
7395 || TREE_CODE (arg0) == MINUS_EXPR)
7396 && TREE_CODE (arg1) == MULT_EXPR)
7397 || ((TREE_CODE (arg1) == PLUS_EXPR
7398 || TREE_CODE (arg1) == MINUS_EXPR)
7399 && TREE_CODE (arg0) == MULT_EXPR))
7401 tree parg0, parg1, parg, marg;
7402 enum tree_code pcode;
7404 if (TREE_CODE (arg1) == MULT_EXPR)
7405 parg = arg0, marg = arg1;
7407 parg = arg1, marg = arg0;
7408 pcode = TREE_CODE (parg);
7409 parg0 = TREE_OPERAND (parg, 0);
7410 parg1 = TREE_OPERAND (parg, 1);
7414 if (TREE_CODE (parg0) == MULT_EXPR
7415 && TREE_CODE (parg1) != MULT_EXPR)
7416 return fold_build2 (pcode, type,
7417 fold_build2 (PLUS_EXPR, type,
7418 fold_convert (type, parg0),
7419 fold_convert (type, marg)),
7420 fold_convert (type, parg1));
7421 if (TREE_CODE (parg0) != MULT_EXPR
7422 && TREE_CODE (parg1) == MULT_EXPR)
7423 return fold_build2 (PLUS_EXPR, type,
7424 fold_convert (type, parg0),
7425 fold_build2 (pcode, type,
7426 fold_convert (type, marg),
7431 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7432 of the array. The loop optimizer sometimes produces this type of expression. */
7434 if (TREE_CODE (arg0) == ADDR_EXPR)
7436 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7438 return fold_convert (type, tem);
7440 else if (TREE_CODE (arg1) == ADDR_EXPR)
7442 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7444 return fold_convert (type, tem);
7449 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7450 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7451 return non_lvalue (fold_convert (type, arg0));
7453 /* Likewise if the operands are reversed. */
7454 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7455 return non_lvalue (fold_convert (type, arg1));
7457 /* Convert X + -C into X - C. */
7458 if (TREE_CODE (arg1) == REAL_CST
7459 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7461 tem = fold_negate_const (arg1, type);
7462 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7463 return fold_build2 (MINUS_EXPR, type,
7464 fold_convert (type, arg0),
7465 fold_convert (type, tem));
7468 if (flag_unsafe_math_optimizations
7469 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7470 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7471 && (tem = distribute_real_division (code, type, arg0, arg1)))
7474 /* Convert x+x into x*2.0. */
7475 if (operand_equal_p (arg0, arg1, 0)
7476 && SCALAR_FLOAT_TYPE_P (type))
7477 return fold_build2 (MULT_EXPR, type, arg0,
7478 build_real (type, dconst2));
7480 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7481 if (flag_unsafe_math_optimizations
7482 && TREE_CODE (arg1) == PLUS_EXPR
7483 && TREE_CODE (arg0) != MULT_EXPR)
7485 tree tree10 = TREE_OPERAND (arg1, 0);
7486 tree tree11 = TREE_OPERAND (arg1, 1);
7487 if (TREE_CODE (tree11) == MULT_EXPR
7488 && TREE_CODE (tree10) == MULT_EXPR)
7491 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7492 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7495 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7496 if (flag_unsafe_math_optimizations
7497 && TREE_CODE (arg0) == PLUS_EXPR
7498 && TREE_CODE (arg1) != MULT_EXPR)
7500 tree tree00 = TREE_OPERAND (arg0, 0);
7501 tree tree01 = TREE_OPERAND (arg0, 1);
7502 if (TREE_CODE (tree01) == MULT_EXPR
7503 && TREE_CODE (tree00) == MULT_EXPR)
7506 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7507 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7513 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7514 is a rotate of A by C1 bits. */
7515 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7516 is a rotate of A by B bits. */
7518 enum tree_code code0, code1;
7519 code0 = TREE_CODE (arg0);
7520 code1 = TREE_CODE (arg1);
7521 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7522 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7523 && operand_equal_p (TREE_OPERAND (arg0, 0),
7524 TREE_OPERAND (arg1, 0), 0)
7525 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7527 tree tree01, tree11;
7528 enum tree_code code01, code11;
7530 tree01 = TREE_OPERAND (arg0, 1);
7531 tree11 = TREE_OPERAND (arg1, 1);
7532 STRIP_NOPS (tree01);
7533 STRIP_NOPS (tree11);
7534 code01 = TREE_CODE (tree01);
7535 code11 = TREE_CODE (tree11);
7536 if (code01 == INTEGER_CST
7537 && code11 == INTEGER_CST
7538 && TREE_INT_CST_HIGH (tree01) == 0
7539 && TREE_INT_CST_HIGH (tree11) == 0
7540 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7541 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7542 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7543 code0 == LSHIFT_EXPR ? tree01 : tree11);
7544 else if (code11 == MINUS_EXPR)
7546 tree tree110, tree111;
7547 tree110 = TREE_OPERAND (tree11, 0);
7548 tree111 = TREE_OPERAND (tree11, 1);
7549 STRIP_NOPS (tree110);
7550 STRIP_NOPS (tree111);
7551 if (TREE_CODE (tree110) == INTEGER_CST
7552 && 0 == compare_tree_int (tree110,
7554 (TREE_TYPE (TREE_OPERAND
7556 && operand_equal_p (tree01, tree111, 0))
7557 return build2 ((code0 == LSHIFT_EXPR
7560 type, TREE_OPERAND (arg0, 0), tree01);
7562 else if (code01 == MINUS_EXPR)
7564 tree tree010, tree011;
7565 tree010 = TREE_OPERAND (tree01, 0);
7566 tree011 = TREE_OPERAND (tree01, 1);
7567 STRIP_NOPS (tree010);
7568 STRIP_NOPS (tree011);
7569 if (TREE_CODE (tree010) == INTEGER_CST
7570 && 0 == compare_tree_int (tree010,
7572 (TREE_TYPE (TREE_OPERAND
7574 && operand_equal_p (tree11, tree011, 0))
7575 return build2 ((code0 != LSHIFT_EXPR
7578 type, TREE_OPERAND (arg0, 0), tree11);
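/* Illustrative example (editorial addition): for unsigned 32-bit x,

     (x << 5) + (x >> 27)          ->  x rotated left by 5
     (x << n) + (x >> (32 - n))    ->  x rotated left by n

   the shifted halves are disjoint, so + behaves as |, which is the
   usual open-coded rotate idiom.  */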
7584 /* In most languages, we can't associate operations on floats through
7585 parentheses. Rather than remember where the parentheses were, we
7586 don't associate floats at all, unless the user has specified
7587 -funsafe-math-optimizations. */
7590 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7592 tree var0, con0, lit0, minus_lit0;
7593 tree var1, con1, lit1, minus_lit1;
7595 /* Split both trees into variables, constants, and literals. Then
7596 associate each group together, the constants with literals,
7597 then the result with variables. This increases the chances of
7598 literals being recombined later and of generating relocatable
7599 expressions for the sum of a constant and literal. */
7600 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7601 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7602 code == MINUS_EXPR);
7604 /* Only do something if we found more than two objects. Otherwise,
7605 nothing has changed and we risk infinite recursion. */
7606 if (2 < ((var0 != 0) + (var1 != 0)
7607 + (con0 != 0) + (con1 != 0)
7608 + (lit0 != 0) + (lit1 != 0)
7609 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7611 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7612 if (code == MINUS_EXPR)
7615 var0 = associate_trees (var0, var1, code, type);
7616 con0 = associate_trees (con0, con1, code, type);
7617 lit0 = associate_trees (lit0, lit1, code, type);
7618 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7620 /* Preserve the MINUS_EXPR if the negative part of the literal is
7621 greater than the positive part. Otherwise, the multiplicative
7622 folding code (i.e. extract_muldiv) may be fooled in case
7623 unsigned constants are subtracted, like in the following
7624 example: ((X*2 + 4) - 8U)/2. */
7625 if (minus_lit0 && lit0)
7627 if (TREE_CODE (lit0) == INTEGER_CST
7628 && TREE_CODE (minus_lit0) == INTEGER_CST
7629 && tree_int_cst_lt (lit0, minus_lit0))
7631 minus_lit0 = associate_trees (minus_lit0, lit0,
7637 lit0 = associate_trees (lit0, minus_lit0,
7645 return fold_convert (type,
7646 associate_trees (var0, minus_lit0,
7650 con0 = associate_trees (con0, minus_lit0,
7652 return fold_convert (type,
7653 associate_trees (var0, con0,
7658 con0 = associate_trees (con0, lit0, code, type);
7659 return fold_convert (type, associate_trees (var0, con0,
7666 t1 = const_binop (code, arg0, arg1, 0);
7667 if (t1 != NULL_TREE)
7669 /* The return value should always have
7670 the same type as the original expression. */
7671 if (TREE_TYPE (t1) != type)
7672 t1 = fold_convert (type, t1);
7679 /* A - (-B) -> A + B */
7680 if (TREE_CODE (arg1) == NEGATE_EXPR)
7681 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7682 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7683 if (TREE_CODE (arg0) == NEGATE_EXPR
7684 && (FLOAT_TYPE_P (type)
7685 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7686 && negate_expr_p (arg1)
7687 && reorder_operands_p (arg0, arg1))
7688 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7689 TREE_OPERAND (arg0, 0));
7690 /* Convert -A - 1 to ~A. */
7691 if (INTEGRAL_TYPE_P (type)
7692 && TREE_CODE (arg0) == NEGATE_EXPR
7693 && integer_onep (arg1))
7694 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7696 /* Convert -1 - A to ~A. */
7697 if (INTEGRAL_TYPE_P (type)
7698 && integer_all_onesp (arg0))
7699 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7701 if (! FLOAT_TYPE_P (type))
7703 if (! wins && integer_zerop (arg0))
7704 return negate_expr (fold_convert (type, arg1));
7705 if (integer_zerop (arg1))
7706 return non_lvalue (fold_convert (type, arg0));
7708 /* Fold A - (A & B) into ~B & A. */
7709 if (!TREE_SIDE_EFFECTS (arg0)
7710 && TREE_CODE (arg1) == BIT_AND_EXPR)
7712 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7713 return fold_build2 (BIT_AND_EXPR, type,
7714 fold_build1 (BIT_NOT_EXPR, type,
7715 TREE_OPERAND (arg1, 0)),
7717 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7718 return fold_build2 (BIT_AND_EXPR, type,
7719 fold_build1 (BIT_NOT_EXPR, type,
7720 TREE_OPERAND (arg1, 1)),
7724 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7725 any power of 2 minus 1. */
7726 if (TREE_CODE (arg0) == BIT_AND_EXPR
7727 && TREE_CODE (arg1) == BIT_AND_EXPR
7728 && operand_equal_p (TREE_OPERAND (arg0, 0),
7729 TREE_OPERAND (arg1, 0), 0))
7731 tree mask0 = TREE_OPERAND (arg0, 1);
7732 tree mask1 = TREE_OPERAND (arg1, 1);
7733 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7735 if (operand_equal_p (tem, mask1, 0))
7737 tem = fold_build2 (BIT_XOR_EXPR, type,
7738 TREE_OPERAND (arg0, 0), mask1);
7739 return fold_build2 (MINUS_EXPR, type, tem, mask1);
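/* Illustrative example (editorial addition): with B == 7 (2**3 - 1),

     (a & ~7) - (a & 7)   ->   (a ^ 7) - 7

   e.g. a == 13: (8) - (5) == 3 and (13 ^ 7) - 7 == 10 - 7 == 3.  */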
7744 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7745 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7746 return non_lvalue (fold_convert (type, arg0));
7748 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7749 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7750 (-ARG1 + ARG0) reduces to -ARG1. */
7751 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7752 return negate_expr (fold_convert (type, arg1));
7754 /* Fold &x - &x. This can happen from &x.foo - &x.
7755 This is unsafe for certain floats even in non-IEEE formats.
7756 In IEEE, it is unsafe because it does wrong for NaNs.
7757 Also note that operand_equal_p is always false if an operand is volatile. */
7760 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7761 && operand_equal_p (arg0, arg1, 0))
7762 return fold_convert (type, integer_zero_node);
7764 /* A - B -> A + (-B) if B is easily negatable. */
7765 if (!wins && negate_expr_p (arg1)
7766 && ((FLOAT_TYPE_P (type)
7767 /* Avoid this transformation if B is a positive REAL_CST. */
7768 && (TREE_CODE (arg1) != REAL_CST
7769 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7770 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7771 return fold_build2 (PLUS_EXPR, type,
7772 fold_convert (type, arg0),
7773 fold_convert (type, negate_expr (arg1)));
7775 /* Try folding difference of addresses. */
7779 if ((TREE_CODE (arg0) == ADDR_EXPR
7780 || TREE_CODE (arg1) == ADDR_EXPR)
7781 && ptr_difference_const (arg0, arg1, &diff))
7782 return build_int_cst_type (type, diff);
7785 /* Fold &a[i] - &a[j] to i-j. */
7786 if (TREE_CODE (arg0) == ADDR_EXPR
7787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7788 && TREE_CODE (arg1) == ADDR_EXPR
7789 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7791 tree aref0 = TREE_OPERAND (arg0, 0);
7792 tree aref1 = TREE_OPERAND (arg1, 0);
7793 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7794 TREE_OPERAND (aref1, 0), 0))
7796 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7797 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7798 tree esz = array_ref_element_size (aref0);
7799 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7800 return fold_build2 (MULT_EXPR, type, diff,
7801 fold_convert (type, esz));
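/* Illustrative example (editorial addition): for "int a[10];" with
   4-byte elements, the address difference

     &a[i] - &a[j]   folds to   (i - j) * 4

   the common base cancels, leaving the scaled index difference.  */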
7806 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7807 of the array. The loop optimizer sometimes produces this type of expression. */
7809 if (TREE_CODE (arg0) == ADDR_EXPR)
7811 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7813 return fold_convert (type, tem);
7816 if (flag_unsafe_math_optimizations
7817 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7818 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7819 && (tem = distribute_real_division (code, type, arg0, arg1)))
7822 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
7824 if ((TREE_CODE (arg0) == MULT_EXPR
7825 || TREE_CODE (arg1) == MULT_EXPR)
7826 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7828 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7836 /* (-A) * (-B) -> A * B */
7837 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7838 return fold_build2 (MULT_EXPR, type,
7839 TREE_OPERAND (arg0, 0),
7840 negate_expr (arg1));
7841 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7842 return fold_build2 (MULT_EXPR, type,
7844 TREE_OPERAND (arg1, 0));
7846 if (! FLOAT_TYPE_P (type))
7848 if (integer_zerop (arg1))
7849 return omit_one_operand (type, arg1, arg0);
7850 if (integer_onep (arg1))
7851 return non_lvalue (fold_convert (type, arg0));
7852 /* Transform x * -1 into -x. */
7853 if (integer_all_onesp (arg1))
7854 return fold_convert (type, negate_expr (arg0));
7856 /* (a * (1 << b)) is (a << b) */
7857 if (TREE_CODE (arg1) == LSHIFT_EXPR
7858 && integer_onep (TREE_OPERAND (arg1, 0)))
7859 return fold_build2 (LSHIFT_EXPR, type, arg0,
7860 TREE_OPERAND (arg1, 1));
7861 if (TREE_CODE (arg0) == LSHIFT_EXPR
7862 && integer_onep (TREE_OPERAND (arg0, 0)))
7863 return fold_build2 (LSHIFT_EXPR, type, arg1,
7864 TREE_OPERAND (arg0, 1));
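/* Illustrative example (editorial addition): "n * (1 << 4)" becomes
   "n << 4"; the commuted form "(1 << b) * a" is caught by the second
   test and becomes "a << b".  */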
7866 if (TREE_CODE (arg1) == INTEGER_CST
7867 && 0 != (tem = extract_muldiv (op0,
7868 fold_convert (type, arg1),
7870 return fold_convert (type, tem);
7875 /* Maybe fold x * 0 to 0. The expressions aren't the same
7876 when x is NaN, since x * 0 is also NaN. Nor are they the
7877 same in modes with signed zeros, since multiplying a
7878 negative value by 0 gives -0, not +0. */
7879 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7880 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7881 && real_zerop (arg1))
7882 return omit_one_operand (type, arg1, arg0);
7883 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7884 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7885 && real_onep (arg1))
7886 return non_lvalue (fold_convert (type, arg0));
7888 /* Transform x * -1.0 into -x. */
7889 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7890 && real_minus_onep (arg1))
7891 return fold_convert (type, negate_expr (arg0));
7893 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7894 if (flag_unsafe_math_optimizations
7895 && TREE_CODE (arg0) == RDIV_EXPR
7896 && TREE_CODE (arg1) == REAL_CST
7897 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7899 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7902 return fold_build2 (RDIV_EXPR, type, tem,
7903 TREE_OPERAND (arg0, 1));
7906 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7907 if (operand_equal_p (arg0, arg1, 0))
7909 tree tem = fold_strip_sign_ops (arg0);
7910 if (tem != NULL_TREE)
7912 tem = fold_convert (type, tem);
7913 return fold_build2 (MULT_EXPR, type, tem, tem);
7917 if (flag_unsafe_math_optimizations)
7919 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7920 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7922 /* Optimizations of root(...)*root(...). */
7923 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7925 tree rootfn, arg, arglist;
7926 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7927 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7929 /* Optimize sqrt(x)*sqrt(x) as x. */
7930 if (BUILTIN_SQRT_P (fcode0)
7931 && operand_equal_p (arg00, arg10, 0)
7932 && ! HONOR_SNANS (TYPE_MODE (type)))
7935 /* Optimize root(x)*root(y) as root(x*y). */
7936 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7937 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7938 arglist = build_tree_list (NULL_TREE, arg);
7939 return build_function_call_expr (rootfn, arglist);
7942 /* Optimize expN(x)*expN(y) as expN(x+y). */
7943 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7945 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7946 tree arg = fold_build2 (PLUS_EXPR, type,
7947 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7948 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7949 tree arglist = build_tree_list (NULL_TREE, arg);
7950 return build_function_call_expr (expfn, arglist);
7953 /* Optimizations of pow(...)*pow(...). */
7954 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7955 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7956 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7958 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7959 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7961 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7962 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7965 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7966 if (operand_equal_p (arg01, arg11, 0))
7968 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7969 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7970 tree arglist = tree_cons (NULL_TREE, arg,
7971 build_tree_list (NULL_TREE,
7973 return build_function_call_expr (powfn, arglist);
7976 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7977 if (operand_equal_p (arg00, arg10, 0))
7979 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7980 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7981 tree arglist = tree_cons (NULL_TREE, arg00,
7982 build_tree_list (NULL_TREE,
7984 return build_function_call_expr (powfn, arglist);
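/* Illustrative examples (editorial addition, valid only under
   -funsafe-math-optimizations):

     pow (x, y) * pow (z, y)   ->  pow (x * z, y)
     pow (x, y) * pow (x, z)   ->  pow (x, y + z)  */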
7988 /* Optimize tan(x)*cos(x) as sin(x). */
7989 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7990 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7991 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7992 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7993 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7994 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7995 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7996 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7998 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8000 if (sinfn != NULL_TREE)
8001 return build_function_call_expr (sinfn,
8002 TREE_OPERAND (arg0, 1));
8005 /* Optimize x*pow(x,c) as pow(x,c+1). */
8006 if (fcode1 == BUILT_IN_POW
8007 || fcode1 == BUILT_IN_POWF
8008 || fcode1 == BUILT_IN_POWL)
8010 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8011 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8013 if (TREE_CODE (arg11) == REAL_CST
8014 && ! TREE_CONSTANT_OVERFLOW (arg11)
8015 && operand_equal_p (arg0, arg10, 0))
8017 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8021 c = TREE_REAL_CST (arg11);
8022 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8023 arg = build_real (type, c);
8024 arglist = build_tree_list (NULL_TREE, arg);
8025 arglist = tree_cons (NULL_TREE, arg0, arglist);
8026 return build_function_call_expr (powfn, arglist);
8030 /* Optimize pow(x,c)*x as pow(x,c+1). */
8031 if (fcode0 == BUILT_IN_POW
8032 || fcode0 == BUILT_IN_POWF
8033 || fcode0 == BUILT_IN_POWL)
8035 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8036 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8038 if (TREE_CODE (arg01) == REAL_CST
8039 && ! TREE_CONSTANT_OVERFLOW (arg01)
8040 && operand_equal_p (arg1, arg00, 0))
8042 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8046 c = TREE_REAL_CST (arg01);
8047 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8048 arg = build_real (type, c);
8049 arglist = build_tree_list (NULL_TREE, arg);
8050 arglist = tree_cons (NULL_TREE, arg1, arglist);
8051 return build_function_call_expr (powfn, arglist);
8055 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8057 && operand_equal_p (arg0, arg1, 0))
8059 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8063 tree arg = build_real (type, dconst2);
8064 tree arglist = build_tree_list (NULL_TREE, arg);
8065 arglist = tree_cons (NULL_TREE, arg0, arglist);
8066 return build_function_call_expr (powfn, arglist);
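/* Illustrative example (editorial addition): under
   -funsafe-math-optimizations, "d * d" for double d becomes
   pow (d, 2.0); the expander open-codes that back into a single
   multiply, but the canonical pow form is easier for later math
   folds to recognize.  */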
8075 if (integer_all_onesp (arg1))
8076 return omit_one_operand (type, arg1, arg0);
8077 if (integer_zerop (arg1))
8078 return non_lvalue (fold_convert (type, arg0));
8079 if (operand_equal_p (arg0, arg1, 0))
8080 return non_lvalue (fold_convert (type, arg0));
8083 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8084 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8086 t1 = build_int_cst (type, -1);
8087 t1 = force_fit_type (t1, 0, false, false);
8088 return omit_one_operand (type, t1, arg1);
8092 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8093 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8095 t1 = build_int_cst (type, -1);
8096 t1 = force_fit_type (t1, 0, false, false);
8097 return omit_one_operand (type, t1, arg0);
8100 t1 = distribute_bit_expr (code, type, arg0, arg1);
8101 if (t1 != NULL_TREE)
8104 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8106 This results in more efficient code for machines without a NAND
8107 instruction. Combine will canonicalize to the first form
8108 which will allow use of NAND instructions provided by the
8109 backend if they exist. */
8110 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8111 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8113 return fold_build1 (BIT_NOT_EXPR, type,
8114 build2 (BIT_AND_EXPR, type,
8115 TREE_OPERAND (arg0, 0),
8116 TREE_OPERAND (arg1, 0)));
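/* Illustrative example (editorial addition): by De Morgan,
   ~a | ~b == ~(a & b); e.g. for 32-bit a == 0xf0, b == 0xcc both
   sides evaluate to 0xffffff3f == ~0xc0.  */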
8119 /* See if this can be simplified into a rotate first. If that
8120 is unsuccessful continue in the association code. */
8124 if (integer_zerop (arg1))
8125 return non_lvalue (fold_convert (type, arg0));
8126 if (integer_all_onesp (arg1))
8127 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8128 if (operand_equal_p (arg0, arg1, 0))
8129 return omit_one_operand (type, integer_zero_node, arg0);
8132 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8135 t1 = build_int_cst (type, -1);
8136 t1 = force_fit_type (t1, 0, false, false);
8137 return omit_one_operand (type, t1, arg1);
8141 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8142 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8144 t1 = build_int_cst (type, -1);
8145 t1 = force_fit_type (t1, 0, false, false);
8146 return omit_one_operand (type, t1, arg0);
8149 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8150 with a constant, and the two constants have no bits in common,
8151 we should treat this as a BIT_IOR_EXPR since this may produce more
8152 simplifications.  */
8153 if (TREE_CODE (arg0) == BIT_AND_EXPR
8154 && TREE_CODE (arg1) == BIT_AND_EXPR
8155 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8156 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8157 && integer_zerop (const_binop (BIT_AND_EXPR,
8158 TREE_OPERAND (arg0, 1),
8159 TREE_OPERAND (arg1, 1), 0)))
8161 code = BIT_IOR_EXPR;
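/* Illustrative sketch: (a & 4) ^ (b & 3) has disjoint constant masks,
   so it is handled exactly like (a & 4) | (b & 3) by the BIT_IOR
   code.  */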
8165 /* (X | Y) ^ X -> Y & ~X.  */
8166 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8169 tree t2 = TREE_OPERAND (arg0, 1);
8170 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8172 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8173 fold_convert (type, t1));
8177 /* (Y | X) ^ X -> Y & ~X.  */
8178 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8179 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8181 tree t2 = TREE_OPERAND (arg0, 0);
8182 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8184 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8185 fold_convert (type, t1));
8189 /* X ^ (X | Y) -> Y & ~X.  */
8190 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8191 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8193 tree t2 = TREE_OPERAND (arg1, 1);
8194 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8196 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8197 fold_convert (type, t1));
8201 /* X ^ (Y | X) -> Y & ~X.  */
8202 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8203 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8205 tree t2 = TREE_OPERAND (arg1, 0);
8206 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8208 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8209 fold_convert (type, t1));
8213 /* Convert ~X ^ ~Y to X ^ Y. */
8214 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8215 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8216 return fold_build2 (code, type,
8217 fold_convert (type, TREE_OPERAND (arg0, 0)),
8218 fold_convert (type, TREE_OPERAND (arg1, 0)));
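/* Illustrative summary of the XOR identities above:
     (x | y) ^ x  =>  y & ~x
     ~x ^ ~y      =>  x ^ y
   Each form drops an inversion without changing any result bit.  */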
8220 /* See if this can be simplified into a rotate first.  If that
8221 is unsuccessful, continue in the association code.  */
8223 case BIT_AND_EXPR:
8225 if (integer_all_onesp (arg1))
8226 return non_lvalue (fold_convert (type, arg0));
8227 if (integer_zerop (arg1))
8228 return omit_one_operand (type, arg1, arg0);
8229 if (operand_equal_p (arg0, arg1, 0))
8230 return non_lvalue (fold_convert (type, arg0));
8232 /* ~X & X is always zero. */
8233 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8234 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8235 return omit_one_operand (type, integer_zero_node, arg1);
8237 /* X & ~X is always zero. */
8238 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8239 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8240 return omit_one_operand (type, integer_zero_node, arg0);
8242 t1 = distribute_bit_expr (code, type, arg0, arg1);
8243 if (t1 != NULL_TREE)
8245 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8246 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8247 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8249 unsigned int prec
8250 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8252 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8253 && (~TREE_INT_CST_LOW (arg1)
8254 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8255 return fold_convert (type, TREE_OPERAND (arg0, 0));
8258 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8260 This results in more efficient code for machines without a NOR
8261 instruction. Combine will canonicalize to the first form
8262 which will allow use of NOR instructions provided by the
8263 backend if they exist. */
8264 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8265 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8267 return fold_build1 (BIT_NOT_EXPR, type,
8268 build2 (BIT_IOR_EXPR, type,
8269 TREE_OPERAND (arg0, 0),
8270 TREE_OPERAND (arg1, 0)));
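/* Illustrative sketch: the fold above rewrites ~a & ~b as ~(a | b),
   which is a single NOR operation where the target provides one.  */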
8274 case RDIV_EXPR:
8276 /* Don't touch a floating-point divide by zero unless the mode
8277 of the constant can represent infinity. */
8278 if (TREE_CODE (arg1) == REAL_CST
8279 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8280 && real_zerop (arg1))
8281 return NULL_TREE;
8283 /* Optimize A / A to 1.0 if we don't care about
8284 NaNs or Infinities. */
8285 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8286 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8287 && operand_equal_p (arg0, arg1, 0))
8289 tree r = build_real (TREE_TYPE (arg0), dconst1);
8291 return omit_two_operands (type, r, arg0, arg1);
8294 /* (-A) / (-B) -> A / B */
8295 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8296 return fold_build2 (RDIV_EXPR, type,
8297 TREE_OPERAND (arg0, 0),
8298 negate_expr (arg1));
8299 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8300 return fold_build2 (RDIV_EXPR, type,
8301 negate_expr (arg0),
8302 TREE_OPERAND (arg1, 0));
8304 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8305 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8306 && real_onep (arg1))
8307 return non_lvalue (fold_convert (type, arg0));
8309 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8310 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8311 && real_minus_onep (arg1))
8312 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8314 /* If ARG1 is a constant, we can convert this to a multiply by the
8315 reciprocal. This does not have the same rounding properties,
8316 so only do this if -funsafe-math-optimizations. We can actually
8317 always safely do it if ARG1 is a power of two, but it's hard to
8318 tell if it is or not in a portable manner. */
8319 if (TREE_CODE (arg1) == REAL_CST)
8321 if (flag_unsafe_math_optimizations
8322 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8324 return fold_build2 (MULT_EXPR, type, arg0, tem);
8325 /* Find the reciprocal if optimizing and the result is exact. */
8326 if (optimize)
8328 REAL_VALUE_TYPE r;
8329 r = TREE_REAL_CST (arg1);
8330 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8332 tem = build_real (type, r);
8333 return fold_build2 (MULT_EXPR, type,
8334 fold_convert (type, arg0), tem);
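/* Illustrative sketch: x / 2.0 always becomes x * 0.5 because the
   reciprocal is exact; x / 3.0 becomes x * (1.0/3.0) only under
   -funsafe-math-optimizations, since that product may round
   differently.  */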
8338 /* Convert A/B/C to A/(B*C). */
8339 if (flag_unsafe_math_optimizations
8340 && TREE_CODE (arg0) == RDIV_EXPR)
8341 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8342 fold_build2 (MULT_EXPR, type,
8343 TREE_OPERAND (arg0, 1), arg1));
8345 /* Convert A/(B/C) to (A/B)*C. */
8346 if (flag_unsafe_math_optimizations
8347 && TREE_CODE (arg1) == RDIV_EXPR)
8348 return fold_build2 (MULT_EXPR, type,
8349 fold_build2 (RDIV_EXPR, type, arg0,
8350 TREE_OPERAND (arg1, 0)),
8351 TREE_OPERAND (arg1, 1));
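/* Illustrative summary: with -funsafe-math-optimizations,
     a / b / c    =>  a / (b * c)
     a / (b / c)  =>  (a / b) * c
   each trading one division for a cheaper multiplication.  */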
8353 /* Convert C1/(X*C2) into (C1/C2)/X. */
8354 if (flag_unsafe_math_optimizations
8355 && TREE_CODE (arg1) == MULT_EXPR
8356 && TREE_CODE (arg0) == REAL_CST
8357 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8359 tree tem = const_binop (RDIV_EXPR, arg0,
8360 TREE_OPERAND (arg1, 1), 0);
8361 if (tem)
8362 return fold_build2 (RDIV_EXPR, type, tem,
8363 TREE_OPERAND (arg1, 0));
8366 if (flag_unsafe_math_optimizations)
8368 enum built_in_function fcode = builtin_mathfn_code (arg1);
8369 /* Optimize x/expN(y) into x*expN(-y). */
8370 if (BUILTIN_EXPONENT_P (fcode))
8372 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8373 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8374 tree arglist = build_tree_list (NULL_TREE,
8375 fold_convert (type, arg));
8376 arg1 = build_function_call_expr (expfn, arglist);
8377 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8380 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8381 if (fcode == BUILT_IN_POW
8382 || fcode == BUILT_IN_POWF
8383 || fcode == BUILT_IN_POWL)
8385 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8386 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8387 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8388 tree neg11 = fold_convert (type, negate_expr (arg11));
8389 tree arglist = tree_cons(NULL_TREE, arg10,
8390 build_tree_list (NULL_TREE, neg11));
8391 arg1 = build_function_call_expr (powfn, arglist);
8392 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8396 if (flag_unsafe_math_optimizations)
8398 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8399 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8401 /* Optimize sin(x)/cos(x) as tan(x). */
8402 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8403 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8404 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8405 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8406 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8408 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8410 if (tanfn != NULL_TREE)
8411 return build_function_call_expr (tanfn,
8412 TREE_OPERAND (arg0, 1));
8415 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8416 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8417 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8418 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8419 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8420 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8422 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8424 if (tanfn != NULL_TREE)
8426 tree tmp = TREE_OPERAND (arg0, 1);
8427 tmp = build_function_call_expr (tanfn, tmp);
8428 return fold_build2 (RDIV_EXPR, type,
8429 build_real (type, dconst1), tmp);
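/* Illustrative sketch: sin (x) / cos (x) becomes tan (x), and
   cos (x) / sin (x) becomes 1.0 / tan (x); both require
   -funsafe-math-optimizations because the results may round
   differently.  */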
8433 /* Optimize pow(x,c)/x as pow(x,c-1). */
8434 if (fcode0 == BUILT_IN_POW
8435 || fcode0 == BUILT_IN_POWF
8436 || fcode0 == BUILT_IN_POWL)
8438 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8439 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8440 if (TREE_CODE (arg01) == REAL_CST
8441 && ! TREE_CONSTANT_OVERFLOW (arg01)
8442 && operand_equal_p (arg1, arg00, 0))
8444 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8446 REAL_VALUE_TYPE c;
8447 tree arg, arglist;
8448 c = TREE_REAL_CST (arg01);
8449 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8450 arg = build_real (type, c);
8451 arglist = build_tree_list (NULL_TREE, arg);
8452 arglist = tree_cons (NULL_TREE, arg1, arglist);
8453 return build_function_call_expr (powfn, arglist);
8459 case TRUNC_DIV_EXPR:
8460 case ROUND_DIV_EXPR:
8461 case FLOOR_DIV_EXPR:
8462 case CEIL_DIV_EXPR:
8463 case EXACT_DIV_EXPR:
8464 if (integer_onep (arg1))
8465 return non_lvalue (fold_convert (type, arg0));
8466 if (integer_zerop (arg1))
8467 return NULL_TREE;
8468 /* X / -1 is -X.  */
8469 if (!TYPE_UNSIGNED (type)
8470 && TREE_CODE (arg1) == INTEGER_CST
8471 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8472 && TREE_INT_CST_HIGH (arg1) == -1)
8473 return fold_convert (type, negate_expr (arg0));
8475 /* Convert -A / -B to A / B when the type is signed and overflow is
8476 undefined.  */
8477 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8478 && TREE_CODE (arg0) == NEGATE_EXPR
8479 && negate_expr_p (arg1))
8480 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8481 negate_expr (arg1));
8482 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8483 && TREE_CODE (arg1) == NEGATE_EXPR
8484 && negate_expr_p (arg0))
8485 return fold_build2 (code, type, negate_expr (arg0),
8486 TREE_OPERAND (arg1, 0));
8488 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8489 operation, EXACT_DIV_EXPR.
8491 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8492 At one time others generated faster code; it's not clear whether they do
8493 after the last round of changes to the DIV code in expmed.c.  */
8494 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8495 && multiple_of_p (type, arg0, arg1))
8496 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
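/* Illustrative sketch: for "(n * 4) / 4" the dividend is provably a
   multiple of the divisor, so a CEIL_ or FLOOR_DIV_EXPR is safely
   rewritten as EXACT_DIV_EXPR, the cheapest division to expand.  */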
8498 if (TREE_CODE (arg1) == INTEGER_CST
8499 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8500 return fold_convert (type, tem);
8504 case CEIL_MOD_EXPR:
8505 case FLOOR_MOD_EXPR:
8506 case ROUND_MOD_EXPR:
8507 case TRUNC_MOD_EXPR:
8508 /* X % 1 is always zero, but be sure to preserve any side
8509 effects in X.  */
8510 if (integer_onep (arg1))
8511 return omit_one_operand (type, integer_zero_node, arg0);
8513 /* X % 0, return X % 0 unchanged so that we can get the
8514 proper warnings and errors. */
8515 if (integer_zerop (arg1))
8516 return NULL_TREE;
8518 /* 0 % X is always zero, but be sure to preserve any side
8519 effects in X. Place this after checking for X == 0. */
8520 if (integer_zerop (arg0))
8521 return omit_one_operand (type, integer_zero_node, arg1);
8523 /* X % -1 is zero. */
8524 if (!TYPE_UNSIGNED (type)
8525 && TREE_CODE (arg1) == INTEGER_CST
8526 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8527 && TREE_INT_CST_HIGH (arg1) == -1)
8528 return omit_one_operand (type, integer_zero_node, arg0);
8530 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8531 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
8532 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8533 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8534 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8536 unsigned HOST_WIDE_INT high, low;
8537 tree mask;
8538 int l;
8540 l = tree_log2 (arg1);
8541 if (l >= HOST_BITS_PER_WIDE_INT)
8543 high = ((unsigned HOST_WIDE_INT) 1
8544 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8550 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8553 mask = build_int_cst_wide (type, low, high);
8554 return fold_build2 (BIT_AND_EXPR, type,
8555 fold_convert (type, arg0), mask);
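/* Illustrative sketch: for unsigned (or provably nonnegative) u,
   "u % 8" becomes "u & 7", using the mask built just above.  */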
8558 /* X % -C is the same as X % C. */
8559 if (code == TRUNC_MOD_EXPR
8560 && !TYPE_UNSIGNED (type)
8561 && TREE_CODE (arg1) == INTEGER_CST
8562 && !TREE_CONSTANT_OVERFLOW (arg1)
8563 && TREE_INT_CST_HIGH (arg1) < 0
8565 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8566 && !sign_bit_p (arg1, arg1))
8567 return fold_build2 (code, type, fold_convert (type, arg0),
8568 fold_convert (type, negate_expr (arg1)));
8570 /* X % -Y is the same as X % Y. */
8571 if (code == TRUNC_MOD_EXPR
8572 && !TYPE_UNSIGNED (type)
8573 && TREE_CODE (arg1) == NEGATE_EXPR
8575 return fold_build2 (code, type, fold_convert (type, arg0),
8576 fold_convert (type, TREE_OPERAND (arg1, 0)));
8578 if (TREE_CODE (arg1) == INTEGER_CST
8579 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8580 return fold_convert (type, tem);
8584 case LROTATE_EXPR:
8585 case RROTATE_EXPR:
8586 if (integer_all_onesp (arg0))
8587 return omit_one_operand (type, arg0, arg1);
8588 goto shift;
8590 case RSHIFT_EXPR:
8591 /* Optimize -1 >> x for arithmetic right shifts.  */
8592 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8593 return omit_one_operand (type, arg0, arg1);
8594 /* ... fall through ... */
8596 case LSHIFT_EXPR:
8597 shift:
8598 if (integer_zerop (arg1))
8599 return non_lvalue (fold_convert (type, arg0));
8600 if (integer_zerop (arg0))
8601 return omit_one_operand (type, arg0, arg1);
8603 /* Since negative shift count is not well-defined,
8604 don't try to compute it in the compiler. */
8605 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8606 return NULL_TREE;
8608 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8609 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8610 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8611 && host_integerp (TREE_OPERAND (arg0, 1), false)
8612 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8614 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8615 + TREE_INT_CST_LOW (arg1));
8617 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8618 being well defined. */
8619 if (low >= TYPE_PRECISION (type))
8621 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8622 low = low % TYPE_PRECISION (type);
8623 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8624 return build_int_cst (type, 0);
8626 low = TYPE_PRECISION (type) - 1;
8629 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8630 build_int_cst (type, low));
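/* Illustrative sketch: (x >> 2) >> 3 becomes x >> 5.  If the combined
   count reaches the type width, rotates wrap modulo the width, logical
   shifts fold to 0, and arithmetic right shifts are clamped to
   width - 1.  */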
8633 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8634 into x & ((unsigned)-1 >> c) for unsigned types. */
8635 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8636 || (TYPE_UNSIGNED (type)
8637 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8638 && host_integerp (arg1, false)
8639 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8640 && host_integerp (TREE_OPERAND (arg0, 1), false)
8641 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8643 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8644 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8646 if (low0 == low1)
8648 tree lshift, arg00;
8650 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8652 lshift = build_int_cst (type, -1);
8653 lshift = int_const_binop (code, lshift, arg1, 0);
8655 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8659 /* Rewrite an LROTATE_EXPR by a constant into an
8660 RROTATE_EXPR by a new constant. */
8661 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8663 tree tem = build_int_cst (NULL_TREE,
8664 GET_MODE_BITSIZE (TYPE_MODE (type)));
8665 tem = fold_convert (TREE_TYPE (arg1), tem);
8666 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8667 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8670 /* If we have a rotate of a bit operation with the rotate count and
8671 the second operand of the bit operation both constant,
8672 permute the two operations. */
8673 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8674 && (TREE_CODE (arg0) == BIT_AND_EXPR
8675 || TREE_CODE (arg0) == BIT_IOR_EXPR
8676 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8677 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8678 return fold_build2 (TREE_CODE (arg0), type,
8679 fold_build2 (code, type,
8680 TREE_OPERAND (arg0, 0), arg1),
8681 fold_build2 (code, type,
8682 TREE_OPERAND (arg0, 1), arg1));
8684 /* Two consecutive rotates adding up to the width of the mode can
8685 be ignored.  */
8686 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8687 && TREE_CODE (arg0) == RROTATE_EXPR
8688 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8689 && TREE_INT_CST_HIGH (arg1) == 0
8690 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8691 && ((TREE_INT_CST_LOW (arg1)
8692 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8693 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8694 return TREE_OPERAND (arg0, 0);
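/* Illustrative sketch of the two rotate folds above: on a 32-bit type
   a rotate-left by 8 becomes a rotate-right by 24, and a pair of right
   rotates by 8 and 24 cancels back to the original operand.  */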
8697 case MIN_EXPR:
8699 if (operand_equal_p (arg0, arg1, 0))
8700 return omit_one_operand (type, arg0, arg1);
8701 if (INTEGRAL_TYPE_P (type)
8702 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8703 return omit_one_operand (type, arg1, arg0);
8706 case MAX_EXPR:
8707 if (operand_equal_p (arg0, arg1, 0))
8708 return omit_one_operand (type, arg0, arg1);
8709 if (INTEGRAL_TYPE_P (type)
8710 && TYPE_MAX_VALUE (type)
8711 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8712 return omit_one_operand (type, arg1, arg0);
8715 case TRUTH_ANDIF_EXPR:
8716 /* Note that the operands of this must be ints
8717 and their values must be 0 or 1.
8718 ("true" is a fixed value perhaps depending on the language.) */
8719 /* If first arg is constant zero, return it. */
8720 if (integer_zerop (arg0))
8721 return fold_convert (type, arg0);
8722 case TRUTH_AND_EXPR:
8723 /* If either arg is constant true, drop it. */
8724 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8725 return non_lvalue (fold_convert (type, arg1));
8726 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8727 /* Preserve sequence points. */
8728 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8729 return non_lvalue (fold_convert (type, arg0));
8730 /* If second arg is constant zero, result is zero, but first arg
8731 must be evaluated. */
8732 if (integer_zerop (arg1))
8733 return omit_one_operand (type, arg1, arg0);
8734 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8735 case will be handled here. */
8736 if (integer_zerop (arg0))
8737 return omit_one_operand (type, arg0, arg1);
8739 /* !X && X is always false. */
8740 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8741 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8742 return omit_one_operand (type, integer_zero_node, arg1);
8743 /* X && !X is always false. */
8744 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8745 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8746 return omit_one_operand (type, integer_zero_node, arg0);
8748 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8749 means A >= Y && A != MAX, but in this case we know that
8750 A < X <= MAX.  */
8752 if (!TREE_SIDE_EFFECTS (arg0)
8753 && !TREE_SIDE_EFFECTS (arg1))
8755 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8756 if (tem && !operand_equal_p (tem, arg0, 0))
8757 return fold_build2 (code, type, tem, arg1);
8759 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8760 if (tem && !operand_equal_p (tem, arg1, 0))
8761 return fold_build2 (code, type, arg0, tem);
8765 /* We only do these simplifications if we are optimizing.  */
8766 if (!optimize)
8767 return NULL_TREE;
8769 /* Check for things like (A || B) && (A || C). We can convert this
8770 to A || (B && C). Note that either operator can be any of the four
8771 truth and/or operations and the transformation will still be
8772 valid. Also note that we only care about order for the
8773 ANDIF and ORIF operators. If B contains side effects, this
8774 might change the truth-value of A. */
8775 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8776 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8777 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8778 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8779 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8780 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8782 tree a00 = TREE_OPERAND (arg0, 0);
8783 tree a01 = TREE_OPERAND (arg0, 1);
8784 tree a10 = TREE_OPERAND (arg1, 0);
8785 tree a11 = TREE_OPERAND (arg1, 1);
8786 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8787 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8788 && (code == TRUTH_AND_EXPR
8789 || code == TRUTH_OR_EXPR));
8791 if (operand_equal_p (a00, a10, 0))
8792 return fold_build2 (TREE_CODE (arg0), type, a00,
8793 fold_build2 (code, type, a01, a11));
8794 else if (commutative && operand_equal_p (a00, a11, 0))
8795 return fold_build2 (TREE_CODE (arg0), type, a00,
8796 fold_build2 (code, type, a01, a10));
8797 else if (commutative && operand_equal_p (a01, a10, 0))
8798 return fold_build2 (TREE_CODE (arg0), type, a01,
8799 fold_build2 (code, type, a00, a11));
8801 /* This case is tricky because we must either have commutative
8802 operators or else A10 must not have side-effects. */
8804 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8805 && operand_equal_p (a01, a11, 0))
8806 return fold_build2 (TREE_CODE (arg0), type,
8807 fold_build2 (code, type, a00, a10),
8811 /* See if we can build a range comparison. */
8812 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8813 return tem;
8815 /* Check for the possibility of merging component references. If our
8816 lhs is another similar operation, try to merge its rhs with our
8817 rhs. Then try to merge our lhs and rhs. */
8818 if (TREE_CODE (arg0) == code
8819 && 0 != (tem = fold_truthop (code, type,
8820 TREE_OPERAND (arg0, 1), arg1)))
8821 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8823 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8824 return tem;
8826 return NULL_TREE;
8828 case TRUTH_ORIF_EXPR:
8829 /* Note that the operands of this must be ints
8830 and their values must be 0 or true.
8831 ("true" is a fixed value perhaps depending on the language.) */
8832 /* If first arg is constant true, return it. */
8833 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8834 return fold_convert (type, arg0);
8835 case TRUTH_OR_EXPR:
8836 /* If either arg is constant zero, drop it.  */
8837 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8838 return non_lvalue (fold_convert (type, arg1));
8839 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8840 /* Preserve sequence points. */
8841 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8842 return non_lvalue (fold_convert (type, arg0));
8843 /* If second arg is constant true, result is true, but we must
8844 evaluate first arg. */
8845 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8846 return omit_one_operand (type, arg1, arg0);
8847 /* Likewise for first arg, but note this only occurs here for
8848 TRUTH_OR_EXPR.  */
8849 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8850 return omit_one_operand (type, arg0, arg1);
8852 /* !X || X is always true. */
8853 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8854 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8855 return omit_one_operand (type, integer_one_node, arg1);
8856 /* X || !X is always true. */
8857 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8858 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8859 return omit_one_operand (type, integer_one_node, arg0);
8863 case TRUTH_XOR_EXPR:
8864 /* If the second arg is constant zero, drop it. */
8865 if (integer_zerop (arg1))
8866 return non_lvalue (fold_convert (type, arg0));
8867 /* If the second arg is constant true, this is a logical inversion. */
8868 if (integer_onep (arg1))
8870 /* Only call invert_truthvalue if operand is a truth value. */
8871 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8872 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8873 else
8874 tem = invert_truthvalue (arg0);
8875 return non_lvalue (fold_convert (type, tem));
8877 /* Identical arguments cancel to zero. */
8878 if (operand_equal_p (arg0, arg1, 0))
8879 return omit_one_operand (type, integer_zero_node, arg0);
8881 /* !X ^ X is always true. */
8882 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8883 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8884 return omit_one_operand (type, integer_one_node, arg1);
8886 /* X ^ !X is always true. */
8887 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8888 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8889 return omit_one_operand (type, integer_one_node, arg0);
8893 case EQ_EXPR:
8894 case NE_EXPR:
8895 case LT_EXPR:
8896 case GT_EXPR:
8897 case LE_EXPR:
8898 case GE_EXPR:
8899 /* If one arg is a real or integer constant, put it last.  */
8900 if (tree_swap_operands_p (arg0, arg1, true))
8901 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8903 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8904 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8905 && (code == NE_EXPR || code == EQ_EXPR))
8906 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8907 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8910 /* bool_var != 0 becomes bool_var. */
8911 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8912 && code == NE_EXPR)
8913 return non_lvalue (fold_convert (type, arg0));
8915 /* bool_var == 1 becomes bool_var. */
8916 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8917 && code == EQ_EXPR)
8918 return non_lvalue (fold_convert (type, arg0));
8920 /* bool_var != 1 becomes !bool_var. */
8921 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8922 && code == NE_EXPR)
8923 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8925 /* bool_var == 0 becomes !bool_var. */
8926 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8927 && code == EQ_EXPR)
8928 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
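/* Illustrative summary of the boolean folds above, for a C99 _Bool b:
     b != 0  =>  b        b == 1  =>  b
     b != 1  =>  !b       b == 0  =>  !b  */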
8930 /* If this is an equality comparison of the address of a non-weak
8931 object against zero, then we know the result. */
8932 if ((code == EQ_EXPR || code == NE_EXPR)
8933 && TREE_CODE (arg0) == ADDR_EXPR
8934 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8935 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8936 && integer_zerop (arg1))
8937 return constant_boolean_node (code != EQ_EXPR, type);
8939 /* If this is an equality comparison of the address of two non-weak,
8940 unaliased symbols neither of which are extern (since we do not
8941 have access to attributes for externs), then we know the result. */
8942 if ((code == EQ_EXPR || code == NE_EXPR)
8943 && TREE_CODE (arg0) == ADDR_EXPR
8944 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8945 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8946 && ! lookup_attribute ("alias",
8947 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8948 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8949 && TREE_CODE (arg1) == ADDR_EXPR
8950 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8951 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8952 && ! lookup_attribute ("alias",
8953 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8954 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8956 /* We know that we're looking at the address of two
8957 non-weak, unaliased, static _DECL nodes.
8959 It is both wasteful and incorrect to call operand_equal_p
8960 to compare the two ADDR_EXPR nodes. It is wasteful in that
8961 all we need to do is test pointer equality for the arguments
8962 to the two ADDR_EXPR nodes. It is incorrect to use
8963 operand_equal_p as that function is NOT equivalent to a
8964 C equality test. It can in fact return false for two
8965 objects which would test as equal using the C equality
8966 operator.  */
8967 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8968 return constant_boolean_node (equal
8969 ? code == EQ_EXPR : code != EQ_EXPR,
8973 /* If this is a comparison of two exprs that look like an
8974 ARRAY_REF of the same object, then we can fold this to a
8975 comparison of the two offsets. */
8976 if (TREE_CODE_CLASS (code) == tcc_comparison)
8978 tree base0, offset0, base1, offset1;
8980 if (extract_array_ref (arg0, &base0, &offset0)
8981 && extract_array_ref (arg1, &base1, &offset1)
8982 && operand_equal_p (base0, base1, 0))
8984 /* Handle no offsets on both sides specially. */
8985 if (offset0 == NULL_TREE
8986 && offset1 == NULL_TREE)
8987 return fold_build2 (code, type, integer_zero_node,
8988 integer_zero_node);
8990 if (!offset0 || !offset1
8991 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8993 if (offset0 == NULL_TREE)
8994 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8995 if (offset1 == NULL_TREE)
8996 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8997 return fold_build2 (code, type, offset0, offset1);
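/* Illustrative sketch: a comparison such as &a[i] < &a[j], where both
   sides are ARRAY_REFs of the same base object, reduces to comparing
   the two offset expressions.  */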
9002 /* Transform comparisons of the form X +- C CMP X. */
9003 if ((code != EQ_EXPR && code != NE_EXPR)
9004 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9005 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9006 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9007 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9008 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9009 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9010 && !(flag_wrapv || flag_trapv))))
9012 tree arg01 = TREE_OPERAND (arg0, 1);
9013 enum tree_code code0 = TREE_CODE (arg0);
9014 int is_positive;
9016 if (TREE_CODE (arg01) == REAL_CST)
9017 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9018 else
9019 is_positive = tree_int_cst_sgn (arg01);
9021 /* (X - c) > X becomes false. */
9022 if (code == GT_EXPR
9023 && ((code0 == MINUS_EXPR && is_positive >= 0)
9024 || (code0 == PLUS_EXPR && is_positive <= 0)))
9025 return constant_boolean_node (0, type);
9027 /* Likewise (X + c) < X becomes false. */
9028 if (code == LT_EXPR
9029 && ((code0 == PLUS_EXPR && is_positive >= 0)
9030 || (code0 == MINUS_EXPR && is_positive <= 0)))
9031 return constant_boolean_node (0, type);
9033 /* Convert (X - c) <= X to true. */
9034 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9035 && code == LE_EXPR
9036 && ((code0 == MINUS_EXPR && is_positive >= 0)
9037 || (code0 == PLUS_EXPR && is_positive <= 0)))
9038 return constant_boolean_node (1, type);
9040 /* Convert (X + c) >= X to true. */
9041 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9042 && code == GE_EXPR
9043 && ((code0 == PLUS_EXPR && is_positive >= 0)
9044 || (code0 == MINUS_EXPR && is_positive <= 0)))
9045 return constant_boolean_node (1, type);
9047 if (TREE_CODE (arg01) == INTEGER_CST)
9049 /* Convert X + c > X and X - c < X to true for integers. */
9050 if (code == GT_EXPR
9051 && ((code0 == PLUS_EXPR && is_positive > 0)
9052 || (code0 == MINUS_EXPR && is_positive < 0)))
9053 return constant_boolean_node (1, type);
9055 if (code == LT_EXPR
9056 && ((code0 == MINUS_EXPR && is_positive > 0)
9057 || (code0 == PLUS_EXPR && is_positive < 0)))
9058 return constant_boolean_node (1, type);
9060 /* Convert X + c <= X and X - c >= X to false for integers. */
9061 if (code == LE_EXPR
9062 && ((code0 == PLUS_EXPR && is_positive > 0)
9063 || (code0 == MINUS_EXPR && is_positive < 0)))
9064 return constant_boolean_node (0, type);
9066 if (code == GE_EXPR
9067 && ((code0 == MINUS_EXPR && is_positive > 0)
9068 || (code0 == PLUS_EXPR && is_positive < 0)))
9069 return constant_boolean_node (0, type);
9073 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9074 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9075 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9076 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9077 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9078 && !(flag_wrapv || flag_trapv))
9079 && (TREE_CODE (arg1) == INTEGER_CST
9080 && !TREE_OVERFLOW (arg1)))
9082 tree const1 = TREE_OPERAND (arg0, 1);
9083 tree const2 = arg1;
9084 tree variable = TREE_OPERAND (arg0, 0);
9085 tree lhs;
9086 int lhs_add;
9087 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9089 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9090 TREE_TYPE (arg1), const2, const1);
9091 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9092 && (TREE_CODE (lhs) != INTEGER_CST
9093 || !TREE_OVERFLOW (lhs)))
9094 return fold_build2 (code, type, variable, lhs);
9097 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9099 tree targ0 = strip_float_extensions (arg0);
9100 tree targ1 = strip_float_extensions (arg1);
9101 tree newtype = TREE_TYPE (targ0);
9103 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9104 newtype = TREE_TYPE (targ1);
9106 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9107 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9108 return fold_build2 (code, type, fold_convert (newtype, targ0),
9109 fold_convert (newtype, targ1));
9111 /* (-a) CMP (-b) -> b CMP a */
9112 if (TREE_CODE (arg0) == NEGATE_EXPR
9113 && TREE_CODE (arg1) == NEGATE_EXPR)
9114 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9115 TREE_OPERAND (arg0, 0));
9117 if (TREE_CODE (arg1) == REAL_CST)
9119 REAL_VALUE_TYPE cst;
9120 cst = TREE_REAL_CST (arg1);
9122 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9123 if (TREE_CODE (arg0) == NEGATE_EXPR)
9124 return
9125 fold_build2 (swap_tree_comparison (code), type,
9126 TREE_OPERAND (arg0, 0),
9127 build_real (TREE_TYPE (arg1),
9128 REAL_VALUE_NEGATE (cst)));
9130 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9131 /* a CMP (-0) -> a CMP 0 */
9132 if (REAL_VALUE_MINUS_ZERO (cst))
9133 return fold_build2 (code, type, arg0,
9134 build_real (TREE_TYPE (arg1), dconst0));
9136 /* x != NaN is always true, other ops are always false. */
9137 if (REAL_VALUE_ISNAN (cst)
9138 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9140 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9141 return omit_one_operand (type, tem, arg0);
9144 /* Fold comparisons against infinity. */
9145 if (REAL_VALUE_ISINF (cst))
9147 tem = fold_inf_compare (code, type, arg0, arg1);
9148 if (tem != NULL_TREE)
9149 return tem;
9153 /* If this is a comparison of a real constant with a PLUS_EXPR
9154 or a MINUS_EXPR of a real constant, we can convert it into a
9155 comparison with a revised real constant as long as no overflow
9156 occurs when unsafe_math_optimizations are enabled. */
9157 if (flag_unsafe_math_optimizations
9158 && TREE_CODE (arg1) == REAL_CST
9159 && (TREE_CODE (arg0) == PLUS_EXPR
9160 || TREE_CODE (arg0) == MINUS_EXPR)
9161 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9162 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9163 ? MINUS_EXPR : PLUS_EXPR,
9164 arg1, TREE_OPERAND (arg0, 1), 0))
9165 && ! TREE_CONSTANT_OVERFLOW (tem))
9166 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9168 /* Likewise, we can simplify a comparison of a real constant with
9169 a MINUS_EXPR whose first operand is also a real constant, i.e.
9170 (c1 - x) < c2 becomes x > c1-c2. */
9171 if (flag_unsafe_math_optimizations
9172 && TREE_CODE (arg1) == REAL_CST
9173 && TREE_CODE (arg0) == MINUS_EXPR
9174 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9175 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9177 && ! TREE_CONSTANT_OVERFLOW (tem))
9178 return fold_build2 (swap_tree_comparison (code), type,
9179 TREE_OPERAND (arg0, 1), tem);
9181 /* Fold comparisons against built-in math functions. */
9182 if (TREE_CODE (arg1) == REAL_CST
9183 && flag_unsafe_math_optimizations
9184 && ! flag_errno_math)
9186 enum built_in_function fcode = builtin_mathfn_code (arg0);
9188 if (fcode != END_BUILTINS)
9190 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9191 if (tem != NULL_TREE)
9192 return tem;
9197 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9198 if (TREE_CONSTANT (arg1)
9199 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9200 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9201 /* This optimization is invalid for ordered comparisons
9202 if CONST+INCR overflows or if foo+incr might overflow.
9203 This optimization is invalid for floating point due to rounding.
9204 For pointer types we assume overflow doesn't happen. */
9205 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9206 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9207 && (code == EQ_EXPR || code == NE_EXPR))))
9209 tree varop, newconst;
9211 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9213 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9214 arg1, TREE_OPERAND (arg0, 1));
9215 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9216 TREE_OPERAND (arg0, 0),
9217 TREE_OPERAND (arg0, 1));
9221 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9222 arg1, TREE_OPERAND (arg0, 1));
9223 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9224 TREE_OPERAND (arg0, 0),
9225 TREE_OPERAND (arg0, 1));
9229 /* If VAROP is a reference to a bitfield, we must mask
9230 the constant by the width of the field. */
9231 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9232 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9233 && host_integerp (DECL_SIZE (TREE_OPERAND
9234 (TREE_OPERAND (varop, 0), 1)), 1))
9236 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9237 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9238 tree folded_compare, shift;
9240 /* First check whether the comparison would come out
9241 always the same. If we don't do that we would
9242 change the meaning with the masking. */
9243 folded_compare = fold_build2 (code, type,
9244 TREE_OPERAND (varop, 0), arg1);
9245 if (integer_zerop (folded_compare)
9246 || integer_onep (folded_compare))
9247 return omit_one_operand (type, folded_compare, varop);
9249 shift = build_int_cst (NULL_TREE,
9250 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9251 shift = fold_convert (TREE_TYPE (varop), shift);
9252 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9253 newconst, shift);
9254 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9255 newconst, shift);
9258 return fold_build2 (code, type, varop, newconst);
9261 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9262 This transformation affects the cases which are handled in later
9263 optimizations involving comparisons with non-negative constants. */
9264 if (TREE_CODE (arg1) == INTEGER_CST
9265 && TREE_CODE (arg0) != INTEGER_CST
9266 && tree_int_cst_sgn (arg1) > 0)
9268 switch (code)
9270 case GE_EXPR:
9271 arg1 = const_binop (MINUS_EXPR, arg1,
9272 build_int_cst (TREE_TYPE (arg1), 1), 0);
9273 return fold_build2 (GT_EXPR, type, arg0,
9274 fold_convert (TREE_TYPE (arg0), arg1));
9276 case LT_EXPR:
9277 arg1 = const_binop (MINUS_EXPR, arg1,
9278 build_int_cst (TREE_TYPE (arg1), 1), 0);
9279 return fold_build2 (LE_EXPR, type, arg0,
9280 fold_convert (TREE_TYPE (arg0), arg1));
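/* Illustrative sketch: with C = 4, "x >= 4" becomes "x > 3" and
   "x < 4" becomes "x <= 3", feeding the later folds on comparisons
   with non-negative constants.  */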
9287 /* Comparisons with the highest or lowest possible integer of
9288 the specified size will have known values. */
9290 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9292 if (TREE_CODE (arg1) == INTEGER_CST
9293 && ! TREE_CONSTANT_OVERFLOW (arg1)
9294 && width <= 2 * HOST_BITS_PER_WIDE_INT
9295 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9296 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9298 HOST_WIDE_INT signed_max_hi;
9299 unsigned HOST_WIDE_INT signed_max_lo;
9300 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9302 if (width <= HOST_BITS_PER_WIDE_INT)
9304 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9309 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9311 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9317 max_lo = signed_max_lo;
9318 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9324 width -= HOST_BITS_PER_WIDE_INT;
9326 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9331 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9333 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9338 max_hi = signed_max_hi;
9339 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9343 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9344 && TREE_INT_CST_LOW (arg1) == max_lo)
9346 switch (code)
9347 case GT_EXPR:
9348 return omit_one_operand (type, integer_zero_node, arg0);
9350 case GE_EXPR:
9351 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9353 case LE_EXPR:
9354 return omit_one_operand (type, integer_one_node, arg0);
9356 case LT_EXPR:
9357 return fold_build2 (NE_EXPR, type, arg0, arg1);
9359 /* The GE_EXPR and LT_EXPR cases above are not normally
9360 reached because of previous transformations. */
9365 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9367 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9369 switch (code)
9370 case GT_EXPR:
9371 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9372 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9373 case LE_EXPR:
9374 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9375 return fold_build2 (NE_EXPR, type, arg0, arg1);
9379 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9381 && TREE_INT_CST_LOW (arg1) == min_lo)
9383 switch (code)
9384 case LT_EXPR:
9385 return omit_one_operand (type, integer_zero_node, arg0);
9387 case LE_EXPR:
9388 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9390 case GE_EXPR:
9391 return omit_one_operand (type, integer_one_node, arg0);
9393 case GT_EXPR:
9394 return fold_build2 (NE_EXPR, type, op0, op1);
9399 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9401 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9403 switch (code)
9404 case GE_EXPR:
9405 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9406 return fold_build2 (NE_EXPR, type, arg0, arg1);
9407 case LT_EXPR:
9408 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9409 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9414 else if (!in_gimple_form
9415 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9416 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9417 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9418 /* signed_type does not work on pointer types. */
9419 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9421 /* The following case also applies to X < signed_max+1
9422 and X >= signed_max+1 because of previous transformations.  */
9423 if (code == LE_EXPR || code == GT_EXPR)
9425 tree st0, st1;
9426 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9427 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9428 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9429 type, fold_convert (st0, arg0),
9430 build_int_cst (st1, 0));
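/* Illustrative sketch: for unsigned x, "x <= INT_MAX" becomes
   "(int) x >= 0", turning a compare against a large constant into a
   simple sign test.  */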
9436 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9437 a MINUS_EXPR of a constant, we can convert it into a comparison with
9438 a revised constant as long as no overflow occurs. */
9439 if ((code == EQ_EXPR || code == NE_EXPR)
9440 && TREE_CODE (arg1) == INTEGER_CST
9441 && (TREE_CODE (arg0) == PLUS_EXPR
9442 || TREE_CODE (arg0) == MINUS_EXPR)
9443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9444 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9445 ? MINUS_EXPR : PLUS_EXPR,
9446 arg1, TREE_OPERAND (arg0, 1), 0))
9447 && ! TREE_CONSTANT_OVERFLOW (tem))
9448 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9450 /* Similarly for a NEGATE_EXPR. */
9451 else if ((code == EQ_EXPR || code == NE_EXPR)
9452 && TREE_CODE (arg0) == NEGATE_EXPR
9453 && TREE_CODE (arg1) == INTEGER_CST
9454 && 0 != (tem = negate_expr (arg1))
9455 && TREE_CODE (tem) == INTEGER_CST
9456 && ! TREE_CONSTANT_OVERFLOW (tem))
9457 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9459 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9460 for !=. Don't do this for ordered comparisons due to overflow. */
9461 else if ((code == NE_EXPR || code == EQ_EXPR)
9462 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9463 return fold_build2 (code, type,
9464 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9466 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9467 && (TREE_CODE (arg0) == NOP_EXPR
9468 || TREE_CODE (arg0) == CONVERT_EXPR))
9470 /* If we are widening one operand of an integer comparison,
9471 see if the other operand is similarly being widened. Perhaps we
9472 can do the comparison in the narrower type. */
9473 tem = fold_widened_comparison (code, type, arg0, arg1);
9474 if (tem)
9475 return tem;
9477 /* Or if we are changing signedness. */
9478 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9479 if (tem)
9480 return tem;
9483 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9484 constant, we can simplify it. */
9485 else if (TREE_CODE (arg1) == INTEGER_CST
9486 && (TREE_CODE (arg0) == MIN_EXPR
9487 || TREE_CODE (arg0) == MAX_EXPR)
9488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9490 tem = optimize_minmax_comparison (code, type, op0, op1);
9491 if (tem)
9492 return tem;
9497 /* If we are comparing an ABS_EXPR with a constant, we can
9498 convert all the cases into explicit comparisons, but they may
9499 well not be faster than doing the ABS and one comparison.
9500 But ABS (X) <= C is a range comparison, which becomes a subtraction
9501 and a comparison, and is probably faster. */
9502 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9503 && TREE_CODE (arg0) == ABS_EXPR
9504 && ! TREE_SIDE_EFFECTS (arg0)
9505 && (0 != (tem = negate_expr (arg1)))
9506 && TREE_CODE (tem) == INTEGER_CST
9507 && ! TREE_CONSTANT_OVERFLOW (tem))
9508 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9509 build2 (GE_EXPR, type,
9510 TREE_OPERAND (arg0, 0), tem),
9511 build2 (LE_EXPR, type,
9512 TREE_OPERAND (arg0, 0), arg1));
9514 /* Convert ABS_EXPR<x> >= 0 to true. */
9515 else if (code == GE_EXPR
9516 && tree_expr_nonnegative_p (arg0)
9517 && (integer_zerop (arg1)
9518 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9519 && real_zerop (arg1))))
9520 return omit_one_operand (type, integer_one_node, arg0);
9522 /* Convert ABS_EXPR<x> < 0 to false. */
9523 else if (code == LT_EXPR
9524 && tree_expr_nonnegative_p (arg0)
9525 && (integer_zerop (arg1) || real_zerop (arg1)))
9526 return omit_one_operand (type, integer_zero_node, arg0);
9528 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9529 else if ((code == EQ_EXPR || code == NE_EXPR)
9530 && TREE_CODE (arg0) == ABS_EXPR
9531 && (integer_zerop (arg1) || real_zerop (arg1)))
9532 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9534 /* If this is an EQ or NE comparison with zero and ARG0 is
9535 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9536 two operations, but the latter can be done in one less insn
9537 on machines that have only two-operand insns or on which a
9538 constant cannot be the first operand. */
9539 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9540 && TREE_CODE (arg0) == BIT_AND_EXPR)
9542 tree arg00 = TREE_OPERAND (arg0, 0);
9543 tree arg01 = TREE_OPERAND (arg0, 1);
9544 if (TREE_CODE (arg00) == LSHIFT_EXPR
9545 && integer_onep (TREE_OPERAND (arg00, 0)))
9546 return
9547 fold_build2 (code, type,
9548 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9549 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9550 arg01, TREE_OPERAND (arg00, 1)),
9551 fold_convert (TREE_TYPE (arg0),
9554 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9555 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9556 return
9557 fold_build2 (code, type,
9558 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9559 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9560 arg00, TREE_OPERAND (arg01, 1)),
9561 fold_convert (TREE_TYPE (arg0),
9566 /* If this is an NE or EQ comparison of zero against the result of a
9567 signed MOD operation whose second operand is a power of 2, make
9568 the MOD operation unsigned since it is simpler and equivalent. */
9569 if ((code == NE_EXPR || code == EQ_EXPR)
9570 && integer_zerop (arg1)
9571 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9572 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9573 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9574 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9575 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9576 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9578 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9579 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9580 fold_convert (newtype,
9581 TREE_OPERAND (arg0, 0)),
9582 fold_convert (newtype,
9583 TREE_OPERAND (arg0, 1)));
9585 return fold_build2 (code, type, newmod,
9586 fold_convert (newtype, arg1));
9589 /* If this is an NE comparison of zero with an AND of one, remove the
9590 comparison since the AND will give the correct value. */
9591 if (code == NE_EXPR && integer_zerop (arg1)
9592 && TREE_CODE (arg0) == BIT_AND_EXPR
9593 && integer_onep (TREE_OPERAND (arg0, 1)))
9594 return fold_convert (type, arg0);
9596 /* If we have (A & C) == C where C is a power of 2, convert this into
9597 (A & C) != 0. Similarly for NE_EXPR. */
9598 if ((code == EQ_EXPR || code == NE_EXPR)
9599 && TREE_CODE (arg0) == BIT_AND_EXPR
9600 && integer_pow2p (TREE_OPERAND (arg0, 1))
9601 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9602 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9603 arg0, fold_convert (TREE_TYPE (arg0),
9604 integer_zero_node));
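/* Illustrative sketch: with C a power of two, "(a & 8) == 8" becomes
   "(a & 8) != 0", the single-bit test form handled just below.  */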
9606 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9607 bit, then fold the expression into A < 0 or A >= 0. */
9608 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9609 if (tem)
9610 return tem;
9612 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9613 Similarly for NE_EXPR. */
9614 if ((code == EQ_EXPR || code == NE_EXPR)
9615 && TREE_CODE (arg0) == BIT_AND_EXPR
9616 && TREE_CODE (arg1) == INTEGER_CST
9617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9619 tree notc = fold_build1 (BIT_NOT_EXPR,
9620 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9621 TREE_OPERAND (arg0, 1));
9622 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9623 arg1, notc);
9624 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9625 if (integer_nonzerop (dandnotc))
9626 return omit_one_operand (type, rslt, arg0);
9629 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9630 Similarly for NE_EXPR. */
9631 if ((code == EQ_EXPR || code == NE_EXPR)
9632 && TREE_CODE (arg0) == BIT_IOR_EXPR
9633 && TREE_CODE (arg1) == INTEGER_CST
9634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9636 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9637 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9638 TREE_OPERAND (arg0, 1), notd);
9639 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9640 if (integer_nonzerop (candnotd))
9641 return omit_one_operand (type, rslt, arg0);
9644 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9645 and similarly for >= into !=. */
9646 if ((code == LT_EXPR || code == GE_EXPR)
9647 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9648 && TREE_CODE (arg1) == LSHIFT_EXPR
9649 && integer_onep (TREE_OPERAND (arg1, 0)))
9650 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9651 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9652 TREE_OPERAND (arg1, 1)),
9653 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9655 else if ((code == LT_EXPR || code == GE_EXPR)
9656 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9657 && (TREE_CODE (arg1) == NOP_EXPR
9658 || TREE_CODE (arg1) == CONVERT_EXPR)
9659 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9660 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9662 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9663 fold_convert (TREE_TYPE (arg0),
9664 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9665 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9667 fold_convert (TREE_TYPE (arg0), integer_zero_node));
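/* Illustrative sketch: for unsigned x, "x < (1 << y)" becomes
   "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0".  */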
9669 /* Simplify comparison of something with itself. (For IEEE
9670 floating-point, we can only do some of these simplifications.) */
9671 if (operand_equal_p (arg0, arg1, 0))
9674 case EQ_EXPR:
9676 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9677 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9678 return constant_boolean_node (1, type);
9681 case GE_EXPR:
9682 case LE_EXPR:
9683 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9684 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9685 return constant_boolean_node (1, type);
9686 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9688 case NE_EXPR:
9689 /* For NE, we can only do this simplification if integer
9690 or we don't honor IEEE floating point NaNs. */
9691 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9692 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9694 /* ... fall through ... */
9695 case LT_EXPR:
9696 case GT_EXPR:
9697 return constant_boolean_node (0, type);
9703 /* If we are comparing an expression that just has comparisons
9704 of two integer values, arithmetic expressions of those comparisons,
9705 and constants, we can simplify it. There are only three cases
9706 to check: the two values can either be equal, the first can be
9707 greater, or the second can be greater. Fold the expression for
9708 those three values. Since each value must be 0 or 1, we have
9709 eight possibilities, each of which corresponds to the constant 0
9710 or 1 or one of the six possible comparisons.
9712 This handles common cases like (a > b) == 0 but also handles
9713 expressions like ((x > y) - (y > x)) > 0, which supposedly
9714 occur in macroized code. */
9716 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9718 tree cval1 = 0, cval2 = 0;
9721 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9722 /* Don't handle degenerate cases here; they should already
9723 have been handled anyway. */
9724 && cval1 != 0 && cval2 != 0
9725 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9726 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9727 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9728 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9729 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9730 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9731 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9733 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9734 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9736 /* We can't just pass T to eval_subst in case cval1 or cval2
9737 was the same as ARG1. */
9740 = fold_build2 (code, type,
9741 eval_subst (arg0, cval1, maxval,
9745 = fold_build2 (code, type,
9746 eval_subst (arg0, cval1, maxval,
9750 = fold_build2 (code, type,
9751 eval_subst (arg0, cval1, minval,
9755 /* All three of these results should be 0 or 1. Confirm they
9756 are. Then use those values to select the proper code
9759 if ((integer_zerop (high_result)
9760 || integer_onep (high_result))
9761 && (integer_zerop (equal_result)
9762 || integer_onep (equal_result))
9763 && (integer_zerop (low_result)
9764 || integer_onep (low_result)))
9766 /* Make a 3-bit mask with the high-order bit being the
9767 value for `>', the next for '=', and the low for '<'. */
9768 switch ((integer_onep (high_result) * 4)
9769 + (integer_onep (equal_result) * 2)
9770 + integer_onep (low_result))
9774 return omit_one_operand (type, integer_zero_node, arg0);
9795 return omit_one_operand (type, integer_one_node, arg0);
9799 return save_expr (build2 (code, type, cval1, cval2));
9801 return fold_build2 (code, type, cval1, cval2);
9806 /* If this is a comparison of a field, we may be able to simplify it. */
9807 if (((TREE_CODE (arg0) == COMPONENT_REF
9808 && lang_hooks.can_use_bit_fields_p ())
9809 || TREE_CODE (arg0) == BIT_FIELD_REF)
9810 && (code == EQ_EXPR || code == NE_EXPR)
9811 /* Handle the constant case even without -O
9812 to make sure the warnings are given. */
9813 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9815 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9816 if (t1)
9817 return t1;
9820 /* Fold a comparison of the address of COMPONENT_REFs with the same
9821 type and component to a comparison of the address of the base
9822 object. In short, &x->a OP &y->a to x OP y and
9823 &x->a OP &y.a to x OP &y */
9824 if (TREE_CODE (arg0) == ADDR_EXPR
9825 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9826 && TREE_CODE (arg1) == ADDR_EXPR
9827 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9829 tree cref0 = TREE_OPERAND (arg0, 0);
9830 tree cref1 = TREE_OPERAND (arg1, 0);
9831 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9833 tree op0 = TREE_OPERAND (cref0, 0);
9834 tree op1 = TREE_OPERAND (cref1, 0);
9835 return fold_build2 (code, type,
9836 build_fold_addr_expr (op0),
9837 build_fold_addr_expr (op1));
9841 /* Optimize comparisons of strlen vs zero to a compare of the
9842 first character of the string vs zero. To wit,
9843 strlen(ptr) == 0 => *ptr == 0
9844 strlen(ptr) != 0 => *ptr != 0
9845 Other cases should reduce to one of these two (or a constant)
9846 due to the return value of strlen being unsigned. */
9847 if ((code == EQ_EXPR || code == NE_EXPR)
9848 && integer_zerop (arg1)
9849 && TREE_CODE (arg0) == CALL_EXPR)
9851 tree fndecl = get_callee_fndecl (arg0);
9855 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9856 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9857 && (arglist = TREE_OPERAND (arg0, 1))
9858 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9859 && ! TREE_CHAIN (arglist))
9861 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9862 return fold_build2 (code, type, iref,
9863 build_int_cst (TREE_TYPE (iref), 0));
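/* Illustrative sketch: "strlen (p) == 0" folds to "*p == 0", avoiding
   the library call; because strlen returns an unsigned value, other
   comparisons against zero reduce to one of these two forms (or to a
   constant).  */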
9867 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9868 into a single range test. */
9869 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9870 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9871 && TREE_CODE (arg1) == INTEGER_CST
9872 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9873 && !integer_zerop (TREE_OPERAND (arg0, 1))
9874 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9875 && !TREE_OVERFLOW (arg1))
9877 t1 = fold_div_compare (code, type, arg0, arg1);
9878 if (t1 != NULL_TREE)
9879 return t1;
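/* Worked instance of the range test above: for unsigned X,
     X / 4 == 2   <=>   8 <= X && X <= 11
   so, conceptually, fold_div_compare can turn the division comparison
   into a single range check.  */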
9882 if ((code == EQ_EXPR || code == NE_EXPR)
9883 && integer_zerop (arg1)
9884 && tree_expr_nonzero_p (arg0))
9886 tree res = constant_boolean_node (code == NE_EXPR, type);
9887 return omit_one_operand (type, res, arg0);
9890 t1 = fold_relational_const (code, type, arg0, arg1);
9891 return t1 == NULL_TREE ? NULL_TREE : t1;
9893 case UNORDERED_EXPR:
9901 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9903 t1 = fold_relational_const (code, type, arg0, arg1);
9904 if (t1 != NULL_TREE)
9908 /* If the first operand is NaN, the result is constant. */
9909 if (TREE_CODE (arg0) == REAL_CST
9910 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9911 && (code != LTGT_EXPR || ! flag_trapping_math))
9913 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9916 return omit_one_operand (type, t1, arg1);
9919 /* If the second operand is NaN, the result is constant. */
9920 if (TREE_CODE (arg1) == REAL_CST
9921 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9922 && (code != LTGT_EXPR || ! flag_trapping_math))
9924 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9927 return omit_one_operand (type, t1, arg0);
9930 /* Simplify unordered comparison of something with itself. */
9931 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9932 && operand_equal_p (arg0, arg1, 0))
9933 return constant_boolean_node (1, type);
9935 if (code == LTGT_EXPR
9936 && !flag_trapping_math
9937 && operand_equal_p (arg0, arg1, 0))
9938 return constant_boolean_node (0, type);
9940 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9942 tree targ0 = strip_float_extensions (arg0);
9943 tree targ1 = strip_float_extensions (arg1);
9944 tree newtype = TREE_TYPE (targ0);
9946 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9947 newtype = TREE_TYPE (targ1);
9949 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9950 return fold_build2 (code, type, fold_convert (newtype, targ0),
9951 fold_convert (newtype, targ1));
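/* e.g. with "float f1, f2":  (double) f1 < (double) f2  folds to
   f1 < f2; the extension to double is exact and order-preserving,
   so comparing in the narrower type is equivalent.  */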
9957 /* When pedantic, a compound expression can be neither an lvalue
9958 nor an integer constant expression. */
9959 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9961 /* Don't let (0, 0) be a null pointer constant. */
9962 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9963 : fold_convert (type, arg1);
9964 return pedantic_non_lvalue (tem);
9968 return build_complex (type, arg0, arg1);
9972 /* An ASSERT_EXPR should never be passed to fold_binary. */
9977 } /* switch (code) */
9980 /* Callback for walk_tree, looking for LABEL_EXPR.
9981 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
9982 Do not check the sub-tree of GOTO_EXPR. */
9985 contains_label_1 (tree *tp,
9987 void *data ATTRIBUTE_UNUSED)
9989 switch (TREE_CODE (*tp))
10001 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
10002 accessible from outside the sub-tree. Returns false if no
10003 addressable label is found. */
10006 contains_label_p (tree st)
10008 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
10011 /* Fold a ternary expression of code CODE and type TYPE with operands
10012 OP0, OP1, and OP2. Return the folded expression if folding is
10013 successful. Otherwise, return NULL_TREE. */
10016 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10019 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10020 enum tree_code_class kind = TREE_CODE_CLASS (code);
10022 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10023 && TREE_CODE_LENGTH (code) == 3);
10025 /* Strip any conversions that don't change the mode. This is safe
10026 for every expression, except for a comparison expression because
10027 its signedness is derived from its operands. So, in the latter
10028 case, only strip conversions that don't change the signedness.
10030 Note that this is done as an internal manipulation within the
10031 constant folder, in order to find the simplest representation of
10032 the arguments so that their form can be studied. In any case,
10033 the appropriate type conversions should be put back in the tree
10034 that will get out of the constant folder. */
10049 case COMPONENT_REF:
10050 if (TREE_CODE (arg0) == CONSTRUCTOR
10051 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10053 unsigned HOST_WIDE_INT idx;
10055 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10062 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10063 so all simple results must be passed through pedantic_non_lvalue. */
10064 if (TREE_CODE (arg0) == INTEGER_CST)
10066 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10067 tem = integer_zerop (arg0) ? op2 : op1;
10068 /* Only optimize constant conditions when the selected branch
10069 has the same type as the COND_EXPR. This avoids optimizing
10070 away "c ? x : throw", where the throw has a void type.
10071 Avoid throwing away the operand that contains a label. */
10072 if ((!TREE_SIDE_EFFECTS (unused_op)
10073 || !contains_label_p (unused_op))
10074 && (! VOID_TYPE_P (TREE_TYPE (tem))
10075 || VOID_TYPE_P (type)))
10076 return pedantic_non_lvalue (tem);
10079 if (operand_equal_p (arg1, op2, 0))
10080 return pedantic_omit_one_operand (type, arg1, arg0);
10082 /* If we have A op B ? A : C, we may be able to convert this to a
10083 simpler expression, depending on the operation and the values
10084 of B and C. Signed zeros prevent all of these transformations,
10085 for reasons given above each one.
10087 Also try swapping the arguments and inverting the conditional. */
10088 if (COMPARISON_CLASS_P (arg0)
10089 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10090 arg1, TREE_OPERAND (arg0, 1))
10091 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10093 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10098 if (COMPARISON_CLASS_P (arg0)
10099 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10101 TREE_OPERAND (arg0, 1))
10102 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10104 tem = invert_truthvalue (arg0);
10105 if (COMPARISON_CLASS_P (tem))
10107 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10113 /* If the second operand is simpler than the third, swap them
10114 since that produces better jump optimization results. */
10115 if (truth_value_p (TREE_CODE (arg0))
10116 && tree_swap_operands_p (op1, op2, false))
10118 /* See if this can be inverted. If it can't, possibly because
10119 it was a floating-point inequality comparison, don't do
10120 anything. */
10121 tem = invert_truthvalue (arg0);
10123 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10124 return fold_build3 (code, type, tem, op2, op1);
10127 /* Convert A ? 1 : 0 to simply A. */
10128 if (integer_onep (op1)
10129 && integer_zerop (op2)
10130 /* If we try to convert OP0 to our type, the
10131 call to fold will try to move the conversion inside
10132 a COND, which will recurse. In that case, the COND_EXPR
10133 is probably the best choice, so leave it alone. */
10134 && type == TREE_TYPE (arg0))
10135 return pedantic_non_lvalue (arg0);
10137 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10138 over COND_EXPR in cases such as floating point comparisons. */
10139 if (integer_zerop (op1)
10140 && integer_onep (op2)
10141 && truth_value_p (TREE_CODE (arg0)))
10142 return pedantic_non_lvalue (fold_convert (type,
10143 invert_truthvalue (arg0)));
10145 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10146 if (TREE_CODE (arg0) == LT_EXPR
10147 && integer_zerop (TREE_OPERAND (arg0, 1))
10148 && integer_zerop (op2)
10149 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10150 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10151 TREE_TYPE (tem), tem, arg1));
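/* Worked instance for a 32-bit int A:
     A < 0 ? INT_MIN : 0   ==>   A & INT_MIN
   INT_MIN (0x80000000) is exactly the sign bit, which is set iff
   A < 0, so both sides agree for every A.  */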
10153 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10154 already handled above. */
10155 if (TREE_CODE (arg0) == BIT_AND_EXPR
10156 && integer_onep (TREE_OPERAND (arg0, 1))
10157 && integer_zerop (op2)
10158 && integer_pow2p (arg1))
10160 tree tem = TREE_OPERAND (arg0, 0);
10162 if (TREE_CODE (tem) == RSHIFT_EXPR
10163 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10164 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10165 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10166 return fold_build2 (BIT_AND_EXPR, type,
10167 TREE_OPERAND (tem, 0), arg1);
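/* Worked instance:  ((a >> 3) & 1) ? (1 << 3) : 0  ==>  a & (1 << 3);
   both sides isolate bit 3 of a, yielding either 8 or 0.  */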
10170 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10171 is probably obsolete because the first operand should be a
10172 truth value (that's why we have the two cases above), but let's
10173 leave it in until we can confirm this for all front-ends. */
10174 if (integer_zerop (op2)
10175 && TREE_CODE (arg0) == NE_EXPR
10176 && integer_zerop (TREE_OPERAND (arg0, 1))
10177 && integer_pow2p (arg1)
10178 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10179 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10180 arg1, OEP_ONLY_CONST))
10181 return pedantic_non_lvalue (fold_convert (type,
10182 TREE_OPERAND (arg0, 0)));
10184 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10185 if (integer_zerop (op2)
10186 && truth_value_p (TREE_CODE (arg0))
10187 && truth_value_p (TREE_CODE (arg1)))
10188 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10190 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10191 if (integer_onep (op2)
10192 && truth_value_p (TREE_CODE (arg0))
10193 && truth_value_p (TREE_CODE (arg1)))
10195 /* Only perform transformation if ARG0 is easily inverted. */
10196 tem = invert_truthvalue (arg0);
10197 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10198 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10201 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10202 if (integer_zerop (arg1)
10203 && truth_value_p (TREE_CODE (arg0))
10204 && truth_value_p (TREE_CODE (op2)))
10206 /* Only perform transformation if ARG0 is easily inverted. */
10207 tem = invert_truthvalue (arg0);
10208 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10209 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10212 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10213 if (integer_onep (arg1)
10214 && truth_value_p (TREE_CODE (arg0))
10215 && truth_value_p (TREE_CODE (op2)))
10216 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10221 /* Check for a built-in function. */
10222 if (TREE_CODE (op0) == ADDR_EXPR
10223 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10224 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10225 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10228 case BIT_FIELD_REF:
10229 if (TREE_CODE (arg0) == VECTOR_CST
10230 && type == TREE_TYPE (TREE_TYPE (arg0))
10231 && host_integerp (arg1, 1)
10232 && host_integerp (op2, 1))
10234 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10235 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10238 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10239 && (idx % width) == 0
10240 && (idx = idx / width)
10241 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10243 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10244 while (idx-- > 0 && elements)
10245 elements = TREE_CHAIN (elements);
10247 return TREE_VALUE (elements);
10249 return fold_convert (type, integer_zero_node);
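/* Worked example (assuming a V4SI constant with 32-bit elements):
     BIT_FIELD_REF <{1, 2, 3, 4}, 32, 64>
   asks for the 32 bits at bit offset 64; 64 % 32 == 0 and
   64 / 32 == 2, so the walk above returns the third element, 3.  */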
10256 } /* switch (code) */
10259 /* Perform constant folding and related simplification of EXPR.
10260 The related simplifications include x*1 => x, x*0 => 0, etc.,
10261 and application of the associative law.
10262 NOP_EXPR conversions may be removed freely (as long as we
10263 are careful not to change the type of the overall expression).
10264 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10265 but we can constant-fold them if they have constant operands. */
10267 #ifdef ENABLE_FOLD_CHECKING
10268 # define fold(x) fold_1 (x)
10269 static tree fold_1 (tree);
10275 const tree t = expr;
10276 enum tree_code code = TREE_CODE (t);
10277 enum tree_code_class kind = TREE_CODE_CLASS (code);
10280 /* Return right away if a constant. */
10281 if (kind == tcc_constant)
10284 if (IS_EXPR_CODE_CLASS (kind))
10286 tree type = TREE_TYPE (t);
10287 tree op0, op1, op2;
10289 switch (TREE_CODE_LENGTH (code))
10292 op0 = TREE_OPERAND (t, 0);
10293 tem = fold_unary (code, type, op0);
10294 return tem ? tem : expr;
10296 op0 = TREE_OPERAND (t, 0);
10297 op1 = TREE_OPERAND (t, 1);
10298 tem = fold_binary (code, type, op0, op1);
10299 return tem ? tem : expr;
10301 op0 = TREE_OPERAND (t, 0);
10302 op1 = TREE_OPERAND (t, 1);
10303 op2 = TREE_OPERAND (t, 2);
10304 tem = fold_ternary (code, type, op0, op1, op2);
10305 return tem ? tem : expr;
10314 return fold (DECL_INITIAL (t));
10318 } /* switch (code) */
10321 #ifdef ENABLE_FOLD_CHECKING
10324 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10325 static void fold_check_failed (tree, tree);
10326 void print_fold_checksum (tree);
10328 /* When --enable-checking=fold is used, compute a digest of EXPR before
10329 and after the actual fold call, to verify that fold did not
10330 accidentally change the original EXPR. */
10336 struct md5_ctx ctx;
10337 unsigned char checksum_before[16], checksum_after[16];
10340 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10341 md5_init_ctx (&ctx);
10342 fold_checksum_tree (expr, &ctx, ht);
10343 md5_finish_ctx (&ctx, checksum_before);
10346 ret = fold_1 (expr);
10348 md5_init_ctx (&ctx);
10349 fold_checksum_tree (expr, &ctx, ht);
10350 md5_finish_ctx (&ctx, checksum_after);
10353 if (memcmp (checksum_before, checksum_after, 16))
10354 fold_check_failed (expr, ret);
10360 print_fold_checksum (tree expr)
10362 struct md5_ctx ctx;
10363 unsigned char checksum[16], cnt;
10366 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10367 md5_init_ctx (&ctx);
10368 fold_checksum_tree (expr, &ctx, ht);
10369 md5_finish_ctx (&ctx, checksum);
10371 for (cnt = 0; cnt < 16; ++cnt)
10372 fprintf (stderr, "%02x", checksum[cnt]);
10373 putc ('\n', stderr);
10377 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10379 internal_error ("fold check: original tree changed by fold");
10383 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10386 enum tree_code code;
10387 char buf[sizeof (struct tree_function_decl)];
10392 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10393 <= sizeof (struct tree_function_decl))
10394 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10397 slot = htab_find_slot (ht, expr, INSERT);
10401 code = TREE_CODE (expr);
10402 if (TREE_CODE_CLASS (code) == tcc_declaration
10403 && DECL_ASSEMBLER_NAME_SET_P (expr))
10405 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10406 memcpy (buf, expr, tree_size (expr));
10407 expr = (tree) buf;
10408 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10410 else if (TREE_CODE_CLASS (code) == tcc_type
10411 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10412 || TYPE_CACHED_VALUES_P (expr)
10413 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10415 /* Allow these fields to be modified. */
10416 memcpy (buf, expr, tree_size (expr));
10417 expr = (tree) buf;
10418 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10419 TYPE_POINTER_TO (expr) = NULL;
10420 TYPE_REFERENCE_TO (expr) = NULL;
10421 if (TYPE_CACHED_VALUES_P (expr))
10423 TYPE_CACHED_VALUES_P (expr) = 0;
10424 TYPE_CACHED_VALUES (expr) = NULL;
10427 md5_process_bytes (expr, tree_size (expr), ctx);
10428 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10429 if (TREE_CODE_CLASS (code) != tcc_type
10430 && TREE_CODE_CLASS (code) != tcc_declaration
10431 && code != TREE_LIST)
10432 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10433 switch (TREE_CODE_CLASS (code))
10439 md5_process_bytes (TREE_STRING_POINTER (expr),
10440 TREE_STRING_LENGTH (expr), ctx);
10443 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10444 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10447 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10453 case tcc_exceptional:
10457 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10458 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10459 expr = TREE_CHAIN (expr);
10460 goto recursive_label;
10463 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10464 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10470 case tcc_expression:
10471 case tcc_reference:
10472 case tcc_comparison:
10475 case tcc_statement:
10476 len = TREE_CODE_LENGTH (code);
10477 for (i = 0; i < len; ++i)
10478 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10480 case tcc_declaration:
10481 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10482 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10483 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10484 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10485 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10486 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10487 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10488 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10489 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10491 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10493 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10494 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10495 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10499 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10500 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10501 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10502 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10503 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10504 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10505 if (INTEGRAL_TYPE_P (expr)
10506 || SCALAR_FLOAT_TYPE_P (expr))
10508 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10509 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10511 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10512 if (TREE_CODE (expr) == RECORD_TYPE
10513 || TREE_CODE (expr) == UNION_TYPE
10514 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10515 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10516 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10525 /* Fold a unary tree expression with code CODE of type TYPE with an
10526 operand OP0. Return a folded expression if successful. Otherwise,
10527 return a tree expression with code CODE of type TYPE with an
10528 operand OP0. */
10531 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10534 #ifdef ENABLE_FOLD_CHECKING
10535 unsigned char checksum_before[16], checksum_after[16];
10536 struct md5_ctx ctx;
10539 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10540 md5_init_ctx (&ctx);
10541 fold_checksum_tree (op0, &ctx, ht);
10542 md5_finish_ctx (&ctx, checksum_before);
10546 tem = fold_unary (code, type, op0);
10548 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10550 #ifdef ENABLE_FOLD_CHECKING
10551 md5_init_ctx (&ctx);
10552 fold_checksum_tree (op0, &ctx, ht);
10553 md5_finish_ctx (&ctx, checksum_after);
10556 if (memcmp (checksum_before, checksum_after, 16))
10557 fold_check_failed (op0, tem);
10562 /* Fold a binary tree expression with code CODE of type TYPE with
10563 operands OP0 and OP1. Return a folded expression if successful.
10564 Otherwise, return a tree expression with code CODE of type TYPE
10565 with operands OP0 and OP1. */
10568 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10572 #ifdef ENABLE_FOLD_CHECKING
10573 unsigned char checksum_before_op0[16],
10574 checksum_before_op1[16],
10575 checksum_after_op0[16],
10576 checksum_after_op1[16];
10577 struct md5_ctx ctx;
10580 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10581 md5_init_ctx (&ctx);
10582 fold_checksum_tree (op0, &ctx, ht);
10583 md5_finish_ctx (&ctx, checksum_before_op0);
10586 md5_init_ctx (&ctx);
10587 fold_checksum_tree (op1, &ctx, ht);
10588 md5_finish_ctx (&ctx, checksum_before_op1);
10592 tem = fold_binary (code, type, op0, op1);
10594 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10596 #ifdef ENABLE_FOLD_CHECKING
10597 md5_init_ctx (&ctx);
10598 fold_checksum_tree (op0, &ctx, ht);
10599 md5_finish_ctx (&ctx, checksum_after_op0);
10602 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10603 fold_check_failed (op0, tem);
10605 md5_init_ctx (&ctx);
10606 fold_checksum_tree (op1, &ctx, ht);
10607 md5_finish_ctx (&ctx, checksum_after_op1);
10610 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10611 fold_check_failed (op1, tem);
10616 /* Fold a ternary tree expression with code CODE of type TYPE with
10617 operands OP0, OP1, and OP2. Return a folded expression if
10618 successful. Otherwise, return a tree expression with code CODE of
10619 type TYPE with operands OP0, OP1, and OP2. */
10622 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10626 #ifdef ENABLE_FOLD_CHECKING
10627 unsigned char checksum_before_op0[16],
10628 checksum_before_op1[16],
10629 checksum_before_op2[16],
10630 checksum_after_op0[16],
10631 checksum_after_op1[16],
10632 checksum_after_op2[16];
10633 struct md5_ctx ctx;
10636 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10637 md5_init_ctx (&ctx);
10638 fold_checksum_tree (op0, &ctx, ht);
10639 md5_finish_ctx (&ctx, checksum_before_op0);
10642 md5_init_ctx (&ctx);
10643 fold_checksum_tree (op1, &ctx, ht);
10644 md5_finish_ctx (&ctx, checksum_before_op1);
10647 md5_init_ctx (&ctx);
10648 fold_checksum_tree (op2, &ctx, ht);
10649 md5_finish_ctx (&ctx, checksum_before_op2);
10653 tem = fold_ternary (code, type, op0, op1, op2);
10655 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10657 #ifdef ENABLE_FOLD_CHECKING
10658 md5_init_ctx (&ctx);
10659 fold_checksum_tree (op0, &ctx, ht);
10660 md5_finish_ctx (&ctx, checksum_after_op0);
10663 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10664 fold_check_failed (op0, tem);
10666 md5_init_ctx (&ctx);
10667 fold_checksum_tree (op1, &ctx, ht);
10668 md5_finish_ctx (&ctx, checksum_after_op1);
10671 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10672 fold_check_failed (op1, tem);
10674 md5_init_ctx (&ctx);
10675 fold_checksum_tree (op2, &ctx, ht);
10676 md5_finish_ctx (&ctx, checksum_after_op2);
10679 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10680 fold_check_failed (op2, tem);
10685 /* Perform constant folding and related simplification of initializer
10686 expressions. The following routines behave identically to "fold_buildN"
10687 but ignore potential run-time traps and exceptions that fold must preserve. */
10689 #define START_FOLD_INIT \
10690 int saved_signaling_nans = flag_signaling_nans;\
10691 int saved_trapping_math = flag_trapping_math;\
10692 int saved_rounding_math = flag_rounding_math;\
10693 int saved_trapv = flag_trapv;\
10694 flag_signaling_nans = 0;\
10695 flag_trapping_math = 0;\
10696 flag_rounding_math = 0;\
10697 flag_trapv = 0
10699 #define END_FOLD_INIT \
10700 flag_signaling_nans = saved_signaling_nans;\
10701 flag_trapping_math = saved_trapping_math;\
10702 flag_rounding_math = saved_rounding_math;\
10703 flag_trapv = saved_trapv
10706 fold_build1_initializer (enum tree_code code, tree type, tree op)
10711 result = fold_build1 (code, type, op);
10718 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10723 result = fold_build2 (code, type, op0, op1);
10730 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10736 result = fold_build3 (code, type, op0, op1, op2);
10742 #undef START_FOLD_INIT
10743 #undef END_FOLD_INIT
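/* Usage sketch (editorial, hypothetical): with -frounding-math, fold
   must leave "1.0 / 3.0" as a run-time operation, but the initializer
   variants above may still fold it to a constant, since a static
   initializer is evaluated once with the default rounding mode.  */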
10745 /* Determine if first argument is a multiple of second argument. Return 0 if
10746 it is not, or we cannot easily determine that it is.
10748 An example of the sort of thing we care about (at this point; this routine
10749 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10750 fold cases do now) is discovering that
10752 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10754 is a multiple of
10756 SAVE_EXPR (J * 8)
10758 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10760 This code also handles discovering that
10762 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10764 is a multiple of 8 so we don't have to worry about dealing with a
10765 possible remainder.
10767 Note that we *look* inside a SAVE_EXPR only to determine how it was
10768 calculated; it is not safe for fold to do much of anything else with the
10769 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10770 at run time. For example, the latter example above *cannot* be implemented
10771 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10772 evaluation time of the original SAVE_EXPR is not necessarily the same at
10773 the time the new expression is evaluated. The only optimization of this
10774 sort that would be valid is changing
10776 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10778 divided by 8 to
10780 SAVE_EXPR (I) * SAVE_EXPR (J)
10782 (where the same SAVE_EXPR (J) is used in the original and the
10783 transformed version). */
10786 multiple_of_p (tree type, tree top, tree bottom)
10788 if (operand_equal_p (top, bottom, 0))
10791 if (TREE_CODE (type) != INTEGER_TYPE)
10794 switch (TREE_CODE (top))
10797 /* Bitwise and provides a power of two multiple. If the mask is
10798 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10799 if (!integer_pow2p (bottom))
10804 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10805 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10809 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10810 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10813 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10817 op1 = TREE_OPERAND (top, 1);
10818 /* const_binop may not detect overflow correctly,
10819 so check for it explicitly here. */
10820 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10821 > TREE_INT_CST_LOW (op1)
10822 && TREE_INT_CST_HIGH (op1) == 0
10823 && 0 != (t1 = fold_convert (type,
10824 const_binop (LSHIFT_EXPR,
10827 && ! TREE_OVERFLOW (t1))
10828 return multiple_of_p (type, t1, bottom);
10833 /* Can't handle conversions from non-integral or wider integral type. */
10834 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10835 || (TYPE_PRECISION (type)
10836 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10839 /* ... fall through ... */
10842 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10845 if (TREE_CODE (bottom) != INTEGER_CST
10846 || (TYPE_UNSIGNED (type)
10847 && (tree_int_cst_sgn (top) < 0
10848 || tree_int_cst_sgn (bottom) < 0)))
10850 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10851 top, bottom, 0));
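/* Worked instance of the BIT_AND_EXPR rule above: (X & 24) is always
   a multiple of 8, because 8 is a power of two and the mask 24 is
   itself a multiple of 8, forcing the low three bits to zero.  */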
10858 /* Return true if `t' is known to be non-negative. */
10861 tree_expr_nonnegative_p (tree t)
10863 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10866 switch (TREE_CODE (t))
10869 /* We can't return 1 if flag_wrapv is set because
10870 ABS_EXPR<INT_MIN> = INT_MIN. */
10871 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10876 return tree_int_cst_sgn (t) >= 0;
10879 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10882 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10883 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10884 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10886 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10887 both unsigned and at least 2 bits shorter than the result. */
10888 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10889 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10890 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10892 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10893 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10894 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10895 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10897 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10898 TYPE_PRECISION (inner2)) + 1;
10899 return prec < TYPE_PRECISION (TREE_TYPE (t));
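/* e.g. with "unsigned char a, b":  (int) a + (int) b is at most
   255 + 255 == 510, needing 9 value bits; well under the 31
   available in a 32-bit signed int, so the sum cannot be negative.  */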
10905 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10907 /* x * x for floating point x is always non-negative. */
10908 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10910 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10911 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10914 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10915 both unsigned and the sum of their precisions is less than the precision of the result. */
10916 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10917 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10918 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10920 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10921 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10922 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10923 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10924 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10925 < TYPE_PRECISION (TREE_TYPE (t));
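/* e.g. with "unsigned char a, b":  (int) a * (int) b is at most
   255 * 255 == 65025, which fits in 16 value bits, so the product
   cannot wrap into the sign bit of a 32-bit int.  */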
10931 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10932 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10938 case TRUNC_DIV_EXPR:
10939 case CEIL_DIV_EXPR:
10940 case FLOOR_DIV_EXPR:
10941 case ROUND_DIV_EXPR:
10942 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10943 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10945 case TRUNC_MOD_EXPR:
10946 case CEIL_MOD_EXPR:
10947 case FLOOR_MOD_EXPR:
10948 case ROUND_MOD_EXPR:
10950 case NON_LVALUE_EXPR:
10952 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10954 case COMPOUND_EXPR:
10956 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10959 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10962 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10963 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10967 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10968 tree outer_type = TREE_TYPE (t);
10970 if (TREE_CODE (outer_type) == REAL_TYPE)
10972 if (TREE_CODE (inner_type) == REAL_TYPE)
10973 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10974 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10976 if (TYPE_UNSIGNED (inner_type))
10978 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10981 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10983 if (TREE_CODE (inner_type) == REAL_TYPE)
10984 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10985 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10986 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10987 && TYPE_UNSIGNED (inner_type);
10994 tree temp = TARGET_EXPR_SLOT (t);
10995 t = TARGET_EXPR_INITIAL (t);
10997 /* If the initializer is non-void, then it's a normal expression
10998 that will be assigned to the slot. */
10999 if (!VOID_TYPE_P (t))
11000 return tree_expr_nonnegative_p (t);
11002 /* Otherwise, the initializer sets the slot in some way. One common
11003 way is an assignment statement at the end of the initializer. */
11006 if (TREE_CODE (t) == BIND_EXPR)
11007 t = expr_last (BIND_EXPR_BODY (t));
11008 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11009 || TREE_CODE (t) == TRY_CATCH_EXPR)
11010 t = expr_last (TREE_OPERAND (t, 0));
11011 else if (TREE_CODE (t) == STATEMENT_LIST)
11016 if (TREE_CODE (t) == MODIFY_EXPR
11017 && TREE_OPERAND (t, 0) == temp)
11018 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11025 tree fndecl = get_callee_fndecl (t);
11026 tree arglist = TREE_OPERAND (t, 1);
11027 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11028 switch (DECL_FUNCTION_CODE (fndecl))
11030 #define CASE_BUILTIN_F(BUILT_IN_FN) \
11031 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
11032 #define CASE_BUILTIN_I(BUILT_IN_FN) \
11033 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
11035 CASE_BUILTIN_F (BUILT_IN_ACOS)
11036 CASE_BUILTIN_F (BUILT_IN_ACOSH)
11037 CASE_BUILTIN_F (BUILT_IN_CABS)
11038 CASE_BUILTIN_F (BUILT_IN_COSH)
11039 CASE_BUILTIN_F (BUILT_IN_ERFC)
11040 CASE_BUILTIN_F (BUILT_IN_EXP)
11041 CASE_BUILTIN_F (BUILT_IN_EXP10)
11042 CASE_BUILTIN_F (BUILT_IN_EXP2)
11043 CASE_BUILTIN_F (BUILT_IN_FABS)
11044 CASE_BUILTIN_F (BUILT_IN_FDIM)
11045 CASE_BUILTIN_F (BUILT_IN_HYPOT)
11046 CASE_BUILTIN_F (BUILT_IN_POW10)
11047 CASE_BUILTIN_I (BUILT_IN_FFS)
11048 CASE_BUILTIN_I (BUILT_IN_PARITY)
11049 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
11053 CASE_BUILTIN_F (BUILT_IN_SQRT)
11054 /* sqrt(-0.0) is -0.0. */
11055 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11057 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11059 CASE_BUILTIN_F (BUILT_IN_ASINH)
11060 CASE_BUILTIN_F (BUILT_IN_ATAN)
11061 CASE_BUILTIN_F (BUILT_IN_ATANH)
11062 CASE_BUILTIN_F (BUILT_IN_CBRT)
11063 CASE_BUILTIN_F (BUILT_IN_CEIL)
11064 CASE_BUILTIN_F (BUILT_IN_ERF)
11065 CASE_BUILTIN_F (BUILT_IN_EXPM1)
11066 CASE_BUILTIN_F (BUILT_IN_FLOOR)
11067 CASE_BUILTIN_F (BUILT_IN_FMOD)
11068 CASE_BUILTIN_F (BUILT_IN_FREXP)
11069 CASE_BUILTIN_F (BUILT_IN_LCEIL)
11070 CASE_BUILTIN_F (BUILT_IN_LDEXP)
11071 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
11072 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
11073 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
11074 CASE_BUILTIN_F (BUILT_IN_LLRINT)
11075 CASE_BUILTIN_F (BUILT_IN_LLROUND)
11076 CASE_BUILTIN_F (BUILT_IN_LRINT)
11077 CASE_BUILTIN_F (BUILT_IN_LROUND)
11078 CASE_BUILTIN_F (BUILT_IN_MODF)
11079 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
11080 CASE_BUILTIN_F (BUILT_IN_POW)
11081 CASE_BUILTIN_F (BUILT_IN_RINT)
11082 CASE_BUILTIN_F (BUILT_IN_ROUND)
11083 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11084 CASE_BUILTIN_F (BUILT_IN_SINH)
11085 CASE_BUILTIN_F (BUILT_IN_TANH)
11086 CASE_BUILTIN_F (BUILT_IN_TRUNC)
11087 /* True if the 1st argument is nonnegative. */
11088 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11090 CASE_BUILTIN_F (BUILT_IN_FMAX)
11091 /* True if the 1st OR 2nd arguments are nonnegative. */
11092 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11093 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11095 CASE_BUILTIN_F (BUILT_IN_FMIN)
11096 /* True if the 1st AND 2nd arguments are nonnegative. */
11097 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11098 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11100 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11101 /* True if the 2nd argument is nonnegative. */
11102 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11106 #undef CASE_BUILTIN_F
11107 #undef CASE_BUILTIN_I
11111 /* ... fall through ... */
11114 if (truth_value_p (TREE_CODE (t)))
11115 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11119 /* We don't know sign of `t', so be conservative and return false. */
11123 /* Return true when T is an address and is known to be nonzero.
11124 For floating point we further ensure that T is not denormal.
11125 Similar logic is present in nonzero_address in rtlanal.h. */
11128 tree_expr_nonzero_p (tree t)
11130 tree type = TREE_TYPE (t);
11132 /* Doing something useful for floating point would need more work. */
11133 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11136 switch (TREE_CODE (t))
11139 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11142 /* We used to test for !integer_zerop here. This does not work correctly
11143 if TREE_CONSTANT_OVERFLOW (t). */
11144 return (TREE_INT_CST_LOW (t) != 0
11145 || TREE_INT_CST_HIGH (t) != 0);
11148 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11150 /* In the presence of negative values it is hard to say
11151 anything. */
11152 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11153 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11154 return false;
11155 /* One of the operands must be positive and the other non-negative. */
11156 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11157 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11162 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11164 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11165 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11171 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11172 tree outer_type = TREE_TYPE (t);
11174 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11175 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11181 tree base = get_base_address (TREE_OPERAND (t, 0));
11186 /* Weak declarations may link to NULL. */
11187 if (VAR_OR_FUNCTION_DECL_P (base))
11188 return !DECL_WEAK (base);
11190 /* Constants are never weak. */
11191 if (CONSTANT_CLASS_P (base))
11198 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11199 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11202 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11203 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11206 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11208 /* When both operands are nonzero, then MAX must be too. */
11209 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11212 /* MAX where operand 0 is positive is positive. */
11213 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11215 /* MAX where operand 1 is positive is positive. */
11216 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11217 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11221 case COMPOUND_EXPR:
11224 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11227 case NON_LVALUE_EXPR:
11228 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11231 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11232 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11235 return alloca_call_p (t);
11243 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11244 attempt to fold the expression to a constant without modifying TYPE,
11245 OP0 or OP1.
11247 If the expression could be simplified to a constant, then return
11248 the constant. If the expression would not be simplified to a
11249 constant, then return NULL_TREE. */
11252 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11254 tree tem = fold_binary (code, type, op0, op1);
11255 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11258 /* Given the components of a unary expression CODE, TYPE and OP0,
11259 attempt to fold the expression to a constant without modifying
11260 TYPE or OP0.
11262 If the expression could be simplified to a constant, then return
11263 the constant. If the expression would not be simplified to a
11264 constant, then return NULL_TREE. */
11267 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11269 tree tem = fold_unary (code, type, op0);
11270 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11273 /* If EXP represents referencing an element in a constant string
11274 (either via pointer arithmetic or array indexing), return the
11275 tree representing the value accessed, otherwise return NULL. */
11278 fold_read_from_constant_string (tree exp)
11280 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11282 tree exp1 = TREE_OPERAND (exp, 0);
11286 if (TREE_CODE (exp) == INDIRECT_REF)
11287 string = string_constant (exp1, &index);
11290 tree low_bound = array_ref_low_bound (exp);
11291 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11293 /* Optimize the special case of a zero lower bound.
11295 We convert the low_bound to sizetype to avoid some problems
11296 with constant folding. (E.g. suppose the lower bound is 1,
11297 and its mode is QI. Without the conversion, (ARRAY
11298 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11299 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11300 if (! integer_zerop (low_bound))
11301 index = size_diffop (index, fold_convert (sizetype, low_bound));
11307 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11308 && TREE_CODE (string) == STRING_CST
11309 && TREE_CODE (index) == INTEGER_CST
11310 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11311 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11313 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11314 return fold_convert (TREE_TYPE (exp),
11315 build_int_cst (NULL_TREE,
11316 (TREE_STRING_POINTER (string)
11317 [TREE_INT_CST_LOW (index)])));
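/* Worked example: "abc"[1] satisfies all the checks above, so it
   folds to the character constant 'b', read directly out of
   TREE_STRING_POINTER.  */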
11322 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11323 an integer constant or real constant.
11325 TYPE is the type of the result. */
11328 fold_negate_const (tree arg0, tree type)
11330 tree t = NULL_TREE;
11332 switch (TREE_CODE (arg0))
11336 unsigned HOST_WIDE_INT low;
11337 HOST_WIDE_INT high;
11338 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11339 TREE_INT_CST_HIGH (arg0),
11340 &low, &high);
11341 t = build_int_cst_wide (type, low, high);
11342 t = force_fit_type (t, 1,
11343 (overflow | TREE_OVERFLOW (arg0))
11344 && !TYPE_UNSIGNED (type),
11345 TREE_CONSTANT_OVERFLOW (arg0));
11350 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11354 gcc_unreachable ();
11360 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11361 an integer constant or real constant.
11363 TYPE is the type of the result. */
11366 fold_abs_const (tree arg0, tree type)
11368 tree t = NULL_TREE;
11370 switch (TREE_CODE (arg0))
11373 /* If the value is unsigned, then the absolute value is
11374 the same as the ordinary value. */
11375 if (TYPE_UNSIGNED (type))
11377 /* Similarly, if the value is non-negative. */
11378 else if (INT_CST_LT (integer_minus_one_node, arg0))
11380 /* If the value is negative, then the absolute value is
11381 its negation. */
11384 unsigned HOST_WIDE_INT low;
11385 HOST_WIDE_INT high;
11386 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11387 TREE_INT_CST_HIGH (arg0),
11388 &low, &high);
11389 t = build_int_cst_wide (type, low, high);
11390 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11391 TREE_CONSTANT_OVERFLOW (arg0));
11396 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11397 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11403 gcc_unreachable ();
11409 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11410 constant. TYPE is the type of the result. */
11413 fold_not_const (tree arg0, tree type)
11415 tree t = NULL_TREE;
11417 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11419 t = build_int_cst_wide (type,
11420 ~ TREE_INT_CST_LOW (arg0),
11421 ~ TREE_INT_CST_HIGH (arg0));
11422 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11423 TREE_CONSTANT_OVERFLOW (arg0));
11428 /* Given CODE, a relational operator, the target type, TYPE and two
11429 constant operands OP0 and OP1, return the result of the
11430 relational operation. If the result is not a compile time
11431 constant, then return NULL_TREE. */
11434 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11436 int result, invert;
11438 /* From here on, the only cases we handle are when the result is
11439 known to be a constant. */
11441 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11443 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11444 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11446 /* Handle the cases where either operand is a NaN. */
11447 if (real_isnan (c0) || real_isnan (c1))
11457 case UNORDERED_EXPR:
11471 if (flag_trapping_math)
11477 gcc_unreachable ();
11480 return constant_boolean_node (result, type);
11483 return constant_boolean_node (real_compare (code, c0, c1), type);
11486 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11488 To compute GT, swap the arguments and do LT.
11489 To compute GE, do LT and invert the result.
11490 To compute LE, swap the arguments, do LT and invert the result.
11491 To compute NE, do EQ and invert the result.
11493 Therefore, the code below must handle only EQ and LT. */
11495 if (code == LE_EXPR || code == GT_EXPR)
11500 code = swap_tree_comparison (code);
11503 /* Note that it is safe to invert for real values here because we
11504 have already handled the one case where it matters. */
11507 if (code == NE_EXPR || code == GE_EXPR)
11510 code = invert_tree_comparison (code, false);
11513 /* Compute a result for LT or EQ if args permit;
11514 otherwise return NULL_TREE. */
11515 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11517 if (code == EQ_EXPR)
11518 result = tree_int_cst_equal (op0, op1);
11519 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11520 result = INT_CST_LT_UNSIGNED (op0, op1);
11522 result = INT_CST_LT (op0, op1);
11529 return constant_boolean_node (result, type);
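/* Worked instance of the reduction above: "5 >= 3" is computed as
   "!(5 < 3)", since GE is LT inverted, so result = !0 = 1.  */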
11532 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11533 Don't build a cleanup point expression if EXPR doesn't have side
11534 effects. */
11537 fold_build_cleanup_point_expr (tree type, tree expr)
11539 /* If the expression does not have side effects then we don't have to wrap
11540 it with a cleanup point expression. */
11541 if (!TREE_SIDE_EFFECTS (expr))
11544 /* If the expression is a RETURN_EXPR, check whether the expression inside
11545 the return, or the right-hand side of the MODIFY_EXPR inside the return,
11546 has side effects. If either of them has no side effects, we don't need
11547 to wrap the expression in a cleanup point expression. Note we don't
11548 check the left-hand side of the MODIFY_EXPR because it should always be the return decl. */
11549 if (TREE_CODE (expr) == RETURN_EXPR)
11551 tree op = TREE_OPERAND (expr, 0);
11552 if (!op || !TREE_SIDE_EFFECTS (op))
11554 op = TREE_OPERAND (op, 1);
11555 if (!TREE_SIDE_EFFECTS (op))
11559 return build1 (CLEANUP_POINT_EXPR, type, expr);
11562 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11563 avoid confusing the gimplify process. */
11566 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11568 /* The size of the object is not relevant when talking about its address. */
11569 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11570 t = TREE_OPERAND (t, 0);
11572 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11573 if (TREE_CODE (t) == INDIRECT_REF
11574 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11576 t = TREE_OPERAND (t, 0);
11577 if (TREE_TYPE (t) != ptrtype)
11578 t = build1 (NOP_EXPR, ptrtype, t);
11584 while (handled_component_p (base))
11585 base = TREE_OPERAND (base, 0);
11587 TREE_ADDRESSABLE (base) = 1;
11589 t = build1 (ADDR_EXPR, ptrtype, t);
11596 build_fold_addr_expr (tree t)
11598 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11601 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11602 of an indirection through OP0, or NULL_TREE if no simplification is
11603 possible. */
11606 fold_indirect_ref_1 (tree type, tree op0)
11612 subtype = TREE_TYPE (sub);
11613 if (!POINTER_TYPE_P (subtype))
11616 if (TREE_CODE (sub) == ADDR_EXPR)
11618 tree op = TREE_OPERAND (sub, 0);
11619 tree optype = TREE_TYPE (op);
11620 /* *&p => p; make sure to handle *&"str"[cst] here. */
11621 if (type == optype)
11623 tree fop = fold_read_from_constant_string (op);
11629 /* *(foo *)&fooarray => fooarray[0] */
11630 else if (TREE_CODE (optype) == ARRAY_TYPE
11631 && type == TREE_TYPE (optype))
11633 tree type_domain = TYPE_DOMAIN (optype);
11634 tree min_val = size_zero_node;
11635 if (type_domain && TYPE_MIN_VALUE (type_domain))
11636 min_val = TYPE_MIN_VALUE (type_domain);
11637 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11641 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11642 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11643 && type == TREE_TYPE (TREE_TYPE (subtype)))
11646 tree min_val = size_zero_node;
11647 sub = build_fold_indirect_ref (sub);
11648 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11649 if (type_domain && TYPE_MIN_VALUE (type_domain))
11650 min_val = TYPE_MIN_VALUE (type_domain);
11651 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11657 /* Builds an expression for an indirection through T, simplifying some
11658 cases. */
11661 build_fold_indirect_ref (tree t)
11663 tree type = TREE_TYPE (TREE_TYPE (t));
11664 tree sub = fold_indirect_ref_1 (type, t);
11669 return build1 (INDIRECT_REF, type, t);
11672 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11675 fold_indirect_ref (tree t)
11677 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11685 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11686 whose result is ignored. The type of the returned tree need not be
11687 the same as that of the original expression. */
11690 fold_ignored_result (tree t)
11692 if (!TREE_SIDE_EFFECTS (t))
11693 return integer_zero_node;
11696 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11699 t = TREE_OPERAND (t, 0);
11703 case tcc_comparison:
11704 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11705 t = TREE_OPERAND (t, 0);
11706 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11707 t = TREE_OPERAND (t, 1);
11712 case tcc_expression:
11713 switch (TREE_CODE (t))
11715 case COMPOUND_EXPR:
11716 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11718 t = TREE_OPERAND (t, 0);
11722 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11723 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11725 t = TREE_OPERAND (t, 0);
11738 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11739 This can only be applied to objects of a sizetype. */
11742 round_up (tree value, int divisor)
11744 tree div = NULL_TREE;
11746 gcc_assert (divisor > 0);
11750 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11751 have to do anything. Only do this when we are not given a const,
11752 because in that case, this check is more expensive than just
11753 doing it. */
11754 if (TREE_CODE (value) != INTEGER_CST)
11756 div = build_int_cst (TREE_TYPE (value), divisor);
11758 if (multiple_of_p (TREE_TYPE (value), value, div))
11762 /* If divisor is a power of two, simplify this to bit manipulation. */
11763 if (divisor == (divisor & -divisor))
11767 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11768 value = size_binop (PLUS_EXPR, value, t);
11769 t = build_int_cst (TREE_TYPE (value), -divisor);
11770 value = size_binop (BIT_AND_EXPR, value, t);
11775 div = build_int_cst (TREE_TYPE (value), divisor);
11776 value = size_binop (CEIL_DIV_EXPR, value, div);
11777 value = size_binop (MULT_EXPR, value, div);
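/* Worked example: round_up (21, 8) takes the power-of-two path:
     (21 + 7) & -8 == 28 & ~7 == 24.
   A non-power-of-two divisor such as 6 uses the division path:
     ceil (21 / 6) * 6 == 4 * 6 == 24.  */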
11783 /* Likewise, but round down. */
11786 round_down (tree value, int divisor)
11788 tree div = NULL_TREE;
11790 gcc_assert (divisor > 0);
11794 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11795 have to do anything. Only do this when we are not given a const,
11796 because in that case, this check is more expensive than just
11797 doing it. */
11798 if (TREE_CODE (value) != INTEGER_CST)
11800 div = build_int_cst (TREE_TYPE (value), divisor);
11802 if (multiple_of_p (TREE_TYPE (value), value, div))
11806 /* If divisor is a power of two, simplify this to bit manipulation. */
11807 if (divisor == (divisor & -divisor))
11811 t = build_int_cst (TREE_TYPE (value), -divisor);
11812 value = size_binop (BIT_AND_EXPR, value, t);
11817 div = build_int_cst (TREE_TYPE (value), divisor);
11818 value = size_binop (FLOOR_DIV_EXPR, value, div);
11819 value = size_binop (MULT_EXPR, value, div);
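/* Worked example: round_down (21, 8) == 21 & -8 == 16, while
   round_down (21, 6) == (21 / 6) * 6 == 18.  */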
11825 /* Returns the pointer to the base of the object addressed by EXP and
11826 extracts the information about the offset of the access, storing it
11827 to PBITPOS and POFFSET. */
11830 split_address_to_core_and_offset (tree exp,
11831 HOST_WIDE_INT *pbitpos, tree *poffset)
11834 enum machine_mode mode;
11835 int unsignedp, volatilep;
11836 HOST_WIDE_INT bitsize;
11838 if (TREE_CODE (exp) == ADDR_EXPR)
11840 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11841 poffset, &mode, &unsignedp, &volatilep,
11843 core = build_fold_addr_expr (core);
11849 *poffset = NULL_TREE;
11855 /* Returns true if addresses of E1 and E2 differ by a constant, false
11856 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11859 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11862 HOST_WIDE_INT bitpos1, bitpos2;
11863 tree toffset1, toffset2, tdiff, type;
11865 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11866 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11868 if (bitpos1 % BITS_PER_UNIT != 0
11869 || bitpos2 % BITS_PER_UNIT != 0
11870 || !operand_equal_p (core1, core2, 0))
11873 if (toffset1 && toffset2)
11875 type = TREE_TYPE (toffset1);
11876 if (type != TREE_TYPE (toffset2))
11877 toffset2 = fold_convert (type, toffset2);
11879 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11880 if (!cst_and_fits_in_hwi (tdiff))
11883 *diff = int_cst_value (tdiff);
11885 else if (toffset1 || toffset2)
11887 /* If only one of the offsets is non-constant, the difference cannot
11888 be a constant. */
11894 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
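/* Worked example (editorial sketch): for "struct s { int a; int b; } x;",
   ptr_difference_const (&x.b, &x.a, &diff) finds the common core &x,
   constant bit positions 32 and 0, and sets *diff to 4 (assuming
   32-bit int and 8-bit units).  */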
11898 /* Simplify the floating point expression EXP when the sign of the
11899 result is not significant. Return NULL_TREE if no simplification
11900 was possible. */
11903 fold_strip_sign_ops (tree exp)
11907 switch (TREE_CODE (exp))
11911 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11912 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11916 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11918 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11919 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11920 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11921 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11922 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11923 arg1 ? arg1 : TREE_OPERAND (exp, 1));