1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1. Then this yields nonzero if overflow occurred during the
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOWED if,
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs
191 or OVERFLOWABLE is <0 and any overflow occurs
192 We set TREE_CONSTANT_OVERFLOWED if,
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOWED.
195 We return either the original T, or a copy. */
198 force_fit_type (tree t, int overflowable,
199 bool overflowed, bool overflowed_const)
201 unsigned HOST_WIDE_INT low;
204 int sign_extended_type;
206 gcc_assert (TREE_CODE (t) == INTEGER_CST);
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
211 if (POINTER_TYPE_P (TREE_TYPE (t))
212 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 low &= ~((HOST_WIDE_INT) (-1) << prec);
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (high & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)low < 0)
252 /* Sign extend bottom half? */
253 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 low |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value changed, return a new node. */
261 if (overflowed || overflowed_const
262 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264 t = build_int_cst_wide (TREE_TYPE (t), low, high);
268 || (overflowable > 0 && sign_extended_type))
271 TREE_OVERFLOW (t) = 1;
272 TREE_CONSTANT_OVERFLOW (t) = 1;
274 else if (overflowed_const)
277 TREE_CONSTANT_OVERFLOW (t) = 1;
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
294 unsigned HOST_WIDE_INT l;
298 h = h1 + h2 + (l < l1);
302 return OVERFLOW_SUM_SIGN (h1, h2, h);
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
311 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
318 return (*hv & h1) < 0;
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
335 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
336 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
337 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
339 HOST_WIDE_INT arg1[4];
340 HOST_WIDE_INT arg2[4];
341 HOST_WIDE_INT prod[4 * 2];
342 unsigned HOST_WIDE_INT carry;
344 unsigned HOST_WIDE_INT toplow, neglow;
345 HOST_WIDE_INT tophigh, neghigh;
347 encode (arg1, l1, h1);
348 encode (arg2, l2, h2);
350 memset (prod, 0, sizeof prod);
352 for (i = 0; i < 4; i++)
355 for (j = 0; j < 4; j++)
358 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
359 carry += arg1[i] * arg2[j];
360 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 prod[k] = LOWPART (carry);
363 carry = HIGHPART (carry);
368 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
370 /* Check for overflow by calculating the top half of the answer in full;
371 it should agree with the low half's sign bit. */
372 decode (prod + 4, &toplow, &tophigh);
375 neg_double (l2, h2, &neglow, &neghigh);
376 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
380 neg_double (l1, h1, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
393 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
394 HOST_WIDE_INT count, unsigned int prec,
395 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
397 unsigned HOST_WIDE_INT signmask;
401 rshift_double (l1, h1, -count, prec, lv, hv, arith);
405 if (SHIFT_COUNT_TRUNCATED)
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
415 else if (count >= HOST_BITS_PER_WIDE_INT)
417 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
422 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
427 /* Sign extend all bits that are beyond the precision. */
429 signmask = -((prec > HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT) *hv
431 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432 : (*lv >> (prec - 1))) & 1);
434 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
436 else if (prec >= HOST_BITS_PER_WIDE_INT)
438 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
444 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445 *lv |= signmask << prec;
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
455 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
456 HOST_WIDE_INT count, unsigned int prec,
457 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
460 unsigned HOST_WIDE_INT signmask;
463 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
466 if (SHIFT_COUNT_TRUNCATED)
469 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
471 /* Shifting by the host word size is undefined according to the
472 ANSI standard, so we must handle this as a special case. */
476 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
483 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
488 /* Zero / sign extend all bits that are beyond the precision. */
490 if (count >= (HOST_WIDE_INT)prec)
495 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
497 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
499 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
500 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
506 *lv |= signmask << (prec - count);
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517 HOST_WIDE_INT count, unsigned int prec,
518 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
520 unsigned HOST_WIDE_INT s1l, s2l;
521 HOST_WIDE_INT s1h, s2h;
527 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 /* Rotate the doubleword integer in L1, H1 left by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
565 div_and_round_double (enum tree_code code, int uns,
566 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig,
568 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig,
570 unsigned HOST_WIDE_INT *lquo,
571 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
575 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den[4], quo[4];
578 unsigned HOST_WIDE_INT work;
579 unsigned HOST_WIDE_INT carry = 0;
580 unsigned HOST_WIDE_INT lnum = lnum_orig;
581 HOST_WIDE_INT hnum = hnum_orig;
582 unsigned HOST_WIDE_INT lden = lden_orig;
583 HOST_WIDE_INT hden = hden_orig;
586 if (hden == 0 && lden == 0)
587 overflow = 1, lden = 1;
589 /* Calculate quotient sign and convert operands to unsigned. */
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum, hnum, &lnum, &hnum)
597 && ((HOST_WIDE_INT) lden & hden) == -1)
603 neg_double (lden, hden, &lden, &hden);
607 if (hnum == 0 && hden == 0)
608 { /* single precision */
610 /* This unsigned division rounds toward zero. */
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
624 memset (quo, 0, sizeof quo);
626 memset (num, 0, sizeof num); /* to zero 9th element */
627 memset (den, 0, sizeof den);
629 encode (num, lnum, hnum);
630 encode (den, lden, hden);
632 /* Special code for when the divisor < BASE. */
633 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
635 /* hnum != 0 already checked. */
636 for (i = 4 - 1; i >= 0; i--)
638 work = num[i] + carry * BASE;
639 quo[i] = work / lden;
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig, den_hi_sig;
648 unsigned HOST_WIDE_INT quo_est, scale;
650 /* Find the highest nonzero divisor digit. */
651 for (i = 4 - 1;; i--)
658 /* Insure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
661 scale = BASE / (den[den_hi_sig] + 1);
663 { /* scale divisor and dividend */
665 for (i = 0; i <= 4 - 1; i++)
667 work = (num[i] * scale) + carry;
668 num[i] = LOWPART (work);
669 carry = HIGHPART (work);
674 for (i = 0; i <= 4 - 1; i++)
676 work = (den[i] * scale) + carry;
677 den[i] = LOWPART (work);
678 carry = HIGHPART (work);
679 if (den[i] != 0) den_hi_sig = i;
686 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order quotient digit.
690 quo_est is never low and is at most 2 high. */
691 unsigned HOST_WIDE_INT tmp;
693 num_hi_sig = i + den_hi_sig + 1;
694 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695 if (num[num_hi_sig] != den[den_hi_sig])
696 quo_est = work / den[den_hi_sig];
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp = work - quo_est * den[den_hi_sig];
703 && (den[den_hi_sig - 1] * quo_est
704 > (tmp * BASE + num[num_hi_sig - 2])))
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is the I - 1st digit. */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = quo_est * den[j] + carry;
715 carry = HIGHPART (work);
716 work = num[i + j] - LOWPART (work);
717 num[i + j] = LOWPART (work);
718 carry += HIGHPART (work) != 0;
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 carry = 0; /* add divisor back in */
727 for (j = 0; j <= den_hi_sig; j++)
729 work = num[i + j] + den[j] + carry;
730 carry = HIGHPART (work);
731 num[i + j] = LOWPART (work);
734 num [num_hi_sig] += carry;
737 /* Store the quotient digit. */
742 decode (quo, lquo, hquo);
745 /* If result is negative, make it so. */
747 neg_double (*lquo, *hquo, lquo, hquo);
749 /* Compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751 neg_double (*lrem, *hrem, lrem, hrem);
752 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
757 case TRUNC_MOD_EXPR: /* round toward zero */
758 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762 case FLOOR_MOD_EXPR: /* round toward negative infinity */
763 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 case CEIL_MOD_EXPR: /* round toward positive infinity */
775 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 case ROUND_MOD_EXPR: /* round to closest integer */
787 unsigned HOST_WIDE_INT labs_rem = *lrem;
788 HOST_WIDE_INT habs_rem = *hrem;
789 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790 HOST_WIDE_INT habs_den = hden, htwice;
792 /* Get absolute values. */
794 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 neg_double (lden, hden, &labs_den, &habs_den);
798 /* If (2 * abs (lrem) >= abs (lden)) */
799 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
800 labs_rem, habs_rem, <wice, &htwice);
802 if (((unsigned HOST_WIDE_INT) habs_den
803 < (unsigned HOST_WIDE_INT) htwice)
804 || (((unsigned HOST_WIDE_INT) habs_den
805 == (unsigned HOST_WIDE_INT) htwice)
806 && (labs_den < ltwice)))
810 add_double (*lquo, *hquo,
811 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
814 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 /* Compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828 neg_double (*lrem, *hrem, lrem, hrem);
829 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
838 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
840 unsigned HOST_WIDE_INT int1l, int2l;
841 HOST_WIDE_INT int1h, int2h;
842 unsigned HOST_WIDE_INT quol, reml;
843 HOST_WIDE_INT quoh, remh;
844 tree type = TREE_TYPE (arg1);
845 int uns = TYPE_UNSIGNED (type);
847 int1l = TREE_INT_CST_LOW (arg1);
848 int1h = TREE_INT_CST_HIGH (arg1);
849 int2l = TREE_INT_CST_LOW (arg2);
850 int2h = TREE_INT_CST_HIGH (arg2);
852 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853 &quol, &quoh, &reml, &remh);
854 if (remh != 0 || reml != 0)
857 return build_int_cst_wide (type, quol, quoh);
860 /* Return true if built-in mathematical function specified by CODE
861 preserves the sign of it argument, i.e. -f(x) == f(-x). */
864 negate_mathfn_p (enum built_in_function code)
888 /* Check whether we may negate an integer constant T without causing
892 may_negate_without_overflow_p (tree t)
894 unsigned HOST_WIDE_INT val;
898 gcc_assert (TREE_CODE (t) == INTEGER_CST);
900 type = TREE_TYPE (t);
901 if (TYPE_UNSIGNED (type))
904 prec = TYPE_PRECISION (type);
905 if (prec > HOST_BITS_PER_WIDE_INT)
907 if (TREE_INT_CST_LOW (t) != 0)
909 prec -= HOST_BITS_PER_WIDE_INT;
910 val = TREE_INT_CST_HIGH (t);
913 val = TREE_INT_CST_LOW (t);
914 if (prec < HOST_BITS_PER_WIDE_INT)
915 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
916 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
919 /* Determine whether an expression T can be cheaply negated using
920 the function negate_expr. */
923 negate_expr_p (tree t)
930 type = TREE_TYPE (t);
933 switch (TREE_CODE (t))
936 if (TYPE_UNSIGNED (type) || ! flag_trapv)
939 /* Check that -CST will not overflow type. */
940 return may_negate_without_overflow_p (t);
947 return negate_expr_p (TREE_REALPART (t))
948 && negate_expr_p (TREE_IMAGPART (t));
951 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
953 /* -(A + B) -> (-B) - A. */
954 if (negate_expr_p (TREE_OPERAND (t, 1))
955 && reorder_operands_p (TREE_OPERAND (t, 0),
956 TREE_OPERAND (t, 1)))
958 /* -(A + B) -> (-A) - B. */
959 return negate_expr_p (TREE_OPERAND (t, 0));
962 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
963 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
964 && reorder_operands_p (TREE_OPERAND (t, 0),
965 TREE_OPERAND (t, 1));
968 if (TYPE_UNSIGNED (TREE_TYPE (t)))
974 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
975 return negate_expr_p (TREE_OPERAND (t, 1))
976 || negate_expr_p (TREE_OPERAND (t, 0));
980 /* Negate -((double)float) as (double)(-float). */
981 if (TREE_CODE (type) == REAL_TYPE)
983 tree tem = strip_float_extensions (t);
985 return negate_expr_p (tem);
990 /* Negate -f(x) as f(-x). */
991 if (negate_mathfn_p (builtin_mathfn_code (t)))
992 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
996 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
997 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
999 tree op1 = TREE_OPERAND (t, 1);
1000 if (TREE_INT_CST_HIGH (op1) == 0
1001 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1002 == TREE_INT_CST_LOW (op1))
1013 /* Given T, an expression, return the negation of T. Allow for T to be
1014 null, in which case return null. */
1017 negate_expr (tree t)
1025 type = TREE_TYPE (t);
1026 STRIP_SIGN_NOPS (t);
1028 switch (TREE_CODE (t))
1031 tem = fold_negate_const (t, type);
1032 if (! TREE_OVERFLOW (tem)
1033 || TYPE_UNSIGNED (type)
1039 tem = fold_negate_const (t, type);
1040 /* Two's complement FP formats, such as c4x, may overflow. */
1041 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1042 return fold_convert (type, tem);
1047 tree rpart = negate_expr (TREE_REALPART (t));
1048 tree ipart = negate_expr (TREE_IMAGPART (t));
1050 if ((TREE_CODE (rpart) == REAL_CST
1051 && TREE_CODE (ipart) == REAL_CST)
1052 || (TREE_CODE (rpart) == INTEGER_CST
1053 && TREE_CODE (ipart) == INTEGER_CST))
1054 return build_complex (type, rpart, ipart);
1059 return fold_convert (type, TREE_OPERAND (t, 0));
1062 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1064 /* -(A + B) -> (-B) - A. */
1065 if (negate_expr_p (TREE_OPERAND (t, 1))
1066 && reorder_operands_p (TREE_OPERAND (t, 0),
1067 TREE_OPERAND (t, 1)))
1069 tem = negate_expr (TREE_OPERAND (t, 1));
1070 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1071 tem, TREE_OPERAND (t, 0));
1072 return fold_convert (type, tem);
1075 /* -(A + B) -> (-A) - B. */
1076 if (negate_expr_p (TREE_OPERAND (t, 0)))
1078 tem = negate_expr (TREE_OPERAND (t, 0));
1079 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1080 tem, TREE_OPERAND (t, 1));
1081 return fold_convert (type, tem);
1087 /* - (A - B) -> B - A */
1088 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1089 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1090 return fold_convert (type,
1091 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 TREE_OPERAND (t, 1),
1093 TREE_OPERAND (t, 0)));
1097 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1103 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1105 tem = TREE_OPERAND (t, 1);
1106 if (negate_expr_p (tem))
1107 return fold_convert (type,
1108 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1109 TREE_OPERAND (t, 0),
1110 negate_expr (tem)));
1111 tem = TREE_OPERAND (t, 0);
1112 if (negate_expr_p (tem))
1113 return fold_convert (type,
1114 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1116 TREE_OPERAND (t, 1)));
1121 /* Convert -((double)float) into (double)(-float). */
1122 if (TREE_CODE (type) == REAL_TYPE)
1124 tem = strip_float_extensions (t);
1125 if (tem != t && negate_expr_p (tem))
1126 return fold_convert (type, negate_expr (tem));
1131 /* Negate -f(x) as f(-x). */
1132 if (negate_mathfn_p (builtin_mathfn_code (t))
1133 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1135 tree fndecl, arg, arglist;
1137 fndecl = get_callee_fndecl (t);
1138 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1139 arglist = build_tree_list (NULL_TREE, arg);
1140 return build_function_call_expr (fndecl, arglist);
1145 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1146 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1148 tree op1 = TREE_OPERAND (t, 1);
1149 if (TREE_INT_CST_HIGH (op1) == 0
1150 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1151 == TREE_INT_CST_LOW (op1))
1153 tree ntype = TYPE_UNSIGNED (type)
1154 ? lang_hooks.types.signed_type (type)
1155 : lang_hooks.types.unsigned_type (type);
1156 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1157 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1158 return fold_convert (type, temp);
1167 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1168 return fold_convert (type, tem);
1171 /* Split a tree IN into a constant, literal and variable parts that could be
1172 combined with CODE to make IN. "constant" means an expression with
1173 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1174 commutative arithmetic operation. Store the constant part into *CONP,
1175 the literal in *LITP and return the variable part. If a part isn't
1176 present, set it to null. If the tree does not decompose in this way,
1177 return the entire tree as the variable part and the other parts as null.
1179 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1180 case, we negate an operand that was subtracted. Except if it is a
1181 literal for which we use *MINUS_LITP instead.
1183 If NEGATE_P is true, we are negating all of IN, again except a literal
1184 for which we use *MINUS_LITP instead.
1186 If IN is itself a literal or constant, return it as appropriate.
1188 Note that we do not guarantee that any of the three values will be the
1189 same type as IN, but they will have the same signedness and mode. */
1192 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1193 tree *minus_litp, int negate_p)
1201 /* Strip any conversions that don't change the machine mode or signedness. */
1202 STRIP_SIGN_NOPS (in);
1204 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1206 else if (TREE_CODE (in) == code
1207 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1208 /* We can associate addition and subtraction together (even
1209 though the C standard doesn't say so) for integers because
1210 the value is not affected. For reals, the value might be
1211 affected, so we can't. */
1212 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1213 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1215 tree op0 = TREE_OPERAND (in, 0);
1216 tree op1 = TREE_OPERAND (in, 1);
1217 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1218 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1220 /* First see if either of the operands is a literal, then a constant. */
1221 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1222 *litp = op0, op0 = 0;
1223 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1224 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1226 if (op0 != 0 && TREE_CONSTANT (op0))
1227 *conp = op0, op0 = 0;
1228 else if (op1 != 0 && TREE_CONSTANT (op1))
1229 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1231 /* If we haven't dealt with either operand, this is not a case we can
1232 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1233 if (op0 != 0 && op1 != 0)
1238 var = op1, neg_var_p = neg1_p;
1240 /* Now do any needed negations. */
1242 *minus_litp = *litp, *litp = 0;
1244 *conp = negate_expr (*conp);
1246 var = negate_expr (var);
1248 else if (TREE_CONSTANT (in))
1256 *minus_litp = *litp, *litp = 0;
1257 else if (*minus_litp)
1258 *litp = *minus_litp, *minus_litp = 0;
1259 *conp = negate_expr (*conp);
1260 var = negate_expr (var);
1266 /* Re-associate trees split by the above function. T1 and T2 are either
1267 expressions to associate or null. Return the new expression, if any. If
1268 we build an operation, do it in TYPE and with CODE. */
1271 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1278 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1279 try to fold this since we will have infinite recursion. But do
1280 deal with any NEGATE_EXPRs. */
1281 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1282 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1284 if (code == PLUS_EXPR)
1286 if (TREE_CODE (t1) == NEGATE_EXPR)
1287 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1288 fold_convert (type, TREE_OPERAND (t1, 0)));
1289 else if (TREE_CODE (t2) == NEGATE_EXPR)
1290 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1291 fold_convert (type, TREE_OPERAND (t2, 0)));
1292 else if (integer_zerop (t2))
1293 return fold_convert (type, t1);
1295 else if (code == MINUS_EXPR)
1297 if (integer_zerop (t2))
1298 return fold_convert (type, t1);
1301 return build2 (code, type, fold_convert (type, t1),
1302 fold_convert (type, t2));
1305 return fold_build2 (code, type, fold_convert (type, t1),
1306 fold_convert (type, t2));
1309 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1310 to produce a new constant.
1312 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1315 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1317 unsigned HOST_WIDE_INT int1l, int2l;
1318 HOST_WIDE_INT int1h, int2h;
1319 unsigned HOST_WIDE_INT low;
1321 unsigned HOST_WIDE_INT garbagel;
1322 HOST_WIDE_INT garbageh;
1324 tree type = TREE_TYPE (arg1);
1325 int uns = TYPE_UNSIGNED (type);
1327 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1330 int1l = TREE_INT_CST_LOW (arg1);
1331 int1h = TREE_INT_CST_HIGH (arg1);
1332 int2l = TREE_INT_CST_LOW (arg2);
1333 int2h = TREE_INT_CST_HIGH (arg2);
1338 low = int1l | int2l, hi = int1h | int2h;
1342 low = int1l ^ int2l, hi = int1h ^ int2h;
1346 low = int1l & int2l, hi = int1h & int2h;
1352 /* It's unclear from the C standard whether shifts can overflow.
1353 The following code ignores overflow; perhaps a C standard
1354 interpretation ruling is needed. */
1355 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1362 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1367 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1371 neg_double (int2l, int2h, &low, &hi);
1372 add_double (int1l, int1h, low, hi, &low, &hi);
1373 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1377 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1380 case TRUNC_DIV_EXPR:
1381 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1382 case EXACT_DIV_EXPR:
1383 /* This is a shortcut for a common special case. */
1384 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1385 && ! TREE_CONSTANT_OVERFLOW (arg1)
1386 && ! TREE_CONSTANT_OVERFLOW (arg2)
1387 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1389 if (code == CEIL_DIV_EXPR)
1392 low = int1l / int2l, hi = 0;
1396 /* ... fall through ... */
1398 case ROUND_DIV_EXPR:
1399 if (int2h == 0 && int2l == 1)
1401 low = int1l, hi = int1h;
1404 if (int1l == int2l && int1h == int2h
1405 && ! (int1l == 0 && int1h == 0))
1410 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1411 &low, &hi, &garbagel, &garbageh);
1414 case TRUNC_MOD_EXPR:
1415 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1416 /* This is a shortcut for a common special case. */
1417 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1418 && ! TREE_CONSTANT_OVERFLOW (arg1)
1419 && ! TREE_CONSTANT_OVERFLOW (arg2)
1420 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1422 if (code == CEIL_MOD_EXPR)
1424 low = int1l % int2l, hi = 0;
1428 /* ... fall through ... */
1430 case ROUND_MOD_EXPR:
1431 overflow = div_and_round_double (code, uns,
1432 int1l, int1h, int2l, int2h,
1433 &garbagel, &garbageh, &low, &hi);
1439 low = (((unsigned HOST_WIDE_INT) int1h
1440 < (unsigned HOST_WIDE_INT) int2h)
1441 || (((unsigned HOST_WIDE_INT) int1h
1442 == (unsigned HOST_WIDE_INT) int2h)
1445 low = (int1h < int2h
1446 || (int1h == int2h && int1l < int2l));
1448 if (low == (code == MIN_EXPR))
1449 low = int1l, hi = int1h;
1451 low = int2l, hi = int2h;
1458 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1462 /* Propagate overflow flags ourselves. */
1463 if (((!uns || is_sizetype) && overflow)
1464 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1467 TREE_OVERFLOW (t) = 1;
1468 TREE_CONSTANT_OVERFLOW (t) = 1;
1470 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1473 TREE_CONSTANT_OVERFLOW (t) = 1;
1477 t = force_fit_type (t, 1,
1478 ((!uns || is_sizetype) && overflow)
1479 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1480 TREE_CONSTANT_OVERFLOW (arg1)
1481 | TREE_CONSTANT_OVERFLOW (arg2));
1486 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1487 constant. We assume ARG1 and ARG2 have the same data type, or at least
1488 are the same kind of constant and the same machine mode.
1490 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1493 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1498 if (TREE_CODE (arg1) == INTEGER_CST)
1499 return int_const_binop (code, arg1, arg2, notrunc);
1501 if (TREE_CODE (arg1) == REAL_CST)
1503 enum machine_mode mode;
1506 REAL_VALUE_TYPE value;
1507 REAL_VALUE_TYPE result;
1511 d1 = TREE_REAL_CST (arg1);
1512 d2 = TREE_REAL_CST (arg2);
1514 type = TREE_TYPE (arg1);
1515 mode = TYPE_MODE (type);
1517 /* Don't perform operation if we honor signaling NaNs and
1518 either operand is a NaN. */
1519 if (HONOR_SNANS (mode)
1520 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1523 /* Don't perform operation if it would raise a division
1524 by zero exception. */
1525 if (code == RDIV_EXPR
1526 && REAL_VALUES_EQUAL (d2, dconst0)
1527 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1530 /* If either operand is a NaN, just return it. Otherwise, set up
1531 for floating-point trap; we return an overflow. */
1532 if (REAL_VALUE_ISNAN (d1))
1534 else if (REAL_VALUE_ISNAN (d2))
1537 inexact = real_arithmetic (&value, code, &d1, &d2);
1538 real_convert (&result, mode, &value);
1540 /* Don't constant fold this floating point operation if
1541 the result has overflowed and flag_trapping_math. */
1543 if (flag_trapping_math
1544 && MODE_HAS_INFINITIES (mode)
1545 && REAL_VALUE_ISINF (result)
1546 && !REAL_VALUE_ISINF (d1)
1547 && !REAL_VALUE_ISINF (d2))
1550 /* Don't constant fold this floating point operation if the
1551 result may dependent upon the run-time rounding mode and
1552 flag_rounding_math is set, or if GCC's software emulation
1553 is unable to accurately represent the result. */
1555 if ((flag_rounding_math
1556 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1557 && !flag_unsafe_math_optimizations))
1558 && (inexact || !real_identical (&result, &value)))
1561 t = build_real (type, result);
1563 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1564 TREE_CONSTANT_OVERFLOW (t)
1566 | TREE_CONSTANT_OVERFLOW (arg1)
1567 | TREE_CONSTANT_OVERFLOW (arg2);
1570 if (TREE_CODE (arg1) == COMPLEX_CST)
1572 tree type = TREE_TYPE (arg1);
1573 tree r1 = TREE_REALPART (arg1);
1574 tree i1 = TREE_IMAGPART (arg1);
1575 tree r2 = TREE_REALPART (arg2);
1576 tree i2 = TREE_IMAGPART (arg2);
1582 t = build_complex (type,
1583 const_binop (PLUS_EXPR, r1, r2, notrunc),
1584 const_binop (PLUS_EXPR, i1, i2, notrunc));
1588 t = build_complex (type,
1589 const_binop (MINUS_EXPR, r1, r2, notrunc),
1590 const_binop (MINUS_EXPR, i1, i2, notrunc));
1594 t = build_complex (type,
1595 const_binop (MINUS_EXPR,
1596 const_binop (MULT_EXPR,
1598 const_binop (MULT_EXPR,
1601 const_binop (PLUS_EXPR,
1602 const_binop (MULT_EXPR,
1604 const_binop (MULT_EXPR,
1611 tree t1, t2, real, imag;
1613 = const_binop (PLUS_EXPR,
1614 const_binop (MULT_EXPR, r2, r2, notrunc),
1615 const_binop (MULT_EXPR, i2, i2, notrunc),
1618 t1 = const_binop (PLUS_EXPR,
1619 const_binop (MULT_EXPR, r1, r2, notrunc),
1620 const_binop (MULT_EXPR, i1, i2, notrunc),
1622 t2 = const_binop (MINUS_EXPR,
1623 const_binop (MULT_EXPR, i1, r2, notrunc),
1624 const_binop (MULT_EXPR, r1, i2, notrunc),
1627 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1629 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1630 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1634 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1635 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1640 t = build_complex (type, real, imag);
1652 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1653 indicates which particular sizetype to create. */
1656 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1658 return build_int_cst (sizetype_tab[(int) kind], number);
1661 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1662 is a tree code. The type of the result is taken from the operands.
1663 Both must be the same type integer type and it must be a size type.
1664 If the operands are constant, so is the result. */
1667 size_binop (enum tree_code code, tree arg0, tree arg1)
1669 tree type = TREE_TYPE (arg0);
1671 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1672 && type == TREE_TYPE (arg1));
1674 /* Handle the special case of two integer constants faster. */
1675 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1677 /* And some specific cases even faster than that. */
1678 if (code == PLUS_EXPR && integer_zerop (arg0))
1680 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1681 && integer_zerop (arg1))
1683 else if (code == MULT_EXPR && integer_onep (arg0))
1686 /* Handle general case of two integer constants. */
1687 return int_const_binop (code, arg0, arg1, 0);
1690 if (arg0 == error_mark_node || arg1 == error_mark_node)
1691 return error_mark_node;
1693 return fold_build2 (code, type, arg0, arg1);
1696 /* Given two values, either both of sizetype or both of bitsizetype,
1697 compute the difference between the two values. Return the value
1698 in signed type corresponding to the type of the operands. */
1701 size_diffop (tree arg0, tree arg1)
1703 tree type = TREE_TYPE (arg0);
1706 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1707 && type == TREE_TYPE (arg1));
1709 /* If the type is already signed, just do the simple thing. */
1710 if (!TYPE_UNSIGNED (type))
1711 return size_binop (MINUS_EXPR, arg0, arg1);
1713 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1715 /* If either operand is not a constant, do the conversions to the signed
1716 type and subtract. The hardware will do the right thing with any
1717 overflow in the subtraction. */
1718 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1719 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1720 fold_convert (ctype, arg1));
1722 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1723 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1724 overflow) and negate (which can't either). Special-case a result
1725 of zero while we're here. */
1726 if (tree_int_cst_equal (arg0, arg1))
1727 return fold_convert (ctype, integer_zero_node);
1728 else if (tree_int_cst_lt (arg1, arg0))
1729 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1731 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1732 fold_convert (ctype, size_binop (MINUS_EXPR,
1736 /* A subroutine of fold_convert_const handling conversions of an
1737 INTEGER_CST to another integer type. */
1740 fold_convert_const_int_from_int (tree type, tree arg1)
1744 /* Given an integer constant, make new constant with new type,
1745 appropriately sign-extended or truncated. */
1746 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1747 TREE_INT_CST_HIGH (arg1));
1749 t = force_fit_type (t,
1750 /* Don't set the overflow when
1751 converting a pointer */
1752 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1753 (TREE_INT_CST_HIGH (arg1) < 0
1754 && (TYPE_UNSIGNED (type)
1755 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1756 | TREE_OVERFLOW (arg1),
1757 TREE_CONSTANT_OVERFLOW (arg1));
1762 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1763 to an integer type. */
1766 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1771 /* The following code implements the floating point to integer
1772 conversion rules required by the Java Language Specification,
1773 that IEEE NaNs are mapped to zero and values that overflow
1774 the target precision saturate, i.e. values greater than
1775 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1776 are mapped to INT_MIN. These semantics are allowed by the
1777 C and C++ standards that simply state that the behavior of
1778 FP-to-integer conversion is unspecified upon overflow. */
1780 HOST_WIDE_INT high, low;
1782 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1786 case FIX_TRUNC_EXPR:
1787 real_trunc (&r, VOIDmode, &x);
1791 real_ceil (&r, VOIDmode, &x);
1794 case FIX_FLOOR_EXPR:
1795 real_floor (&r, VOIDmode, &x);
1798 case FIX_ROUND_EXPR:
1799 real_round (&r, VOIDmode, &x);
1806 /* If R is NaN, return zero and show we have an overflow. */
1807 if (REAL_VALUE_ISNAN (r))
1814 /* See if R is less than the lower bound or greater than the
1819 tree lt = TYPE_MIN_VALUE (type);
1820 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1821 if (REAL_VALUES_LESS (r, l))
1824 high = TREE_INT_CST_HIGH (lt);
1825 low = TREE_INT_CST_LOW (lt);
1831 tree ut = TYPE_MAX_VALUE (type);
1834 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1835 if (REAL_VALUES_LESS (u, r))
1838 high = TREE_INT_CST_HIGH (ut);
1839 low = TREE_INT_CST_LOW (ut);
1845 REAL_VALUE_TO_INT (&low, &high, r);
1847 t = build_int_cst_wide (type, low, high);
1849 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1850 TREE_CONSTANT_OVERFLOW (arg1));
1854 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1855 to another floating point type. */
1858 fold_convert_const_real_from_real (tree type, tree arg1)
1860 REAL_VALUE_TYPE value;
1863 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1864 t = build_real (type, value);
1866 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1867 TREE_CONSTANT_OVERFLOW (t)
1868 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1872 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1873 type TYPE. If no simplification can be done return NULL_TREE. */
1876 fold_convert_const (enum tree_code code, tree type, tree arg1)
1878 if (TREE_TYPE (arg1) == type)
1881 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1883 if (TREE_CODE (arg1) == INTEGER_CST)
1884 return fold_convert_const_int_from_int (type, arg1);
1885 else if (TREE_CODE (arg1) == REAL_CST)
1886 return fold_convert_const_int_from_real (code, type, arg1);
1888 else if (TREE_CODE (type) == REAL_TYPE)
1890 if (TREE_CODE (arg1) == INTEGER_CST)
1891 return build_real_from_int_cst (type, arg1);
1892 if (TREE_CODE (arg1) == REAL_CST)
1893 return fold_convert_const_real_from_real (type, arg1);
1898 /* Construct a vector of zero elements of vector type TYPE. */
1901 build_zero_vector (tree type)
1906 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1907 units = TYPE_VECTOR_SUBPARTS (type);
1910 for (i = 0; i < units; i++)
1911 list = tree_cons (NULL_TREE, elem, list);
1912 return build_vector (type, list);
1915 /* Convert expression ARG to type TYPE. Used by the middle-end for
1916 simple conversions in preference to calling the front-end's convert. */
1919 fold_convert (tree type, tree arg)
1921 tree orig = TREE_TYPE (arg);
1927 if (TREE_CODE (arg) == ERROR_MARK
1928 || TREE_CODE (type) == ERROR_MARK
1929 || TREE_CODE (orig) == ERROR_MARK)
1930 return error_mark_node;
1932 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1933 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1934 TYPE_MAIN_VARIANT (orig)))
1935 return fold_build1 (NOP_EXPR, type, arg);
1937 switch (TREE_CODE (type))
1939 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1942 if (TREE_CODE (arg) == INTEGER_CST)
1944 tem = fold_convert_const (NOP_EXPR, type, arg);
1945 if (tem != NULL_TREE)
1948 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1949 || TREE_CODE (orig) == OFFSET_TYPE)
1950 return fold_build1 (NOP_EXPR, type, arg);
1951 if (TREE_CODE (orig) == COMPLEX_TYPE)
1953 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1954 return fold_convert (type, tem);
1956 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1957 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1958 return fold_build1 (NOP_EXPR, type, arg);
1961 if (TREE_CODE (arg) == INTEGER_CST)
1963 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1964 if (tem != NULL_TREE)
1967 else if (TREE_CODE (arg) == REAL_CST)
1969 tem = fold_convert_const (NOP_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1974 switch (TREE_CODE (orig))
1976 case INTEGER_TYPE: case CHAR_TYPE:
1977 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1978 case POINTER_TYPE: case REFERENCE_TYPE:
1979 return fold_build1 (FLOAT_EXPR, type, arg);
1982 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1986 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1987 return fold_convert (type, tem);
1994 switch (TREE_CODE (orig))
1996 case INTEGER_TYPE: case CHAR_TYPE:
1997 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1998 case POINTER_TYPE: case REFERENCE_TYPE:
2000 return build2 (COMPLEX_EXPR, type,
2001 fold_convert (TREE_TYPE (type), arg),
2002 fold_convert (TREE_TYPE (type), integer_zero_node));
2007 if (TREE_CODE (arg) == COMPLEX_EXPR)
2009 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2010 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2011 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2014 arg = save_expr (arg);
2015 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2016 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2017 rpart = fold_convert (TREE_TYPE (type), rpart);
2018 ipart = fold_convert (TREE_TYPE (type), ipart);
2019 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2027 if (integer_zerop (arg))
2028 return build_zero_vector (type);
2029 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2031 || TREE_CODE (orig) == VECTOR_TYPE);
2032 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2035 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2042 /* Return false if expr can be assumed not to be an lvalue, true
2046 maybe_lvalue_p (tree x)
2048 /* We only need to wrap lvalue tree codes. */
2049 switch (TREE_CODE (x))
2060 case ALIGN_INDIRECT_REF:
2061 case MISALIGNED_INDIRECT_REF:
2063 case ARRAY_RANGE_REF:
2069 case PREINCREMENT_EXPR:
2070 case PREDECREMENT_EXPR:
2072 case TRY_CATCH_EXPR:
2073 case WITH_CLEANUP_EXPR:
2084 /* Assume the worst for front-end tree codes. */
2085 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2093 /* Return an expr equal to X but certainly not valid as an lvalue. */
2098 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2103 if (! maybe_lvalue_p (x))
2105 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2108 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2109 Zero means allow extended lvalues. */
2111 int pedantic_lvalues;
2113 /* When pedantic, return an expr equal to X but certainly not valid as a
2114 pedantic lvalue. Otherwise, return X. */
2117 pedantic_non_lvalue (tree x)
2119 if (pedantic_lvalues)
2120 return non_lvalue (x);
2125 /* Given a tree comparison code, return the code that is the logical inverse
2126 of the given code. It is not safe to do this for floating-point
2127 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2128 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2131 invert_tree_comparison (enum tree_code code, bool honor_nans)
2133 if (honor_nans && flag_trapping_math)
2143 return honor_nans ? UNLE_EXPR : LE_EXPR;
2145 return honor_nans ? UNLT_EXPR : LT_EXPR;
2147 return honor_nans ? UNGE_EXPR : GE_EXPR;
2149 return honor_nans ? UNGT_EXPR : GT_EXPR;
2163 return UNORDERED_EXPR;
2164 case UNORDERED_EXPR:
2165 return ORDERED_EXPR;
2171 /* Similar, but return the comparison that results if the operands are
2172 swapped. This is safe for floating-point. */
2175 swap_tree_comparison (enum tree_code code)
2182 case UNORDERED_EXPR:
2208 /* Convert a comparison tree code from an enum tree_code representation
2209 into a compcode bit-based encoding. This function is the inverse of
2210 compcode_to_comparison. */
2212 static enum comparison_code
2213 comparison_to_compcode (enum tree_code code)
2230 return COMPCODE_ORD;
2231 case UNORDERED_EXPR:
2232 return COMPCODE_UNORD;
2234 return COMPCODE_UNLT;
2236 return COMPCODE_UNEQ;
2238 return COMPCODE_UNLE;
2240 return COMPCODE_UNGT;
2242 return COMPCODE_LTGT;
2244 return COMPCODE_UNGE;
2250 /* Convert a compcode bit-based encoding of a comparison operator back
2251 to GCC's enum tree_code representation. This function is the
2252 inverse of comparison_to_compcode. */
2254 static enum tree_code
2255 compcode_to_comparison (enum comparison_code code)
2272 return ORDERED_EXPR;
2273 case COMPCODE_UNORD:
2274 return UNORDERED_EXPR;
2292 /* Return a tree for the comparison which is the combination of
2293 doing the AND or OR (depending on CODE) of the two operations LCODE
2294 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2295 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2296 if this makes the transformation invalid. */
2299 combine_comparisons (enum tree_code code, enum tree_code lcode,
2300 enum tree_code rcode, tree truth_type,
2301 tree ll_arg, tree lr_arg)
2303 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2304 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2305 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2306 enum comparison_code compcode;
2310 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2311 compcode = lcompcode & rcompcode;
2314 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2315 compcode = lcompcode | rcompcode;
2324 /* Eliminate unordered comparisons, as well as LTGT and ORD
2325 which are not used unless the mode has NaNs. */
2326 compcode &= ~COMPCODE_UNORD;
2327 if (compcode == COMPCODE_LTGT)
2328 compcode = COMPCODE_NE;
2329 else if (compcode == COMPCODE_ORD)
2330 compcode = COMPCODE_TRUE;
2332 else if (flag_trapping_math)
2334 /* Check that the original operation and the optimized ones will trap
2335 under the same condition. */
2336 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2337 && (lcompcode != COMPCODE_EQ)
2338 && (lcompcode != COMPCODE_ORD);
2339 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2340 && (rcompcode != COMPCODE_EQ)
2341 && (rcompcode != COMPCODE_ORD);
2342 bool trap = (compcode & COMPCODE_UNORD) == 0
2343 && (compcode != COMPCODE_EQ)
2344 && (compcode != COMPCODE_ORD);
2346 /* In a short-circuited boolean expression the LHS might be
2347 such that the RHS, if evaluated, will never trap. For
2348 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2349 if neither x nor y is NaN. (This is a mixed blessing: for
2350 example, the expression above will never trap, hence
2351 optimizing it to x < y would be invalid). */
2352 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2353 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2356 /* If the comparison was short-circuited, and only the RHS
2357 trapped, we may now generate a spurious trap. */
2359 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2362 /* If we changed the conditions that cause a trap, we lose. */
2363 if ((ltrap || rtrap) != trap)
2367 if (compcode == COMPCODE_TRUE)
2368 return constant_boolean_node (true, truth_type);
2369 else if (compcode == COMPCODE_FALSE)
2370 return constant_boolean_node (false, truth_type);
2372 return fold_build2 (compcode_to_comparison (compcode),
2373 truth_type, ll_arg, lr_arg);
2376 /* Return nonzero if CODE is a tree code that represents a truth value. */
2379 truth_value_p (enum tree_code code)
2381 return (TREE_CODE_CLASS (code) == tcc_comparison
2382 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2383 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2384 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2387 /* Return nonzero if two operands (typically of the same tree node)
2388 are necessarily equal. If either argument has side-effects this
2389 function returns zero. FLAGS modifies behavior as follows:
2391 If OEP_ONLY_CONST is set, only return nonzero for constants.
2392 This function tests whether the operands are indistinguishable;
2393 it does not test whether they are equal using C's == operation.
2394 The distinction is important for IEEE floating point, because
2395 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2396 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2398 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2399 even though it may hold multiple values during a function.
2400 This is because a GCC tree node guarantees that nothing else is
2401 executed between the evaluation of its "operands" (which may often
2402 be evaluated in arbitrary order). Hence if the operands themselves
2403 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2404 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2405 unset means assuming isochronic (or instantaneous) tree equivalence.
2406 Unless comparing arbitrary expression trees, such as from different
2407 statements, this flag can usually be left unset.
2409 If OEP_PURE_SAME is set, then pure functions with identical arguments
2410 are considered the same. It is used when the caller has other ways
2411 to ensure that global memory is unchanged in between. */
2414 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2416 /* If either is ERROR_MARK, they aren't equal. */
2417 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2420 /* If both types don't have the same signedness, then we can't consider
2421 them equal. We must check this before the STRIP_NOPS calls
2422 because they may change the signedness of the arguments. */
2423 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2429 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2430 /* This is needed for conversions and for COMPONENT_REF.
2431 Might as well play it safe and always test this. */
2432 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2433 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2434 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2437 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2438 We don't care about side effects in that case because the SAVE_EXPR
2439 takes care of that for us. In all other cases, two expressions are
2440 equal if they have no side effects. If we have two identical
2441 expressions with side effects that should be treated the same due
2442 to the only side effects being identical SAVE_EXPR's, that will
2443 be detected in the recursive calls below. */
2444 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2445 && (TREE_CODE (arg0) == SAVE_EXPR
2446 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2449 /* Next handle constant cases, those for which we can return 1 even
2450 if ONLY_CONST is set. */
2451 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2452 switch (TREE_CODE (arg0))
2455 return (! TREE_CONSTANT_OVERFLOW (arg0)
2456 && ! TREE_CONSTANT_OVERFLOW (arg1)
2457 && tree_int_cst_equal (arg0, arg1));
2460 return (! TREE_CONSTANT_OVERFLOW (arg0)
2461 && ! TREE_CONSTANT_OVERFLOW (arg1)
2462 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2463 TREE_REAL_CST (arg1)));
2469 if (TREE_CONSTANT_OVERFLOW (arg0)
2470 || TREE_CONSTANT_OVERFLOW (arg1))
2473 v1 = TREE_VECTOR_CST_ELTS (arg0);
2474 v2 = TREE_VECTOR_CST_ELTS (arg1);
2477 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2480 v1 = TREE_CHAIN (v1);
2481 v2 = TREE_CHAIN (v2);
2488 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2490 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2494 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2495 && ! memcmp (TREE_STRING_POINTER (arg0),
2496 TREE_STRING_POINTER (arg1),
2497 TREE_STRING_LENGTH (arg0)));
2500 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2506 if (flags & OEP_ONLY_CONST)
2509 /* Define macros to test an operand from arg0 and arg1 for equality and a
2510 variant that allows null and views null as being different from any
2511 non-null value. In the latter case, if either is null, the both
2512 must be; otherwise, do the normal comparison. */
2513 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2514 TREE_OPERAND (arg1, N), flags)
2516 #define OP_SAME_WITH_NULL(N) \
2517 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2518 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2520 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2523 /* Two conversions are equal only if signedness and modes match. */
2524 switch (TREE_CODE (arg0))
2529 case FIX_TRUNC_EXPR:
2530 case FIX_FLOOR_EXPR:
2531 case FIX_ROUND_EXPR:
2532 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2533 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2543 case tcc_comparison:
2545 if (OP_SAME (0) && OP_SAME (1))
2548 /* For commutative ops, allow the other order. */
2549 return (commutative_tree_code (TREE_CODE (arg0))
2550 && operand_equal_p (TREE_OPERAND (arg0, 0),
2551 TREE_OPERAND (arg1, 1), flags)
2552 && operand_equal_p (TREE_OPERAND (arg0, 1),
2553 TREE_OPERAND (arg1, 0), flags));
2556 /* If either of the pointer (or reference) expressions we are
2557 dereferencing contain a side effect, these cannot be equal. */
2558 if (TREE_SIDE_EFFECTS (arg0)
2559 || TREE_SIDE_EFFECTS (arg1))
2562 switch (TREE_CODE (arg0))
2565 case ALIGN_INDIRECT_REF:
2566 case MISALIGNED_INDIRECT_REF:
2572 case ARRAY_RANGE_REF:
2573 /* Operands 2 and 3 may be null. */
2576 && OP_SAME_WITH_NULL (2)
2577 && OP_SAME_WITH_NULL (3));
2580 /* Handle operand 2 the same as for ARRAY_REF. */
2581 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2584 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2590 case tcc_expression:
2591 switch (TREE_CODE (arg0))
2594 case TRUTH_NOT_EXPR:
2597 case TRUTH_ANDIF_EXPR:
2598 case TRUTH_ORIF_EXPR:
2599 return OP_SAME (0) && OP_SAME (1);
2601 case TRUTH_AND_EXPR:
2603 case TRUTH_XOR_EXPR:
2604 if (OP_SAME (0) && OP_SAME (1))
2607 /* Otherwise take into account this is a commutative operation. */
2608 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2614 /* If the CALL_EXPRs call different functions, then they
2615 clearly can not be equal. */
2620 unsigned int cef = call_expr_flags (arg0);
2621 if (flags & OEP_PURE_SAME)
2622 cef &= ECF_CONST | ECF_PURE;
2629 /* Now see if all the arguments are the same. operand_equal_p
2630 does not handle TREE_LIST, so we walk the operands here
2631 feeding them to operand_equal_p. */
2632 arg0 = TREE_OPERAND (arg0, 1);
2633 arg1 = TREE_OPERAND (arg1, 1);
2634 while (arg0 && arg1)
2636 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2640 arg0 = TREE_CHAIN (arg0);
2641 arg1 = TREE_CHAIN (arg1);
2644 /* If we get here and both argument lists are exhausted
2645 then the CALL_EXPRs are equal. */
2646 return ! (arg0 || arg1);
2652 case tcc_declaration:
2653 /* Consider __builtin_sqrt equal to sqrt. */
2654 return (TREE_CODE (arg0) == FUNCTION_DECL
2655 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2656 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2657 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2664 #undef OP_SAME_WITH_NULL
2667 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2668 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2670 When in doubt, return 0. */
2673 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2675 int unsignedp1, unsignedpo;
2676 tree primarg0, primarg1, primother;
2677 unsigned int correct_width;
2679 if (operand_equal_p (arg0, arg1, 0))
2682 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2683 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2686 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2687 and see if the inner values are the same. This removes any
2688 signedness comparison, which doesn't matter here. */
2689 primarg0 = arg0, primarg1 = arg1;
2690 STRIP_NOPS (primarg0);
2691 STRIP_NOPS (primarg1);
2692 if (operand_equal_p (primarg0, primarg1, 0))
2695 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2696 actual comparison operand, ARG0.
2698 First throw away any conversions to wider types
2699 already present in the operands. */
2701 primarg1 = get_narrower (arg1, &unsignedp1);
2702 primother = get_narrower (other, &unsignedpo);
2704 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2705 if (unsignedp1 == unsignedpo
2706 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2707 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2709 tree type = TREE_TYPE (arg0);
2711 /* Make sure shorter operand is extended the right way
2712 to match the longer operand. */
2713 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2714 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2716 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2723 /* See if ARG is an expression that is either a comparison or is performing
2724 arithmetic on comparisons. The comparisons must only be comparing
2725 two different values, which will be stored in *CVAL1 and *CVAL2; if
2726 they are nonzero it means that some operands have already been found.
2727 No variables may be used anywhere else in the expression except in the
2728 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2729 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2731 If this is true, return 1. Otherwise, return zero. */
2734 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2736 enum tree_code code = TREE_CODE (arg);
2737 enum tree_code_class class = TREE_CODE_CLASS (code);
2739 /* We can handle some of the tcc_expression cases here. */
2740 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2742 else if (class == tcc_expression
2743 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2744 || code == COMPOUND_EXPR))
2747 else if (class == tcc_expression && code == SAVE_EXPR
2748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2750 /* If we've already found a CVAL1 or CVAL2, this expression is
2751 two complex to handle. */
2752 if (*cval1 || *cval2)
2762 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2765 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p));
2772 case tcc_expression:
2773 if (code == COND_EXPR)
2774 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2775 cval1, cval2, save_p)
2776 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2777 cval1, cval2, save_p)
2778 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2779 cval1, cval2, save_p));
2782 case tcc_comparison:
2783 /* First see if we can handle the first operand, then the second. For
2784 the second operand, we know *CVAL1 can't be zero. It must be that
2785 one side of the comparison is each of the values; test for the
2786 case where this isn't true by failing if the two operands
2789 if (operand_equal_p (TREE_OPERAND (arg, 0),
2790 TREE_OPERAND (arg, 1), 0))
2794 *cval1 = TREE_OPERAND (arg, 0);
2795 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2797 else if (*cval2 == 0)
2798 *cval2 = TREE_OPERAND (arg, 0);
2799 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2804 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2806 else if (*cval2 == 0)
2807 *cval2 = TREE_OPERAND (arg, 1);
2808 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2820 /* ARG is a tree that is known to contain just arithmetic operations and
2821 comparisons. Evaluate the operations in the tree substituting NEW0 for
2822 any occurrence of OLD0 as an operand of a comparison and likewise for
2826 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2828 tree type = TREE_TYPE (arg);
2829 enum tree_code code = TREE_CODE (arg);
2830 enum tree_code_class class = TREE_CODE_CLASS (code);
2832 /* We can handle some of the tcc_expression cases here. */
2833 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2835 else if (class == tcc_expression
2836 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2842 return fold_build1 (code, type,
2843 eval_subst (TREE_OPERAND (arg, 0),
2844 old0, new0, old1, new1));
2847 return fold_build2 (code, type,
2848 eval_subst (TREE_OPERAND (arg, 0),
2849 old0, new0, old1, new1),
2850 eval_subst (TREE_OPERAND (arg, 1),
2851 old0, new0, old1, new1));
2853 case tcc_expression:
2857 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2860 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2863 return fold_build3 (code, type,
2864 eval_subst (TREE_OPERAND (arg, 0),
2865 old0, new0, old1, new1),
2866 eval_subst (TREE_OPERAND (arg, 1),
2867 old0, new0, old1, new1),
2868 eval_subst (TREE_OPERAND (arg, 2),
2869 old0, new0, old1, new1));
2873 /* Fall through - ??? */
2875 case tcc_comparison:
2877 tree arg0 = TREE_OPERAND (arg, 0);
2878 tree arg1 = TREE_OPERAND (arg, 1);
2880 /* We need to check both for exact equality and tree equality. The
2881 former will be true if the operand has a side-effect. In that
2882 case, we know the operand occurred exactly once. */
2884 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2886 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2889 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2891 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2894 return fold_build2 (code, type, arg0, arg1);
2902 /* Return a tree for the case when the result of an expression is RESULT
2903 converted to TYPE and OMITTED was previously an operand of the expression
2904 but is now not needed (e.g., we folded OMITTED * 0).
2906 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2907 the conversion of RESULT to TYPE. */
2910 omit_one_operand (tree type, tree result, tree omitted)
2912 tree t = fold_convert (type, result);
2914 if (TREE_SIDE_EFFECTS (omitted))
2915 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2917 return non_lvalue (t);
2920 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2923 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2925 tree t = fold_convert (type, result);
2927 if (TREE_SIDE_EFFECTS (omitted))
2928 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2930 return pedantic_non_lvalue (t);
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2935 of the expression but are now not needed.
2937 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2938 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2939 evaluated before OMITTED2. Otherwise, if neither has side effects,
2940 just do the conversion of RESULT to TYPE. */
2943 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2945 tree t = fold_convert (type, result);
2947 if (TREE_SIDE_EFFECTS (omitted2))
2948 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2949 if (TREE_SIDE_EFFECTS (omitted1))
2950 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2952 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2956 /* Return a simplified tree node for the truth-negation of ARG. This
2957 never alters ARG itself. We assume that ARG is an operation that
2958 returns a truth value (0 or 1).
2960 FIXME: one would think we would fold the result, but it causes
2961 problems with the dominator optimizer. */
2963 invert_truthvalue (tree arg)
2965 tree type = TREE_TYPE (arg);
2966 enum tree_code code = TREE_CODE (arg);
2968 if (code == ERROR_MARK)
2971 /* If this is a comparison, we can simply invert it, except for
2972 floating-point non-equality comparisons, in which case we just
2973 enclose a TRUTH_NOT_EXPR around what we have. */
2975 if (TREE_CODE_CLASS (code) == tcc_comparison)
2977 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2978 if (FLOAT_TYPE_P (op_type)
2979 && flag_trapping_math
2980 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2981 && code != NE_EXPR && code != EQ_EXPR)
2982 return build1 (TRUTH_NOT_EXPR, type, arg);
2985 code = invert_tree_comparison (code,
2986 HONOR_NANS (TYPE_MODE (op_type)));
2987 if (code == ERROR_MARK)
2988 return build1 (TRUTH_NOT_EXPR, type, arg);
2990 return build2 (code, type,
2991 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2998 return constant_boolean_node (integer_zerop (arg), type);
3000 case TRUTH_AND_EXPR:
3001 return build2 (TRUTH_OR_EXPR, type,
3002 invert_truthvalue (TREE_OPERAND (arg, 0)),
3003 invert_truthvalue (TREE_OPERAND (arg, 1)));
3006 return build2 (TRUTH_AND_EXPR, type,
3007 invert_truthvalue (TREE_OPERAND (arg, 0)),
3008 invert_truthvalue (TREE_OPERAND (arg, 1)));
3010 case TRUTH_XOR_EXPR:
3011 /* Here we can invert either operand. We invert the first operand
3012 unless the second operand is a TRUTH_NOT_EXPR in which case our
3013 result is the XOR of the first operand with the inside of the
3014 negation of the second operand. */
3016 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3017 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3018 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3020 return build2 (TRUTH_XOR_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 TREE_OPERAND (arg, 1));
3024 case TRUTH_ANDIF_EXPR:
3025 return build2 (TRUTH_ORIF_EXPR, type,
3026 invert_truthvalue (TREE_OPERAND (arg, 0)),
3027 invert_truthvalue (TREE_OPERAND (arg, 1)));
3029 case TRUTH_ORIF_EXPR:
3030 return build2 (TRUTH_ANDIF_EXPR, type,
3031 invert_truthvalue (TREE_OPERAND (arg, 0)),
3032 invert_truthvalue (TREE_OPERAND (arg, 1)));
3034 case TRUTH_NOT_EXPR:
3035 return TREE_OPERAND (arg, 0);
3039 tree arg1 = TREE_OPERAND (arg, 1);
3040 tree arg2 = TREE_OPERAND (arg, 2);
3041 /* A COND_EXPR may have a throw as one operand, which
3042 then has void type. Just leave void operands
3044 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3045 VOID_TYPE_P (TREE_TYPE (arg1))
3046 ? arg1 : invert_truthvalue (arg1),
3047 VOID_TYPE_P (TREE_TYPE (arg2))
3048 ? arg2 : invert_truthvalue (arg2));
3052 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3053 invert_truthvalue (TREE_OPERAND (arg, 1)));
3055 case NON_LVALUE_EXPR:
3056 return invert_truthvalue (TREE_OPERAND (arg, 0));
3059 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3064 return build1 (TREE_CODE (arg), type,
3065 invert_truthvalue (TREE_OPERAND (arg, 0)));
3068 if (!integer_onep (TREE_OPERAND (arg, 1)))
3070 return build2 (EQ_EXPR, type, arg,
3071 fold_convert (type, integer_zero_node));
3074 return build1 (TRUTH_NOT_EXPR, type, arg);
3076 case CLEANUP_POINT_EXPR:
3077 return build1 (CLEANUP_POINT_EXPR, type,
3078 invert_truthvalue (TREE_OPERAND (arg, 0)));
3083 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3084 return build1 (TRUTH_NOT_EXPR, type, arg);
3087 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3088 operands are another bit-wise operation with a common input. If so,
3089 distribute the bit operations to save an operation and possibly two if
3090 constants are involved. For example, convert
3091 (A | B) & (A | C) into A | (B & C)
3092 Further simplification will occur if B and C are constants.
3094 If this optimization cannot be done, 0 will be returned. */
3097 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3102 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3103 || TREE_CODE (arg0) == code
3104 || (TREE_CODE (arg0) != BIT_AND_EXPR
3105 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3108 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3110 common = TREE_OPERAND (arg0, 0);
3111 left = TREE_OPERAND (arg0, 1);
3112 right = TREE_OPERAND (arg1, 1);
3114 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3116 common = TREE_OPERAND (arg0, 0);
3117 left = TREE_OPERAND (arg0, 1);
3118 right = TREE_OPERAND (arg1, 0);
3120 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3122 common = TREE_OPERAND (arg0, 1);
3123 left = TREE_OPERAND (arg0, 0);
3124 right = TREE_OPERAND (arg1, 1);
3126 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3128 common = TREE_OPERAND (arg0, 1);
3129 left = TREE_OPERAND (arg0, 0);
3130 right = TREE_OPERAND (arg1, 0);
3135 return fold_build2 (TREE_CODE (arg0), type, common,
3136 fold_build2 (code, type, left, right));
3139 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3140 with code CODE. This optimization is unsafe. */
3142 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3144 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3145 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3147 /* (A / C) +- (B / C) -> (A +- B) / C. */
3149 && operand_equal_p (TREE_OPERAND (arg0, 1),
3150 TREE_OPERAND (arg1, 1), 0))
3151 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3152 fold_build2 (code, type,
3153 TREE_OPERAND (arg0, 0),
3154 TREE_OPERAND (arg1, 0)),
3155 TREE_OPERAND (arg0, 1));
3157 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3158 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3159 TREE_OPERAND (arg1, 0), 0)
3160 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3161 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3163 REAL_VALUE_TYPE r0, r1;
3164 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3165 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3167 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3169 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3170 real_arithmetic (&r0, code, &r0, &r1);
3171 return fold_build2 (MULT_EXPR, type,
3172 TREE_OPERAND (arg0, 0),
3173 build_real (type, r0));
3179 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3180 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3183 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3190 tree size = TYPE_SIZE (TREE_TYPE (inner));
3191 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3192 || POINTER_TYPE_P (TREE_TYPE (inner)))
3193 && host_integerp (size, 0)
3194 && tree_low_cst (size, 0) == bitsize)
3195 return fold_convert (type, inner);
3198 result = build3 (BIT_FIELD_REF, type, inner,
3199 size_int (bitsize), bitsize_int (bitpos));
3201 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3206 /* Optimize a bit-field compare.
3208 There are two cases: First is a compare against a constant and the
3209 second is a comparison of two items where the fields are at the same
3210 bit position relative to the start of a chunk (byte, halfword, word)
3211 large enough to contain it. In these cases we can avoid the shift
3212 implicit in bitfield extractions.
3214 For constants, we emit a compare of the shifted constant with the
3215 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3216 compared. For two fields at the same position, we do the ANDs with the
3217 similar mask and compare the result of the ANDs.
3219 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3220 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3221 are the left and right operands of the comparison, respectively.
3223 If the optimization described above can be done, we return the resulting
3224 tree. Otherwise we return zero. */
3227 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3230 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3231 tree type = TREE_TYPE (lhs);
3232 tree signed_type, unsigned_type;
3233 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3234 enum machine_mode lmode, rmode, nmode;
3235 int lunsignedp, runsignedp;
3236 int lvolatilep = 0, rvolatilep = 0;
3237 tree linner, rinner = NULL_TREE;
3241 /* Get all the information about the extractions being done. If the bit size
3242 if the same as the size of the underlying object, we aren't doing an
3243 extraction at all and so can do nothing. We also don't want to
3244 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3245 then will no longer be able to replace it. */
3246 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3247 &lunsignedp, &lvolatilep, false);
3248 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3249 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3254 /* If this is not a constant, we can only do something if bit positions,
3255 sizes, and signedness are the same. */
3256 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3257 &runsignedp, &rvolatilep, false);
3259 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3260 || lunsignedp != runsignedp || offset != 0
3261 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3265 /* See if we can find a mode to refer to this field. We should be able to,
3266 but fail if we can't. */
3267 nmode = get_best_mode (lbitsize, lbitpos,
3268 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3269 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3270 TYPE_ALIGN (TREE_TYPE (rinner))),
3271 word_mode, lvolatilep || rvolatilep);
3272 if (nmode == VOIDmode)
3275 /* Set signed and unsigned types of the precision of this mode for the
3277 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3278 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3280 /* Compute the bit position and size for the new reference and our offset
3281 within it. If the new reference is the same size as the original, we
3282 won't optimize anything, so return zero. */
3283 nbitsize = GET_MODE_BITSIZE (nmode);
3284 nbitpos = lbitpos & ~ (nbitsize - 1);
3286 if (nbitsize == lbitsize)
3289 if (BYTES_BIG_ENDIAN)
3290 lbitpos = nbitsize - lbitsize - lbitpos;
3292 /* Make the mask to be used against the extracted field. */
3293 mask = build_int_cst (unsigned_type, -1);
3294 mask = force_fit_type (mask, 0, false, false);
3295 mask = fold_convert (unsigned_type, mask);
3296 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3297 mask = const_binop (RSHIFT_EXPR, mask,
3298 size_int (nbitsize - lbitsize - lbitpos), 0);
3301 /* If not comparing with constant, just rework the comparison
3303 return build2 (code, compare_type,
3304 build2 (BIT_AND_EXPR, unsigned_type,
3305 make_bit_field_ref (linner, unsigned_type,
3306 nbitsize, nbitpos, 1),
3308 build2 (BIT_AND_EXPR, unsigned_type,
3309 make_bit_field_ref (rinner, unsigned_type,
3310 nbitsize, nbitpos, 1),
3313 /* Otherwise, we are handling the constant case. See if the constant is too
3314 big for the field. Warn and return a tree of for 0 (false) if so. We do
3315 this not only for its own sake, but to avoid having to test for this
3316 error case below. If we didn't, we might generate wrong code.
3318 For unsigned fields, the constant shifted right by the field length should
3319 be all zero. For signed fields, the high-order bits should agree with
3324 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3325 fold_convert (unsigned_type, rhs),
3326 size_int (lbitsize), 0)))
3328 warning (0, "comparison is always %d due to width of bit-field",
3330 return constant_boolean_node (code == NE_EXPR, compare_type);
3335 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3336 size_int (lbitsize - 1), 0);
3337 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3339 warning (0, "comparison is always %d due to width of bit-field",
3341 return constant_boolean_node (code == NE_EXPR, compare_type);
3345 /* Single-bit compares should always be against zero. */
3346 if (lbitsize == 1 && ! integer_zerop (rhs))
3348 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3349 rhs = fold_convert (type, integer_zero_node);
3352 /* Make a new bitfield reference, shift the constant over the
3353 appropriate number of bits and mask it with the computed mask
3354 (in case this was a signed field). If we changed it, make a new one. */
3355 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3358 TREE_SIDE_EFFECTS (lhs) = 1;
3359 TREE_THIS_VOLATILE (lhs) = 1;
3362 rhs = const_binop (BIT_AND_EXPR,
3363 const_binop (LSHIFT_EXPR,
3364 fold_convert (unsigned_type, rhs),
3365 size_int (lbitpos), 0),
3368 return build2 (code, compare_type,
3369 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3373 /* Subroutine for fold_truthop: decode a field reference.
3375 If EXP is a comparison reference, we return the innermost reference.
3377 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3378 set to the starting bit number.
3380 If the innermost field can be completely contained in a mode-sized
3381 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3383 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3384 otherwise it is not changed.
3386 *PUNSIGNEDP is set to the signedness of the field.
3388 *PMASK is set to the mask used. This is either contained in a
3389 BIT_AND_EXPR or derived from the width of the field.
3391 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3393 Return 0 if this is not a component reference or is one that we can't
3394 do anything with. */
3397 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3398 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3399 int *punsignedp, int *pvolatilep,
3400 tree *pmask, tree *pand_mask)
3402 tree outer_type = 0;
3404 tree mask, inner, offset;
3406 unsigned int precision;
3408 /* All the optimizations using this function assume integer fields.
3409 There are problems with FP fields since the type_for_size call
3410 below can fail for, e.g., XFmode. */
3411 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3414 /* We are interested in the bare arrangement of bits, so strip everything
3415 that doesn't affect the machine mode. However, record the type of the
3416 outermost expression if it may matter below. */
3417 if (TREE_CODE (exp) == NOP_EXPR
3418 || TREE_CODE (exp) == CONVERT_EXPR
3419 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3420 outer_type = TREE_TYPE (exp);
3423 if (TREE_CODE (exp) == BIT_AND_EXPR)
3425 and_mask = TREE_OPERAND (exp, 1);
3426 exp = TREE_OPERAND (exp, 0);
3427 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3428 if (TREE_CODE (and_mask) != INTEGER_CST)
3432 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3433 punsignedp, pvolatilep, false);
3434 if ((inner == exp && and_mask == 0)
3435 || *pbitsize < 0 || offset != 0
3436 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3439 /* If the number of bits in the reference is the same as the bitsize of
3440 the outer type, then the outer type gives the signedness. Otherwise
3441 (in case of a small bitfield) the signedness is unchanged. */
3442 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3443 *punsignedp = TYPE_UNSIGNED (outer_type);
3445 /* Compute the mask to access the bitfield. */
3446 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3447 precision = TYPE_PRECISION (unsigned_type);
3449 mask = build_int_cst (unsigned_type, -1);
3450 mask = force_fit_type (mask, 0, false, false);
3452 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3453 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3455 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3457 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3458 fold_convert (unsigned_type, and_mask), mask);
3461 *pand_mask = and_mask;
3465 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3469 all_ones_mask_p (tree mask, int size)
3471 tree type = TREE_TYPE (mask);
3472 unsigned int precision = TYPE_PRECISION (type);
3475 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3476 tmask = force_fit_type (tmask, 0, false, false);
3479 tree_int_cst_equal (mask,
3480 const_binop (RSHIFT_EXPR,
3481 const_binop (LSHIFT_EXPR, tmask,
3482 size_int (precision - size),
3484 size_int (precision - size), 0));
3487 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3488 represents the sign bit of EXP's type. If EXP represents a sign
3489 or zero extension, also test VAL against the unextended type.
3490 The return value is the (sub)expression whose sign bit is VAL,
3491 or NULL_TREE otherwise. */
3494 sign_bit_p (tree exp, tree val)
3496 unsigned HOST_WIDE_INT mask_lo, lo;
3497 HOST_WIDE_INT mask_hi, hi;
3501 /* Tree EXP must have an integral type. */
3502 t = TREE_TYPE (exp);
3503 if (! INTEGRAL_TYPE_P (t))
3506 /* Tree VAL must be an integer constant. */
3507 if (TREE_CODE (val) != INTEGER_CST
3508 || TREE_CONSTANT_OVERFLOW (val))
3511 width = TYPE_PRECISION (t);
3512 if (width > HOST_BITS_PER_WIDE_INT)
3514 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3517 mask_hi = ((unsigned HOST_WIDE_INT) -1
3518 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3524 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3527 mask_lo = ((unsigned HOST_WIDE_INT) -1
3528 >> (HOST_BITS_PER_WIDE_INT - width));
3531 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3532 treat VAL as if it were unsigned. */
3533 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3534 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3537 /* Handle extension from a narrower type. */
3538 if (TREE_CODE (exp) == NOP_EXPR
3539 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3540 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3545 /* Subroutine for fold_truthop: determine if an operand is simple enough
3546 to be evaluated unconditionally. */
3549 simple_operand_p (tree exp)
3551 /* Strip any conversions that don't change the machine mode. */
3554 return (CONSTANT_CLASS_P (exp)
3555 || TREE_CODE (exp) == SSA_NAME
3557 && ! TREE_ADDRESSABLE (exp)
3558 && ! TREE_THIS_VOLATILE (exp)
3559 && ! DECL_NONLOCAL (exp)
3560 /* Don't regard global variables as simple. They may be
3561 allocated in ways unknown to the compiler (shared memory,
3562 #pragma weak, etc). */
3563 && ! TREE_PUBLIC (exp)
3564 && ! DECL_EXTERNAL (exp)
3565 /* Loading a static variable is unduly expensive, but global
3566 registers aren't expensive. */
3567 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3570 /* The following functions are subroutines to fold_range_test and allow it to
3571 try to change a logical combination of comparisons into a range test.
3574 X == 2 || X == 3 || X == 4 || X == 5
3578 (unsigned) (X - 2) <= 3
3580 We describe each set of comparisons as being either inside or outside
3581 a range, using a variable named like IN_P, and then describe the
3582 range with a lower and upper bound. If one of the bounds is omitted,
3583 it represents either the highest or lowest value of the type.
3585 In the comments below, we represent a range by two numbers in brackets
3586 preceded by a "+" to designate being inside that range, or a "-" to
3587 designate being outside that range, so the condition can be inverted by
3588 flipping the prefix. An omitted bound is represented by a "-". For
3589 example, "- [-, 10]" means being outside the range starting at the lowest
3590 possible value and ending at 10, in other words, being greater than 10.
3591 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3594 We set up things so that the missing bounds are handled in a consistent
3595 manner so neither a missing bound nor "true" and "false" need to be
3596 handled using a special case. */
3598 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3599 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3600 and UPPER1_P are nonzero if the respective argument is an upper bound
3601 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3602 must be specified for a comparison. ARG1 will be converted to ARG0's
3603 type if both are specified. */
3606 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3607 tree arg1, int upper1_p)
3613 /* If neither arg represents infinity, do the normal operation.
3614 Else, if not a comparison, return infinity. Else handle the special
3615 comparison rules. Note that most of the cases below won't occur, but
3616 are handled for consistency. */
3618 if (arg0 != 0 && arg1 != 0)
3620 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3621 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3623 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3626 if (TREE_CODE_CLASS (code) != tcc_comparison)
3629 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3630 for neither. In real maths, we cannot assume open ended ranges are
3631 the same. But, this is computer arithmetic, where numbers are finite.
3632 We can therefore make the transformation of any unbounded range with
3633 the value Z, Z being greater than any representable number. This permits
3634 us to treat unbounded ranges as equal. */
3635 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3636 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3640 result = sgn0 == sgn1;
3643 result = sgn0 != sgn1;
3646 result = sgn0 < sgn1;
3649 result = sgn0 <= sgn1;
3652 result = sgn0 > sgn1;
3655 result = sgn0 >= sgn1;
3661 return constant_boolean_node (result, type);
3664 /* Given EXP, a logical expression, set the range it is testing into
3665 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3666 actually being tested. *PLOW and *PHIGH will be made of the same type
3667 as the returned expression. If EXP is not a comparison, we will most
3668 likely not be returning a useful value and range. */
3671 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3673 enum tree_code code;
3674 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3675 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3677 tree low, high, n_low, n_high;
3679 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3680 and see if we can refine the range. Some of the cases below may not
3681 happen, but it doesn't seem worth worrying about this. We "continue"
3682 the outer loop when we've changed something; otherwise we "break"
3683 the switch, which will "break" the while. */
3686 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3690 code = TREE_CODE (exp);
3691 exp_type = TREE_TYPE (exp);
3693 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3695 if (TREE_CODE_LENGTH (code) > 0)
3696 arg0 = TREE_OPERAND (exp, 0);
3697 if (TREE_CODE_CLASS (code) == tcc_comparison
3698 || TREE_CODE_CLASS (code) == tcc_unary
3699 || TREE_CODE_CLASS (code) == tcc_binary)
3700 arg0_type = TREE_TYPE (arg0);
3701 if (TREE_CODE_CLASS (code) == tcc_binary
3702 || TREE_CODE_CLASS (code) == tcc_comparison
3703 || (TREE_CODE_CLASS (code) == tcc_expression
3704 && TREE_CODE_LENGTH (code) > 1))
3705 arg1 = TREE_OPERAND (exp, 1);
3710 case TRUTH_NOT_EXPR:
3711 in_p = ! in_p, exp = arg0;
3714 case EQ_EXPR: case NE_EXPR:
3715 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3716 /* We can only do something if the range is testing for zero
3717 and if the second operand is an integer constant. Note that
3718 saying something is "in" the range we make is done by
3719 complementing IN_P since it will set in the initial case of
3720 being not equal to zero; "out" is leaving it alone. */
3721 if (low == 0 || high == 0
3722 || ! integer_zerop (low) || ! integer_zerop (high)
3723 || TREE_CODE (arg1) != INTEGER_CST)
3728 case NE_EXPR: /* - [c, c] */
3731 case EQ_EXPR: /* + [c, c] */
3732 in_p = ! in_p, low = high = arg1;
3734 case GT_EXPR: /* - [-, c] */
3735 low = 0, high = arg1;
3737 case GE_EXPR: /* + [c, -] */
3738 in_p = ! in_p, low = arg1, high = 0;
3740 case LT_EXPR: /* - [c, -] */
3741 low = arg1, high = 0;
3743 case LE_EXPR: /* + [-, c] */
3744 in_p = ! in_p, low = 0, high = arg1;
3750 /* If this is an unsigned comparison, we also know that EXP is
3751 greater than or equal to zero. We base the range tests we make
3752 on that fact, so we record it here so we can parse existing
3753 range tests. We test arg0_type since often the return type
3754 of, e.g. EQ_EXPR, is boolean. */
3755 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3757 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3759 fold_convert (arg0_type, integer_zero_node),
3763 in_p = n_in_p, low = n_low, high = n_high;
3765 /* If the high bound is missing, but we have a nonzero low
3766 bound, reverse the range so it goes from zero to the low bound
3768 if (high == 0 && low && ! integer_zerop (low))
3771 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3772 integer_one_node, 0);
3773 low = fold_convert (arg0_type, integer_zero_node);
3781 /* (-x) IN [a,b] -> x in [-b, -a] */
3782 n_low = range_binop (MINUS_EXPR, exp_type,
3783 fold_convert (exp_type, integer_zero_node),
3785 n_high = range_binop (MINUS_EXPR, exp_type,
3786 fold_convert (exp_type, integer_zero_node),
3788 low = n_low, high = n_high;
3794 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3795 fold_convert (exp_type, integer_one_node));
3798 case PLUS_EXPR: case MINUS_EXPR:
3799 if (TREE_CODE (arg1) != INTEGER_CST)
3802 /* If EXP is signed, any overflow in the computation is undefined,
3803 so we don't worry about it so long as our computations on
3804 the bounds don't overflow. For unsigned, overflow is defined
3805 and this is exactly the right thing. */
3806 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3807 arg0_type, low, 0, arg1, 0);
3808 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3809 arg0_type, high, 1, arg1, 0);
3810 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3811 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3814 /* Check for an unsigned range which has wrapped around the maximum
3815 value thus making n_high < n_low, and normalize it. */
3816 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3818 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3819 integer_one_node, 0);
3820 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3821 integer_one_node, 0);
3823 /* If the range is of the form +/- [ x+1, x ], we won't
3824 be able to normalize it. But then, it represents the
3825 whole range or the empty set, so make it
3827 if (tree_int_cst_equal (n_low, low)
3828 && tree_int_cst_equal (n_high, high))
3834 low = n_low, high = n_high;
3839 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3840 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3843 if (! INTEGRAL_TYPE_P (arg0_type)
3844 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3845 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3848 n_low = low, n_high = high;
3851 n_low = fold_convert (arg0_type, n_low);
3854 n_high = fold_convert (arg0_type, n_high);
3857 /* If we're converting arg0 from an unsigned type, to exp,
3858 a signed type, we will be doing the comparison as unsigned.
3859 The tests above have already verified that LOW and HIGH
3862 So we have to ensure that we will handle large unsigned
3863 values the same way that the current signed bounds treat
3866 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3869 tree equiv_type = lang_hooks.types.type_for_mode
3870 (TYPE_MODE (arg0_type), 1);
3872 /* A range without an upper bound is, naturally, unbounded.
3873 Since convert would have cropped a very large value, use
3874 the max value for the destination type. */
3876 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3877 : TYPE_MAX_VALUE (arg0_type);
3879 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3880 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3881 fold_convert (arg0_type,
3883 fold_convert (arg0_type,
3886 /* If the low bound is specified, "and" the range with the
3887 range for which the original unsigned value will be
3891 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3892 1, n_low, n_high, 1,
3893 fold_convert (arg0_type,
3898 in_p = (n_in_p == in_p);
3902 /* Otherwise, "or" the range with the range of the input
3903 that will be interpreted as negative. */
3904 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3905 0, n_low, n_high, 1,
3906 fold_convert (arg0_type,
3911 in_p = (in_p != n_in_p);
3916 low = n_low, high = n_high;
3926 /* If EXP is a constant, we can evaluate whether this is true or false. */
3927 if (TREE_CODE (exp) == INTEGER_CST)
3929 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3931 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3937 *pin_p = in_p, *plow = low, *phigh = high;
3941 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3942 type, TYPE, return an expression to test if EXP is in (or out of, depending
3943 on IN_P) the range. Return 0 if the test couldn't be created. */
3946 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3948 tree etype = TREE_TYPE (exp);
3951 #ifdef HAVE_canonicalize_funcptr_for_compare
3952 /* Disable this optimization for function pointer expressions
3953 on targets that require function pointer canonicalization. */
3954 if (HAVE_canonicalize_funcptr_for_compare
3955 && TREE_CODE (etype) == POINTER_TYPE
3956 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3962 value = build_range_check (type, exp, 1, low, high);
3964 return invert_truthvalue (value);
3969 if (low == 0 && high == 0)
3970 return fold_convert (type, integer_one_node);
3973 return fold_build2 (LE_EXPR, type, exp,
3974 fold_convert (etype, high));
3977 return fold_build2 (GE_EXPR, type, exp,
3978 fold_convert (etype, low));
3980 if (operand_equal_p (low, high, 0))
3981 return fold_build2 (EQ_EXPR, type, exp,
3982 fold_convert (etype, low));
3984 if (integer_zerop (low))
3986 if (! TYPE_UNSIGNED (etype))
3988 etype = lang_hooks.types.unsigned_type (etype);
3989 high = fold_convert (etype, high);
3990 exp = fold_convert (etype, exp);
3992 return build_range_check (type, exp, 1, 0, high);
3995 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3996 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3998 unsigned HOST_WIDE_INT lo;
4002 prec = TYPE_PRECISION (etype);
4003 if (prec <= HOST_BITS_PER_WIDE_INT)
4006 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4010 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4011 lo = (unsigned HOST_WIDE_INT) -1;
4014 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4016 if (TYPE_UNSIGNED (etype))
4018 etype = lang_hooks.types.signed_type (etype);
4019 exp = fold_convert (etype, exp);
4021 return fold_build2 (GT_EXPR, type, exp,
4022 fold_convert (etype, integer_zero_node));
4026 value = const_binop (MINUS_EXPR, high, low, 0);
4027 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4028 && ! TYPE_UNSIGNED (etype))
4030 tree utype, minv, maxv;
4032 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4033 for the type in question, as we rely on this here. */
4034 switch (TREE_CODE (etype))
4039 /* There is no requirement that LOW be within the range of ETYPE
4040 if the latter is a subtype. It must, however, be within the base
4041 type of ETYPE. So be sure we do the subtraction in that type. */
4042 if (TREE_TYPE (etype))
4043 etype = TREE_TYPE (etype);
4044 utype = lang_hooks.types.unsigned_type (etype);
4045 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4046 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4047 integer_one_node, 1);
4048 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4049 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4053 high = fold_convert (etype, high);
4054 low = fold_convert (etype, low);
4055 exp = fold_convert (etype, exp);
4056 value = const_binop (MINUS_EXPR, high, low, 0);
4064 if (value != 0 && ! TREE_OVERFLOW (value))
4066 /* There is no requirement that LOW be within the range of ETYPE
4067 if the latter is a subtype. It must, however, be within the base
4068 type of ETYPE. So be sure we do the subtraction in that type. */
4069 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4071 etype = TREE_TYPE (etype);
4072 exp = fold_convert (etype, exp);
4073 low = fold_convert (etype, low);
4074 value = fold_convert (etype, value);
4077 return build_range_check (type,
4078 fold_build2 (MINUS_EXPR, etype, exp, low),
4079 1, build_int_cst (etype, 0), value);
4085 /* Given two ranges, see if we can merge them into one. Return 1 if we
4086 can, 0 if we can't. Set the output range into the specified parameters. */
4089 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4090 tree high0, int in1_p, tree low1, tree high1)
4098 int lowequal = ((low0 == 0 && low1 == 0)
4099 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4100 low0, 0, low1, 0)));
4101 int highequal = ((high0 == 0 && high1 == 0)
4102 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4103 high0, 1, high1, 1)));
4105 /* Make range 0 be the range that starts first, or ends last if they
4106 start at the same value. Swap them if it isn't. */
4107 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4110 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4111 high1, 1, high0, 1))))
4113 temp = in0_p, in0_p = in1_p, in1_p = temp;
4114 tem = low0, low0 = low1, low1 = tem;
4115 tem = high0, high0 = high1, high1 = tem;
4118 /* Now flag two cases, whether the ranges are disjoint or whether the
4119 second range is totally subsumed in the first. Note that the tests
4120 below are simplified by the ones above. */
4121 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4122 high0, 1, low1, 0));
4123 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4124 high1, 1, high0, 1));
4126 /* We now have four cases, depending on whether we are including or
4127 excluding the two ranges. */
4130 /* If they don't overlap, the result is false. If the second range
4131 is a subset it is the result. Otherwise, the range is from the start
4132 of the second to the end of the first. */
4134 in_p = 0, low = high = 0;
4136 in_p = 1, low = low1, high = high1;
4138 in_p = 1, low = low1, high = high0;
4141 else if (in0_p && ! in1_p)
4143 /* If they don't overlap, the result is the first range. If they are
4144 equal, the result is false. If the second range is a subset of the
4145 first, and the ranges begin at the same place, we go from just after
4146 the end of the first range to the end of the second. If the second
4147 range is not a subset of the first, or if it is a subset and both
4148 ranges end at the same place, the range starts at the start of the
4149 first range and ends just before the second range.
4150 Otherwise, we can't describe this as a single range. */
4152 in_p = 1, low = low0, high = high0;
4153 else if (lowequal && highequal)
4154 in_p = 0, low = high = 0;
4155 else if (subset && lowequal)
4157 in_p = 1, high = high0;
4158 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4159 integer_one_node, 0);
4161 else if (! subset || highequal)
4163 in_p = 1, low = low0;
4164 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4165 integer_one_node, 0);
4171 else if (! in0_p && in1_p)
4173 /* If they don't overlap, the result is the second range. If the second
4174 is a subset of the first, the result is false. Otherwise,
4175 the range starts just after the first range and ends at the
4176 end of the second. */
4178 in_p = 1, low = low1, high = high1;
4179 else if (subset || highequal)
4180 in_p = 0, low = high = 0;
4183 in_p = 1, high = high1;
4184 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4185 integer_one_node, 0);
4191 /* The case where we are excluding both ranges. Here the complex case
4192 is if they don't overlap. In that case, the only time we have a
4193 range is if they are adjacent. If the second is a subset of the
4194 first, the result is the first. Otherwise, the range to exclude
4195 starts at the beginning of the first range and ends at the end of the
4199 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4200 range_binop (PLUS_EXPR, NULL_TREE,
4202 integer_one_node, 1),
4204 in_p = 0, low = low0, high = high1;
4207 /* Canonicalize - [min, x] into - [-, x]. */
4208 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4209 switch (TREE_CODE (TREE_TYPE (low0)))
4212 if (TYPE_PRECISION (TREE_TYPE (low0))
4213 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4218 if (tree_int_cst_equal (low0,
4219 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4223 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4224 && integer_zerop (low0))
4231 /* Canonicalize - [x, max] into - [x, -]. */
4232 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4233 switch (TREE_CODE (TREE_TYPE (high1)))
4236 if (TYPE_PRECISION (TREE_TYPE (high1))
4237 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4242 if (tree_int_cst_equal (high1,
4243 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4247 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4248 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4250 integer_one_node, 1)))
4257 /* The ranges might be also adjacent between the maximum and
4258 minimum values of the given type. For
4259 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4260 return + [x + 1, y - 1]. */
4261 if (low0 == 0 && high1 == 0)
4263 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4264 integer_one_node, 1);
4265 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4266 integer_one_node, 0);
4267 if (low == 0 || high == 0)
4277 in_p = 0, low = low0, high = high0;
4279 in_p = 0, low = low0, high = high1;
4282 *pin_p = in_p, *plow = low, *phigh = high;
4287 /* Subroutine of fold, looking inside expressions of the form
4288 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4289 of the COND_EXPR. This function is being used also to optimize
4290 A op B ? C : A, by reversing the comparison first.
4292 Return a folded expression whose code is not a COND_EXPR
4293 anymore, or NULL_TREE if no folding opportunity is found. */
4296 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4298 enum tree_code comp_code = TREE_CODE (arg0);
4299 tree arg00 = TREE_OPERAND (arg0, 0);
4300 tree arg01 = TREE_OPERAND (arg0, 1);
4301 tree arg1_type = TREE_TYPE (arg1);
4307 /* If we have A op 0 ? A : -A, consider applying the following
4310 A == 0? A : -A same as -A
4311 A != 0? A : -A same as A
4312 A >= 0? A : -A same as abs (A)
4313 A > 0? A : -A same as abs (A)
4314 A <= 0? A : -A same as -abs (A)
4315 A < 0? A : -A same as -abs (A)
4317 None of these transformations work for modes with signed
4318 zeros. If A is +/-0, the first two transformations will
4319 change the sign of the result (from +0 to -0, or vice
4320 versa). The last four will fix the sign of the result,
4321 even though the original expressions could be positive or
4322 negative, depending on the sign of A.
4324 Note that all these transformations are correct if A is
4325 NaN, since the two alternatives (A and -A) are also NaNs. */
4326 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4327 ? real_zerop (arg01)
4328 : integer_zerop (arg01))
4329 && ((TREE_CODE (arg2) == NEGATE_EXPR
4330 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4331 /* In the case that A is of the form X-Y, '-A' (arg2) may
4332 have already been folded to Y-X, check for that. */
4333 || (TREE_CODE (arg1) == MINUS_EXPR
4334 && TREE_CODE (arg2) == MINUS_EXPR
4335 && operand_equal_p (TREE_OPERAND (arg1, 0),
4336 TREE_OPERAND (arg2, 1), 0)
4337 && operand_equal_p (TREE_OPERAND (arg1, 1),
4338 TREE_OPERAND (arg2, 0), 0))))
4343 tem = fold_convert (arg1_type, arg1);
4344 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4347 return pedantic_non_lvalue (fold_convert (type, arg1));
4350 if (flag_trapping_math)
4355 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4356 arg1 = fold_convert (lang_hooks.types.signed_type
4357 (TREE_TYPE (arg1)), arg1);
4358 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4359 return pedantic_non_lvalue (fold_convert (type, tem));
4362 if (flag_trapping_math)
4366 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4367 arg1 = fold_convert (lang_hooks.types.signed_type
4368 (TREE_TYPE (arg1)), arg1);
4369 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4370 return negate_expr (fold_convert (type, tem));
4372 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4376 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4377 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4378 both transformations are correct when A is NaN: A != 0
4379 is then true, and A == 0 is false. */
4381 if (integer_zerop (arg01) && integer_zerop (arg2))
4383 if (comp_code == NE_EXPR)
4384 return pedantic_non_lvalue (fold_convert (type, arg1));
4385 else if (comp_code == EQ_EXPR)
4386 return fold_convert (type, integer_zero_node);
4389 /* Try some transformations of A op B ? A : B.
4391 A == B? A : B same as B
4392 A != B? A : B same as A
4393 A >= B? A : B same as max (A, B)
4394 A > B? A : B same as max (B, A)
4395 A <= B? A : B same as min (A, B)
4396 A < B? A : B same as min (B, A)
4398 As above, these transformations don't work in the presence
4399 of signed zeros. For example, if A and B are zeros of
4400 opposite sign, the first two transformations will change
4401 the sign of the result. In the last four, the original
4402 expressions give different results for (A=+0, B=-0) and
4403 (A=-0, B=+0), but the transformed expressions do not.
4405 The first two transformations are correct if either A or B
4406 is a NaN. In the first transformation, the condition will
4407 be false, and B will indeed be chosen. In the case of the
4408 second transformation, the condition A != B will be true,
4409 and A will be chosen.
4411 The conversions to max() and min() are not correct if B is
4412 a number and A is not. The conditions in the original
4413 expressions will be false, so all four give B. The min()
4414 and max() versions would give a NaN instead. */
4415 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4416 /* Avoid these transformations if the COND_EXPR may be used
4417 as an lvalue in the C++ front-end. PR c++/19199. */
4419 || strcmp (lang_hooks.name, "GNU C++") != 0
4420 || ! maybe_lvalue_p (arg1)
4421 || ! maybe_lvalue_p (arg2)))
4423 tree comp_op0 = arg00;
4424 tree comp_op1 = arg01;
4425 tree comp_type = TREE_TYPE (comp_op0);
4427 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4428 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4438 return pedantic_non_lvalue (fold_convert (type, arg2));
4440 return pedantic_non_lvalue (fold_convert (type, arg1));
4445 /* In C++ a ?: expression can be an lvalue, so put the
4446 operand which will be used if they are equal first
4447 so that we can convert this back to the
4448 corresponding COND_EXPR. */
4449 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4451 comp_op0 = fold_convert (comp_type, comp_op0);
4452 comp_op1 = fold_convert (comp_type, comp_op1);
4453 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4454 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4455 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4456 return pedantic_non_lvalue (fold_convert (type, tem));
4463 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4465 comp_op0 = fold_convert (comp_type, comp_op0);
4466 comp_op1 = fold_convert (comp_type, comp_op1);
4467 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4468 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4469 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4470 return pedantic_non_lvalue (fold_convert (type, tem));
4474 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4475 return pedantic_non_lvalue (fold_convert (type, arg2));
4478 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4479 return pedantic_non_lvalue (fold_convert (type, arg1));
4482 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4487 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4488 we might still be able to simplify this. For example,
4489 if C1 is one less or one more than C2, this might have started
4490 out as a MIN or MAX and been transformed by this function.
4491 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4493 if (INTEGRAL_TYPE_P (type)
4494 && TREE_CODE (arg01) == INTEGER_CST
4495 && TREE_CODE (arg2) == INTEGER_CST)
4499 /* We can replace A with C1 in this case. */
4500 arg1 = fold_convert (type, arg01);
4501 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4504 /* If C1 is C2 + 1, this is min(A, C2). */
4505 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4507 && operand_equal_p (arg01,
4508 const_binop (PLUS_EXPR, arg2,
4509 integer_one_node, 0),
4511 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4516 /* If C1 is C2 - 1, this is min(A, C2). */
4517 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4519 && operand_equal_p (arg01,
4520 const_binop (MINUS_EXPR, arg2,
4521 integer_one_node, 0),
4523 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4528 /* If C1 is C2 - 1, this is max(A, C2). */
4529 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4531 && operand_equal_p (arg01,
4532 const_binop (MINUS_EXPR, arg2,
4533 integer_one_node, 0),
4535 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4540 /* If C1 is C2 + 1, this is max(A, C2). */
4541 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4543 && operand_equal_p (arg01,
4544 const_binop (PLUS_EXPR, arg2,
4545 integer_one_node, 0),
4547 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4561 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4562 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4565 /* EXP is some logical combination of boolean tests. See if we can
4566 merge it into some range test. Return the new tree if so. */
4569 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4571 int or_op = (code == TRUTH_ORIF_EXPR
4572 || code == TRUTH_OR_EXPR);
4573 int in0_p, in1_p, in_p;
4574 tree low0, low1, low, high0, high1, high;
4575 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4576 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4579 /* If this is an OR operation, invert both sides; we will invert
4580 again at the end. */
4582 in0_p = ! in0_p, in1_p = ! in1_p;
4584 /* If both expressions are the same, if we can merge the ranges, and we
4585 can build the range test, return it or it inverted. If one of the
4586 ranges is always true or always false, consider it to be the same
4587 expression as the other. */
4588 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4589 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4591 && 0 != (tem = (build_range_check (type,
4593 : rhs != 0 ? rhs : integer_zero_node,
4595 return or_op ? invert_truthvalue (tem) : tem;
4597 /* On machines where the branch cost is expensive, if this is a
4598 short-circuited branch and the underlying object on both sides
4599 is the same, make a non-short-circuit operation. */
4600 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4601 && lhs != 0 && rhs != 0
4602 && (code == TRUTH_ANDIF_EXPR
4603 || code == TRUTH_ORIF_EXPR)
4604 && operand_equal_p (lhs, rhs, 0))
4606 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4607 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4608 which cases we can't do this. */
4609 if (simple_operand_p (lhs))
4610 return build2 (code == TRUTH_ANDIF_EXPR
4611 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4614 else if (lang_hooks.decls.global_bindings_p () == 0
4615 && ! CONTAINS_PLACEHOLDER_P (lhs))
4617 tree common = save_expr (lhs);
4619 if (0 != (lhs = build_range_check (type, common,
4620 or_op ? ! in0_p : in0_p,
4622 && (0 != (rhs = build_range_check (type, common,
4623 or_op ? ! in1_p : in1_p,
4625 return build2 (code == TRUTH_ANDIF_EXPR
4626 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4634 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4635 bit value. Arrange things so the extra bits will be set to zero if and
4636 only if C is signed-extended to its full width. If MASK is nonzero,
4637 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4640 unextend (tree c, int p, int unsignedp, tree mask)
4642 tree type = TREE_TYPE (c);
4643 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4646 if (p == modesize || unsignedp)
4649 /* We work by getting just the sign bit into the low-order bit, then
4650 into the high-order bit, then sign-extend. We then XOR that value
4652 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4653 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4655 /* We must use a signed type in order to get an arithmetic right shift.
4656 However, we must also avoid introducing accidental overflows, so that
4657 a subsequent call to integer_zerop will work. Hence we must
4658 do the type conversion here. At this point, the constant is either
4659 zero or one, and the conversion to a signed type can never overflow.
4660 We could get an overflow if this conversion is done anywhere else. */
4661 if (TYPE_UNSIGNED (type))
4662 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4664 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4665 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4667 temp = const_binop (BIT_AND_EXPR, temp,
4668 fold_convert (TREE_TYPE (c), mask), 0);
4669 /* If necessary, convert the type back to match the type of C. */
4670 if (TYPE_UNSIGNED (type))
4671 temp = fold_convert (type, temp);
4673 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4676 /* Find ways of folding logical expressions of LHS and RHS:
4677 Try to merge two comparisons to the same innermost item.
4678 Look for range tests like "ch >= '0' && ch <= '9'".
4679 Look for combinations of simple terms on machines with expensive branches
4680 and evaluate the RHS unconditionally.
4682 For example, if we have p->a == 2 && p->b == 4 and we can make an
4683 object large enough to span both A and B, we can do this with a comparison
4684 against the object ANDed with the a mask.
4686 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4687 operations to do this with one comparison.
4689 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4690 function and the one above.
4692 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4693 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4695 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4698 We return the simplified tree or 0 if no optimization is possible. */
4701 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4703 /* If this is the "or" of two comparisons, we can do something if
4704 the comparisons are NE_EXPR. If this is the "and", we can do something
4705 if the comparisons are EQ_EXPR. I.e.,
4706 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4708 WANTED_CODE is this operation code. For single bit fields, we can
4709 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4710 comparison for one-bit fields. */
4712 enum tree_code wanted_code;
4713 enum tree_code lcode, rcode;
4714 tree ll_arg, lr_arg, rl_arg, rr_arg;
4715 tree ll_inner, lr_inner, rl_inner, rr_inner;
4716 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4717 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4718 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4719 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4720 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4721 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4722 enum machine_mode lnmode, rnmode;
4723 tree ll_mask, lr_mask, rl_mask, rr_mask;
4724 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4725 tree l_const, r_const;
4726 tree lntype, rntype, result;
4727 int first_bit, end_bit;
4730 /* Start by getting the comparison codes. Fail if anything is volatile.
4731 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4732 it were surrounded with a NE_EXPR. */
4734 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4737 lcode = TREE_CODE (lhs);
4738 rcode = TREE_CODE (rhs);
4740 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4742 lhs = build2 (NE_EXPR, truth_type, lhs,
4743 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4747 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4749 rhs = build2 (NE_EXPR, truth_type, rhs,
4750 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4754 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4755 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4758 ll_arg = TREE_OPERAND (lhs, 0);
4759 lr_arg = TREE_OPERAND (lhs, 1);
4760 rl_arg = TREE_OPERAND (rhs, 0);
4761 rr_arg = TREE_OPERAND (rhs, 1);
4763 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4764 if (simple_operand_p (ll_arg)
4765 && simple_operand_p (lr_arg))
4768 if (operand_equal_p (ll_arg, rl_arg, 0)
4769 && operand_equal_p (lr_arg, rr_arg, 0))
4771 result = combine_comparisons (code, lcode, rcode,
4772 truth_type, ll_arg, lr_arg);
4776 else if (operand_equal_p (ll_arg, rr_arg, 0)
4777 && operand_equal_p (lr_arg, rl_arg, 0))
4779 result = combine_comparisons (code, lcode,
4780 swap_tree_comparison (rcode),
4781 truth_type, ll_arg, lr_arg);
4787 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4788 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4790 /* If the RHS can be evaluated unconditionally and its operands are
4791 simple, it wins to evaluate the RHS unconditionally on machines
4792 with expensive branches. In this case, this isn't a comparison
4793 that can be merged. Avoid doing this if the RHS is a floating-point
4794 comparison since those can trap. */
4796 if (BRANCH_COST >= 2
4797 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4798 && simple_operand_p (rl_arg)
4799 && simple_operand_p (rr_arg))
4801 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4802 if (code == TRUTH_OR_EXPR
4803 && lcode == NE_EXPR && integer_zerop (lr_arg)
4804 && rcode == NE_EXPR && integer_zerop (rr_arg)
4805 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4806 return build2 (NE_EXPR, truth_type,
4807 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4809 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4811 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4812 if (code == TRUTH_AND_EXPR
4813 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4814 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4815 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4816 return build2 (EQ_EXPR, truth_type,
4817 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4819 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4821 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4822 return build2 (code, truth_type, lhs, rhs);
4825 /* See if the comparisons can be merged. Then get all the parameters for
4828 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4829 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4833 ll_inner = decode_field_reference (ll_arg,
4834 &ll_bitsize, &ll_bitpos, &ll_mode,
4835 &ll_unsignedp, &volatilep, &ll_mask,
4837 lr_inner = decode_field_reference (lr_arg,
4838 &lr_bitsize, &lr_bitpos, &lr_mode,
4839 &lr_unsignedp, &volatilep, &lr_mask,
4841 rl_inner = decode_field_reference (rl_arg,
4842 &rl_bitsize, &rl_bitpos, &rl_mode,
4843 &rl_unsignedp, &volatilep, &rl_mask,
4845 rr_inner = decode_field_reference (rr_arg,
4846 &rr_bitsize, &rr_bitpos, &rr_mode,
4847 &rr_unsignedp, &volatilep, &rr_mask,
4850 /* It must be true that the inner operation on the lhs of each
4851 comparison must be the same if we are to be able to do anything.
4852 Then see if we have constants. If not, the same must be true for
4854 if (volatilep || ll_inner == 0 || rl_inner == 0
4855 || ! operand_equal_p (ll_inner, rl_inner, 0))
4858 if (TREE_CODE (lr_arg) == INTEGER_CST
4859 && TREE_CODE (rr_arg) == INTEGER_CST)
4860 l_const = lr_arg, r_const = rr_arg;
4861 else if (lr_inner == 0 || rr_inner == 0
4862 || ! operand_equal_p (lr_inner, rr_inner, 0))
4865 l_const = r_const = 0;
4867 /* If either comparison code is not correct for our logical operation,
4868 fail. However, we can convert a one-bit comparison against zero into
4869 the opposite comparison against that bit being set in the field. */
4871 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4872 if (lcode != wanted_code)
4874 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4876 /* Make the left operand unsigned, since we are only interested
4877 in the value of one bit. Otherwise we are doing the wrong
4886 /* This is analogous to the code for l_const above. */
4887 if (rcode != wanted_code)
4889 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4898 /* After this point all optimizations will generate bit-field
4899 references, which we might not want. */
4900 if (! lang_hooks.can_use_bit_fields_p ())
4903 /* See if we can find a mode that contains both fields being compared on
4904 the left. If we can't, fail. Otherwise, update all constants and masks
4905 to be relative to a field of that size. */
4906 first_bit = MIN (ll_bitpos, rl_bitpos);
4907 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4908 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4909 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4911 if (lnmode == VOIDmode)
4914 lnbitsize = GET_MODE_BITSIZE (lnmode);
4915 lnbitpos = first_bit & ~ (lnbitsize - 1);
4916 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4917 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4919 if (BYTES_BIG_ENDIAN)
4921 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4922 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4925 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4926 size_int (xll_bitpos), 0);
4927 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4928 size_int (xrl_bitpos), 0);
4932 l_const = fold_convert (lntype, l_const);
4933 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4934 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4935 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4936 fold_build1 (BIT_NOT_EXPR,
4940 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4942 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4947 r_const = fold_convert (lntype, r_const);
4948 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4949 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4950 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4951 fold_build1 (BIT_NOT_EXPR,
4955 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4957 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4961 /* If the right sides are not constant, do the same for it. Also,
4962 disallow this optimization if a size or signedness mismatch occurs
4963 between the left and right sides. */
4966 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4967 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4968 /* Make sure the two fields on the right
4969 correspond to the left without being swapped. */
4970 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4973 first_bit = MIN (lr_bitpos, rr_bitpos);
4974 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4975 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4976 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4978 if (rnmode == VOIDmode)
4981 rnbitsize = GET_MODE_BITSIZE (rnmode);
4982 rnbitpos = first_bit & ~ (rnbitsize - 1);
4983 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4984 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4986 if (BYTES_BIG_ENDIAN)
4988 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4989 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4992 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4993 size_int (xlr_bitpos), 0);
4994 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4995 size_int (xrr_bitpos), 0);
4997 /* Make a mask that corresponds to both fields being compared.
4998 Do this for both items being compared. If the operands are the
4999 same size and the bits being compared are in the same position
5000 then we can do this by masking both and comparing the masked
5002 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5003 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5004 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5006 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5007 ll_unsignedp || rl_unsignedp);
5008 if (! all_ones_mask_p (ll_mask, lnbitsize))
5009 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5011 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5012 lr_unsignedp || rr_unsignedp);
5013 if (! all_ones_mask_p (lr_mask, rnbitsize))
5014 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5016 return build2 (wanted_code, truth_type, lhs, rhs);
5019 /* There is still another way we can do something: If both pairs of
5020 fields being compared are adjacent, we may be able to make a wider
5021 field containing them both.
5023 Note that we still must mask the lhs/rhs expressions. Furthermore,
5024 the mask must be shifted to account for the shift done by
5025 make_bit_field_ref. */
5026 if ((ll_bitsize + ll_bitpos == rl_bitpos
5027 && lr_bitsize + lr_bitpos == rr_bitpos)
5028 || (ll_bitpos == rl_bitpos + rl_bitsize
5029 && lr_bitpos == rr_bitpos + rr_bitsize))
5033 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5034 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5035 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5036 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5038 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5039 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5040 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5041 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5043 /* Convert to the smaller type before masking out unwanted bits. */
5045 if (lntype != rntype)
5047 if (lnbitsize > rnbitsize)
5049 lhs = fold_convert (rntype, lhs);
5050 ll_mask = fold_convert (rntype, ll_mask);
5053 else if (lnbitsize < rnbitsize)
5055 rhs = fold_convert (lntype, rhs);
5056 lr_mask = fold_convert (lntype, lr_mask);
5061 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5062 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5064 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5065 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5067 return build2 (wanted_code, truth_type, lhs, rhs);
5073 /* Handle the case of comparisons with constants. If there is something in
5074 common between the masks, those bits of the constants must be the same.
5075 If not, the condition is always false. Test for this to avoid generating
5076 incorrect code below. */
5077 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5078 if (! integer_zerop (result)
5079 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5080 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5082 if (wanted_code == NE_EXPR)
5084 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5085 return constant_boolean_node (true, truth_type);
5089 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5090 return constant_boolean_node (false, truth_type);
5094 /* Construct the expression we will return. First get the component
5095 reference we will make. Unless the mask is all ones the width of
5096 that field, perform the mask operation. Then compare with the
5098 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5099 ll_unsignedp || rl_unsignedp);
5101 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5102 if (! all_ones_mask_p (ll_mask, lnbitsize))
5103 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5105 return build2 (wanted_code, truth_type, result,
5106 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5109 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5113 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5116 enum tree_code op_code;
5117 tree comp_const = op1;
5119 int consts_equal, consts_lt;
5122 STRIP_SIGN_NOPS (arg0);
5124 op_code = TREE_CODE (arg0);
5125 minmax_const = TREE_OPERAND (arg0, 1);
5126 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5127 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5128 inner = TREE_OPERAND (arg0, 0);
5130 /* If something does not permit us to optimize, return the original tree. */
5131 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5132 || TREE_CODE (comp_const) != INTEGER_CST
5133 || TREE_CONSTANT_OVERFLOW (comp_const)
5134 || TREE_CODE (minmax_const) != INTEGER_CST
5135 || TREE_CONSTANT_OVERFLOW (minmax_const))
5138 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5139 and GT_EXPR, doing the rest with recursive calls using logical
5143 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5145 /* FIXME: We should be able to invert code without building a
5146 scratch tree node, but doing so would require us to
5147 duplicate a part of invert_truthvalue here. */
5148 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5149 tem = optimize_minmax_comparison (TREE_CODE (tem),
5151 TREE_OPERAND (tem, 0),
5152 TREE_OPERAND (tem, 1));
5153 return invert_truthvalue (tem);
5158 fold_build2 (TRUTH_ORIF_EXPR, type,
5159 optimize_minmax_comparison
5160 (EQ_EXPR, type, arg0, comp_const),
5161 optimize_minmax_comparison
5162 (GT_EXPR, type, arg0, comp_const));
5165 if (op_code == MAX_EXPR && consts_equal)
5166 /* MAX (X, 0) == 0 -> X <= 0 */
5167 return fold_build2 (LE_EXPR, type, inner, comp_const);
5169 else if (op_code == MAX_EXPR && consts_lt)
5170 /* MAX (X, 0) == 5 -> X == 5 */
5171 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5173 else if (op_code == MAX_EXPR)
5174 /* MAX (X, 0) == -1 -> false */
5175 return omit_one_operand (type, integer_zero_node, inner);
5177 else if (consts_equal)
5178 /* MIN (X, 0) == 0 -> X >= 0 */
5179 return fold_build2 (GE_EXPR, type, inner, comp_const);
5182 /* MIN (X, 0) == 5 -> false */
5183 return omit_one_operand (type, integer_zero_node, inner);
5186 /* MIN (X, 0) == -1 -> X == -1 */
5187 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5190 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5191 /* MAX (X, 0) > 0 -> X > 0
5192 MAX (X, 0) > 5 -> X > 5 */
5193 return fold_build2 (GT_EXPR, type, inner, comp_const);
5195 else if (op_code == MAX_EXPR)
5196 /* MAX (X, 0) > -1 -> true */
5197 return omit_one_operand (type, integer_one_node, inner);
5199 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5200 /* MIN (X, 0) > 0 -> false
5201 MIN (X, 0) > 5 -> false */
5202 return omit_one_operand (type, integer_zero_node, inner);
5205 /* MIN (X, 0) > -1 -> X > -1 */
5206 return fold_build2 (GT_EXPR, type, inner, comp_const);
5213 /* T is an integer expression that is being multiplied, divided, or taken a
5214 modulus (CODE says which and what kind of divide or modulus) by a
5215 constant C. See if we can eliminate that operation by folding it with
5216 other operations already in T. WIDE_TYPE, if non-null, is a type that
5217 should be used for the computation if wider than our type.
5219 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5220 (X * 2) + (Y * 4). We must, however, be assured that either the original
5221 expression would not overflow or that overflow is undefined for the type
5222 in the language in question.
5224 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5225 the machine has a multiply-accumulate insn or that this is part of an
5226 addressing calculation.
5228 If we return a non-null expression, it is an equivalent form of the
5229 original computation, but need not be in the original type. */
5232 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5234 /* To avoid exponential search depth, refuse to allow recursion past
5235 three levels. Beyond that (1) it's highly unlikely that we'll find
5236 something interesting and (2) we've probably processed it before
5237 when we built the inner expression. */
5246 ret = extract_muldiv_1 (t, c, code, wide_type);
5253 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5255 tree type = TREE_TYPE (t);
5256 enum tree_code tcode = TREE_CODE (t);
5257 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5258 > GET_MODE_SIZE (TYPE_MODE (type)))
5259 ? wide_type : type);
5261 int same_p = tcode == code;
5262 tree op0 = NULL_TREE, op1 = NULL_TREE;
5264 /* Don't deal with constants of zero here; they confuse the code below. */
5265 if (integer_zerop (c))
5268 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5269 op0 = TREE_OPERAND (t, 0);
5271 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5272 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5274 /* Note that we need not handle conditional operations here since fold
5275 already handles those cases. So just do arithmetic here. */
5279 /* For a constant, we can always simplify if we are a multiply
5280 or (for divide and modulus) if it is a multiple of our constant. */
5281 if (code == MULT_EXPR
5282 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5283 return const_binop (code, fold_convert (ctype, t),
5284 fold_convert (ctype, c), 0);
5287 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5288 /* If op0 is an expression ... */
5289 if ((COMPARISON_CLASS_P (op0)
5290 || UNARY_CLASS_P (op0)
5291 || BINARY_CLASS_P (op0)
5292 || EXPRESSION_CLASS_P (op0))
5293 /* ... and is unsigned, and its type is smaller than ctype,
5294 then we cannot pass through as widening. */
5295 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5296 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5297 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5298 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5299 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5300 /* ... or this is a truncation (t is narrower than op0),
5301 then we cannot pass through this narrowing. */
5302 || (GET_MODE_SIZE (TYPE_MODE (type))
5303 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5304 /* ... or signedness changes for division or modulus,
5305 then we cannot pass through this conversion. */
5306 || (code != MULT_EXPR
5307 && (TYPE_UNSIGNED (ctype)
5308 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5311 /* Pass the constant down and see if we can make a simplification. If
5312 we can, replace this expression with the inner simplification for
5313 possible later conversion to our or some other type. */
5314 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5315 && TREE_CODE (t2) == INTEGER_CST
5316 && ! TREE_CONSTANT_OVERFLOW (t2)
5317 && (0 != (t1 = extract_muldiv (op0, t2, code,
5319 ? ctype : NULL_TREE))))
5324 /* If widening the type changes it from signed to unsigned, then we
5325 must avoid building ABS_EXPR itself as unsigned. */
5326 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5328 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5329 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5331 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5332 return fold_convert (ctype, t1);
5338 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5339 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5342 case MIN_EXPR: case MAX_EXPR:
5343 /* If widening the type changes the signedness, then we can't perform
5344 this optimization as that changes the result. */
5345 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5348 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5349 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5350 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5352 if (tree_int_cst_sgn (c) < 0)
5353 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5355 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5356 fold_convert (ctype, t2));
5360 case LSHIFT_EXPR: case RSHIFT_EXPR:
5361 /* If the second operand is constant, this is a multiplication
5362 or floor division, by a power of two, so we can treat it that
5363 way unless the multiplier or divisor overflows. Signed
5364 left-shift overflow is implementation-defined rather than
5365 undefined in C90, so do not convert signed left shift into
5367 if (TREE_CODE (op1) == INTEGER_CST
5368 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5369 /* const_binop may not detect overflow correctly,
5370 so check for it explicitly here. */
5371 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5372 && TREE_INT_CST_HIGH (op1) == 0
5373 && 0 != (t1 = fold_convert (ctype,
5374 const_binop (LSHIFT_EXPR,
5377 && ! TREE_OVERFLOW (t1))
5378 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5379 ? MULT_EXPR : FLOOR_DIV_EXPR,
5380 ctype, fold_convert (ctype, op0), t1),
5381 c, code, wide_type);
5384 case PLUS_EXPR: case MINUS_EXPR:
5385 /* See if we can eliminate the operation on both sides. If we can, we
5386 can return a new PLUS or MINUS. If we can't, the only remaining
5387 cases where we can do anything are if the second operand is a
5389 t1 = extract_muldiv (op0, c, code, wide_type);
5390 t2 = extract_muldiv (op1, c, code, wide_type);
5391 if (t1 != 0 && t2 != 0
5392 && (code == MULT_EXPR
5393 /* If not multiplication, we can only do this if both operands
5394 are divisible by c. */
5395 || (multiple_of_p (ctype, op0, c)
5396 && multiple_of_p (ctype, op1, c))))
5397 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5398 fold_convert (ctype, t2));
5400 /* If this was a subtraction, negate OP1 and set it to be an addition.
5401 This simplifies the logic below. */
5402 if (tcode == MINUS_EXPR)
5403 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5405 if (TREE_CODE (op1) != INTEGER_CST)
5408 /* If either OP1 or C are negative, this optimization is not safe for
5409 some of the division and remainder types while for others we need
5410 to change the code. */
5411 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5413 if (code == CEIL_DIV_EXPR)
5414 code = FLOOR_DIV_EXPR;
5415 else if (code == FLOOR_DIV_EXPR)
5416 code = CEIL_DIV_EXPR;
5417 else if (code != MULT_EXPR
5418 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5422 /* If it's a multiply or a division/modulus operation of a multiple
5423 of our constant, do the operation and verify it doesn't overflow. */
5424 if (code == MULT_EXPR
5425 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5427 op1 = const_binop (code, fold_convert (ctype, op1),
5428 fold_convert (ctype, c), 0);
5429 /* We allow the constant to overflow with wrapping semantics. */
5431 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5437 /* If we have an unsigned type is not a sizetype, we cannot widen
5438 the operation since it will change the result if the original
5439 computation overflowed. */
5440 if (TYPE_UNSIGNED (ctype)
5441 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5445 /* If we were able to eliminate our operation from the first side,
5446 apply our operation to the second side and reform the PLUS. */
5447 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5448 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5450 /* The last case is if we are a multiply. In that case, we can
5451 apply the distributive law to commute the multiply and addition
5452 if the multiplication of the constants doesn't overflow. */
5453 if (code == MULT_EXPR)
5454 return fold_build2 (tcode, ctype,
5455 fold_build2 (code, ctype,
5456 fold_convert (ctype, op0),
5457 fold_convert (ctype, c)),
5463 /* We have a special case here if we are doing something like
5464 (C * 8) % 4 since we know that's zero. */
5465 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5466 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5467 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5468 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5469 return omit_one_operand (type, integer_zero_node, op0);
5471 /* ... fall through ... */
5473 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5474 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5475 /* If we can extract our operation from the LHS, do so and return a
5476 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5477 do something only if the second operand is a constant. */
5479 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5480 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5481 fold_convert (ctype, op1));
5482 else if (tcode == MULT_EXPR && code == MULT_EXPR
5483 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5484 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5485 fold_convert (ctype, t1));
5486 else if (TREE_CODE (op1) != INTEGER_CST)
5489 /* If these are the same operation types, we can associate them
5490 assuming no overflow. */
5492 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5493 fold_convert (ctype, c), 0))
5494 && ! TREE_OVERFLOW (t1))
5495 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5497 /* If these operations "cancel" each other, we have the main
5498 optimizations of this pass, which occur when either constant is a
5499 multiple of the other, in which case we replace this with either an
5500 operation or CODE or TCODE.
5502 If we have an unsigned type that is not a sizetype, we cannot do
5503 this since it will change the result if the original computation
5505 if ((! TYPE_UNSIGNED (ctype)
5506 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5508 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5509 || (tcode == MULT_EXPR
5510 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5511 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5513 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5514 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5515 fold_convert (ctype,
5516 const_binop (TRUNC_DIV_EXPR,
5518 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5519 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5520 fold_convert (ctype,
5521 const_binop (TRUNC_DIV_EXPR,
5533 /* Return a node which has the indicated constant VALUE (either 0 or
5534 1), and is of the indicated TYPE. */
5537 constant_boolean_node (int value, tree type)
5539 if (type == integer_type_node)
5540 return value ? integer_one_node : integer_zero_node;
5541 else if (type == boolean_type_node)
5542 return value ? boolean_true_node : boolean_false_node;
5544 return build_int_cst (type, value);
5548 /* Return true if expr looks like an ARRAY_REF and set base and
5549 offset to the appropriate trees. If there is no offset,
5550 offset is set to NULL_TREE. Base will be canonicalized to
5551 something you can get the element type from using
5552 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5553 in bytes to the base. */
5556 extract_array_ref (tree expr, tree *base, tree *offset)
5558 /* One canonical form is a PLUS_EXPR with the first
5559 argument being an ADDR_EXPR with a possible NOP_EXPR
5561 if (TREE_CODE (expr) == PLUS_EXPR)
5563 tree op0 = TREE_OPERAND (expr, 0);
5564 tree inner_base, dummy1;
5565 /* Strip NOP_EXPRs here because the C frontends and/or
5566 folders present us (int *)&x.a + 4B possibly. */
5568 if (extract_array_ref (op0, &inner_base, &dummy1))
5571 if (dummy1 == NULL_TREE)
5572 *offset = TREE_OPERAND (expr, 1);
5574 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5575 dummy1, TREE_OPERAND (expr, 1));
5579 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5580 which we transform into an ADDR_EXPR with appropriate
5581 offset. For other arguments to the ADDR_EXPR we assume
5582 zero offset and as such do not care about the ADDR_EXPR
5583 type and strip possible nops from it. */
5584 else if (TREE_CODE (expr) == ADDR_EXPR)
5586 tree op0 = TREE_OPERAND (expr, 0);
5587 if (TREE_CODE (op0) == ARRAY_REF)
5589 tree idx = TREE_OPERAND (op0, 1);
5590 *base = TREE_OPERAND (op0, 0);
5591 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5592 array_ref_element_size (op0));
5596 /* Handle array-to-pointer decay as &a. */
5597 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5598 *base = TREE_OPERAND (expr, 0);
5601 *offset = NULL_TREE;
5605 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5606 else if (SSA_VAR_P (expr)
5607 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5610 *offset = NULL_TREE;
5618 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5619 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5620 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5621 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5622 COND is the first argument to CODE; otherwise (as in the example
5623 given here), it is the second argument. TYPE is the type of the
5624 original expression. Return NULL_TREE if no simplification is
5628 fold_binary_op_with_conditional_arg (enum tree_code code,
5629 tree type, tree op0, tree op1,
5630 tree cond, tree arg, int cond_first_p)
5632 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5633 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5634 tree test, true_value, false_value;
5635 tree lhs = NULL_TREE;
5636 tree rhs = NULL_TREE;
5638 /* This transformation is only worthwhile if we don't have to wrap
5639 arg in a SAVE_EXPR, and the operation can be simplified on at least
5640 one of the branches once its pushed inside the COND_EXPR. */
5641 if (!TREE_CONSTANT (arg))
5644 if (TREE_CODE (cond) == COND_EXPR)
5646 test = TREE_OPERAND (cond, 0);
5647 true_value = TREE_OPERAND (cond, 1);
5648 false_value = TREE_OPERAND (cond, 2);
5649 /* If this operand throws an expression, then it does not make
5650 sense to try to perform a logical or arithmetic operation
5652 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5654 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5659 tree testtype = TREE_TYPE (cond);
5661 true_value = constant_boolean_node (true, testtype);
5662 false_value = constant_boolean_node (false, testtype);
5665 arg = fold_convert (arg_type, arg);
5668 true_value = fold_convert (cond_type, true_value);
5670 lhs = fold_build2 (code, type, true_value, arg);
5672 lhs = fold_build2 (code, type, arg, true_value);
5676 false_value = fold_convert (cond_type, false_value);
5678 rhs = fold_build2 (code, type, false_value, arg);
5680 rhs = fold_build2 (code, type, arg, false_value);
5683 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5684 return fold_convert (type, test);
5688 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5690 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5691 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5692 ADDEND is the same as X.
5694 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5695 and finite. The problematic cases are when X is zero, and its mode
5696 has signed zeros. In the case of rounding towards -infinity,
5697 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5698 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5701 fold_real_zero_addition_p (tree type, tree addend, int negate)
5703 if (!real_zerop (addend))
5706 /* Don't allow the fold with -fsignaling-nans. */
5707 if (HONOR_SNANS (TYPE_MODE (type)))
5710 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5711 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5714 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5715 if (TREE_CODE (addend) == REAL_CST
5716 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5719 /* The mode has signed zeros, and we have to honor their sign.
5720 In this situation, there is only one case we can return true for.
5721 X - 0 is the same as X unless rounding towards -infinity is
5723 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5726 /* Subroutine of fold() that checks comparisons of built-in math
5727 functions against real constants.
5729 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5730 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5731 is the type of the result and ARG0 and ARG1 are the operands of the
5732 comparison. ARG1 must be a TREE_REAL_CST.
5734 The function returns the constant folded tree if a simplification
5735 can be made, and NULL_TREE otherwise. */
5738 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5739 tree type, tree arg0, tree arg1)
5743 if (BUILTIN_SQRT_P (fcode))
5745 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5746 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5748 c = TREE_REAL_CST (arg1);
5749 if (REAL_VALUE_NEGATIVE (c))
5751 /* sqrt(x) < y is always false, if y is negative. */
5752 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5753 return omit_one_operand (type, integer_zero_node, arg);
5755 /* sqrt(x) > y is always true, if y is negative and we
5756 don't care about NaNs, i.e. negative values of x. */
5757 if (code == NE_EXPR || !HONOR_NANS (mode))
5758 return omit_one_operand (type, integer_one_node, arg);
5760 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5761 return fold_build2 (GE_EXPR, type, arg,
5762 build_real (TREE_TYPE (arg), dconst0));
5764 else if (code == GT_EXPR || code == GE_EXPR)
5768 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5769 real_convert (&c2, mode, &c2);
5771 if (REAL_VALUE_ISINF (c2))
5773 /* sqrt(x) > y is x == +Inf, when y is very large. */
5774 if (HONOR_INFINITIES (mode))
5775 return fold_build2 (EQ_EXPR, type, arg,
5776 build_real (TREE_TYPE (arg), c2));
5778 /* sqrt(x) > y is always false, when y is very large
5779 and we don't care about infinities. */
5780 return omit_one_operand (type, integer_zero_node, arg);
5783 /* sqrt(x) > c is the same as x > c*c. */
5784 return fold_build2 (code, type, arg,
5785 build_real (TREE_TYPE (arg), c2));
5787 else if (code == LT_EXPR || code == LE_EXPR)
5791 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5792 real_convert (&c2, mode, &c2);
5794 if (REAL_VALUE_ISINF (c2))
5796 /* sqrt(x) < y is always true, when y is a very large
5797 value and we don't care about NaNs or Infinities. */
5798 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5799 return omit_one_operand (type, integer_one_node, arg);
5801 /* sqrt(x) < y is x != +Inf when y is very large and we
5802 don't care about NaNs. */
5803 if (! HONOR_NANS (mode))
5804 return fold_build2 (NE_EXPR, type, arg,
5805 build_real (TREE_TYPE (arg), c2));
5807 /* sqrt(x) < y is x >= 0 when y is very large and we
5808 don't care about Infinities. */
5809 if (! HONOR_INFINITIES (mode))
5810 return fold_build2 (GE_EXPR, type, arg,
5811 build_real (TREE_TYPE (arg), dconst0));
5813 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5814 if (lang_hooks.decls.global_bindings_p () != 0
5815 || CONTAINS_PLACEHOLDER_P (arg))
5818 arg = save_expr (arg);
5819 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5820 fold_build2 (GE_EXPR, type, arg,
5821 build_real (TREE_TYPE (arg),
5823 fold_build2 (NE_EXPR, type, arg,
5824 build_real (TREE_TYPE (arg),
5828 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5829 if (! HONOR_NANS (mode))
5830 return fold_build2 (code, type, arg,
5831 build_real (TREE_TYPE (arg), c2));
5833 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5834 if (lang_hooks.decls.global_bindings_p () == 0
5835 && ! CONTAINS_PLACEHOLDER_P (arg))
5837 arg = save_expr (arg);
5838 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5839 fold_build2 (GE_EXPR, type, arg,
5840 build_real (TREE_TYPE (arg),
5842 fold_build2 (code, type, arg,
5843 build_real (TREE_TYPE (arg),
5852 /* Subroutine of fold() that optimizes comparisons against Infinities,
5853 either +Inf or -Inf.
5855 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5856 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5857 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5859 The function returns the constant folded tree if a simplification
5860 can be made, and NULL_TREE otherwise. */
5863 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5865 enum machine_mode mode;
5866 REAL_VALUE_TYPE max;
5870 mode = TYPE_MODE (TREE_TYPE (arg0));
5872 /* For negative infinity swap the sense of the comparison. */
5873 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5875 code = swap_tree_comparison (code);
5880 /* x > +Inf is always false, if with ignore sNANs. */
5881 if (HONOR_SNANS (mode))
5883 return omit_one_operand (type, integer_zero_node, arg0);
5886 /* x <= +Inf is always true, if we don't case about NaNs. */
5887 if (! HONOR_NANS (mode))
5888 return omit_one_operand (type, integer_one_node, arg0);
5890 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5891 if (lang_hooks.decls.global_bindings_p () == 0
5892 && ! CONTAINS_PLACEHOLDER_P (arg0))
5894 arg0 = save_expr (arg0);
5895 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5901 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5902 real_maxval (&max, neg, mode);
5903 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5904 arg0, build_real (TREE_TYPE (arg0), max));
5907 /* x < +Inf is always equal to x <= DBL_MAX. */
5908 real_maxval (&max, neg, mode);
5909 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5910 arg0, build_real (TREE_TYPE (arg0), max));
5913 /* x != +Inf is always equal to !(x > DBL_MAX). */
5914 real_maxval (&max, neg, mode);
5915 if (! HONOR_NANS (mode))
5916 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5917 arg0, build_real (TREE_TYPE (arg0), max));
5919 /* The transformation below creates non-gimple code and thus is
5920 not appropriate if we are in gimple form. */
5924 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5925 arg0, build_real (TREE_TYPE (arg0), max));
5926 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5935 /* Subroutine of fold() that optimizes comparisons of a division by
5936 a nonzero integer constant against an integer constant, i.e.
5939 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5940 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5941 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5943 The function returns the constant folded tree if a simplification
5944 can be made, and NULL_TREE otherwise. */
5947 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5949 tree prod, tmp, hi, lo;
5950 tree arg00 = TREE_OPERAND (arg0, 0);
5951 tree arg01 = TREE_OPERAND (arg0, 1);
5952 unsigned HOST_WIDE_INT lpart;
5953 HOST_WIDE_INT hpart;
5956 /* We have to do this the hard way to detect unsigned overflow.
5957 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5958 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5959 TREE_INT_CST_HIGH (arg01),
5960 TREE_INT_CST_LOW (arg1),
5961 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5962 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5963 prod = force_fit_type (prod, -1, overflow, false);
5965 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5967 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5970 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5971 overflow = add_double (TREE_INT_CST_LOW (prod),
5972 TREE_INT_CST_HIGH (prod),
5973 TREE_INT_CST_LOW (tmp),
5974 TREE_INT_CST_HIGH (tmp),
5976 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5977 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5978 TREE_CONSTANT_OVERFLOW (prod));
5980 else if (tree_int_cst_sgn (arg01) >= 0)
5982 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5983 switch (tree_int_cst_sgn (arg1))
5986 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5991 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5996 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6006 /* A negative divisor reverses the relational operators. */
6007 code = swap_tree_comparison (code);
6009 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6010 switch (tree_int_cst_sgn (arg1))
6013 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6018 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6023 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6035 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6036 return omit_one_operand (type, integer_zero_node, arg00);
6037 if (TREE_OVERFLOW (hi))
6038 return fold_build2 (GE_EXPR, type, arg00, lo);
6039 if (TREE_OVERFLOW (lo))
6040 return fold_build2 (LE_EXPR, type, arg00, hi);
6041 return build_range_check (type, arg00, 1, lo, hi);
6044 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6045 return omit_one_operand (type, integer_one_node, arg00);
6046 if (TREE_OVERFLOW (hi))
6047 return fold_build2 (LT_EXPR, type, arg00, lo);
6048 if (TREE_OVERFLOW (lo))
6049 return fold_build2 (GT_EXPR, type, arg00, hi);
6050 return build_range_check (type, arg00, 0, lo, hi);
6053 if (TREE_OVERFLOW (lo))
6054 return omit_one_operand (type, integer_zero_node, arg00);
6055 return fold_build2 (LT_EXPR, type, arg00, lo);
6058 if (TREE_OVERFLOW (hi))
6059 return omit_one_operand (type, integer_one_node, arg00);
6060 return fold_build2 (LE_EXPR, type, arg00, hi);
6063 if (TREE_OVERFLOW (hi))
6064 return omit_one_operand (type, integer_zero_node, arg00);
6065 return fold_build2 (GT_EXPR, type, arg00, hi);
6068 if (TREE_OVERFLOW (lo))
6069 return omit_one_operand (type, integer_one_node, arg00);
6070 return fold_build2 (GE_EXPR, type, arg00, lo);
6080 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6081 equality/inequality test, then return a simplified form of the test
6082 using a sign testing. Otherwise return NULL. TYPE is the desired
6086 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6089 /* If this is testing a single bit, we can optimize the test. */
6090 if ((code == NE_EXPR || code == EQ_EXPR)
6091 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6092 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6094 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6095 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6096 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6098 if (arg00 != NULL_TREE
6099 /* This is only a win if casting to a signed type is cheap,
6100 i.e. when arg00's type is not a partial mode. */
6101 && TYPE_PRECISION (TREE_TYPE (arg00))
6102 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6104 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6105 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6106 result_type, fold_convert (stype, arg00),
6107 fold_convert (stype, integer_zero_node));
6114 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6115 equality/inequality test, then return a simplified form of
6116 the test using shifts and logical operations. Otherwise return
6117 NULL. TYPE is the desired result type. */
6120 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6123 /* If this is testing a single bit, we can optimize the test. */
6124 if ((code == NE_EXPR || code == EQ_EXPR)
6125 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6126 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6128 tree inner = TREE_OPERAND (arg0, 0);
6129 tree type = TREE_TYPE (arg0);
6130 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6131 enum machine_mode operand_mode = TYPE_MODE (type);
6133 tree signed_type, unsigned_type, intermediate_type;
6136 /* First, see if we can fold the single bit test into a sign-bit
6138 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6143 /* Otherwise we have (A & C) != 0 where C is a single bit,
6144 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6145 Similarly for (A & C) == 0. */
6147 /* If INNER is a right shift of a constant and it plus BITNUM does
6148 not overflow, adjust BITNUM and INNER. */
6149 if (TREE_CODE (inner) == RSHIFT_EXPR
6150 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6151 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6152 && bitnum < TYPE_PRECISION (type)
6153 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6154 bitnum - TYPE_PRECISION (type)))
6156 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6157 inner = TREE_OPERAND (inner, 0);
6160 /* If we are going to be able to omit the AND below, we must do our
6161 operations as unsigned. If we must use the AND, we have a choice.
6162 Normally unsigned is faster, but for some machines signed is. */
6163 #ifdef LOAD_EXTEND_OP
6164 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6165 && !flag_syntax_only) ? 0 : 1;
6170 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6171 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6172 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6173 inner = fold_convert (intermediate_type, inner);
6176 inner = build2 (RSHIFT_EXPR, intermediate_type,
6177 inner, size_int (bitnum));
6179 if (code == EQ_EXPR)
6180 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6181 inner, integer_one_node);
6183 /* Put the AND last so it can combine with more things. */
6184 inner = build2 (BIT_AND_EXPR, intermediate_type,
6185 inner, integer_one_node);
6187 /* Make sure to return the proper type. */
6188 inner = fold_convert (result_type, inner);
6195 /* Check whether we are allowed to reorder operands arg0 and arg1,
6196 such that the evaluation of arg1 occurs before arg0. */
6199 reorder_operands_p (tree arg0, tree arg1)
6201 if (! flag_evaluation_order)
6203 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6205 return ! TREE_SIDE_EFFECTS (arg0)
6206 && ! TREE_SIDE_EFFECTS (arg1);
6209 /* Test whether it is preferable two swap two operands, ARG0 and
6210 ARG1, for example because ARG0 is an integer constant and ARG1
6211 isn't. If REORDER is true, only recommend swapping if we can
6212 evaluate the operands in reverse order. */
6215 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6217 STRIP_SIGN_NOPS (arg0);
6218 STRIP_SIGN_NOPS (arg1);
6220 if (TREE_CODE (arg1) == INTEGER_CST)
6222 if (TREE_CODE (arg0) == INTEGER_CST)
6225 if (TREE_CODE (arg1) == REAL_CST)
6227 if (TREE_CODE (arg0) == REAL_CST)
6230 if (TREE_CODE (arg1) == COMPLEX_CST)
6232 if (TREE_CODE (arg0) == COMPLEX_CST)
6235 if (TREE_CONSTANT (arg1))
6237 if (TREE_CONSTANT (arg0))
6243 if (reorder && flag_evaluation_order
6244 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6252 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6253 for commutative and comparison operators. Ensuring a canonical
6254 form allows the optimizers to find additional redundancies without
6255 having to explicitly check for both orderings. */
6256 if (TREE_CODE (arg0) == SSA_NAME
6257 && TREE_CODE (arg1) == SSA_NAME
6258 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6264 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6265 ARG0 is extended to a wider type. */
6268 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6270 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6272 tree shorter_type, outer_type;
6276 if (arg0_unw == arg0)
6278 shorter_type = TREE_TYPE (arg0_unw);
6280 #ifdef HAVE_canonicalize_funcptr_for_compare
6281 /* Disable this optimization if we're casting a function pointer
6282 type on targets that require function pointer canonicalization. */
6283 if (HAVE_canonicalize_funcptr_for_compare
6284 && TREE_CODE (shorter_type) == POINTER_TYPE
6285 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6289 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6292 arg1_unw = get_unwidened (arg1, shorter_type);
6294 /* If possible, express the comparison in the shorter mode. */
6295 if ((code == EQ_EXPR || code == NE_EXPR
6296 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6297 && (TREE_TYPE (arg1_unw) == shorter_type
6298 || (TREE_CODE (arg1_unw) == INTEGER_CST
6299 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6300 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6301 && int_fits_type_p (arg1_unw, shorter_type))))
6302 return fold_build2 (code, type, arg0_unw,
6303 fold_convert (shorter_type, arg1_unw));
6305 if (TREE_CODE (arg1_unw) != INTEGER_CST
6306 || TREE_CODE (shorter_type) != INTEGER_TYPE
6307 || !int_fits_type_p (arg1_unw, shorter_type))
6310 /* If we are comparing with the integer that does not fit into the range
6311 of the shorter type, the result is known. */
6312 outer_type = TREE_TYPE (arg1_unw);
6313 min = lower_bound_in_type (outer_type, shorter_type);
6314 max = upper_bound_in_type (outer_type, shorter_type);
6316 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6318 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6325 return omit_one_operand (type, integer_zero_node, arg0);
6330 return omit_one_operand (type, integer_one_node, arg0);
6336 return omit_one_operand (type, integer_one_node, arg0);
6338 return omit_one_operand (type, integer_zero_node, arg0);
6343 return omit_one_operand (type, integer_zero_node, arg0);
6345 return omit_one_operand (type, integer_one_node, arg0);
6354 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6355 ARG0 just the signedness is changed. */
6358 fold_sign_changed_comparison (enum tree_code code, tree type,
6359 tree arg0, tree arg1)
6361 tree arg0_inner, tmp;
6362 tree inner_type, outer_type;
6364 if (TREE_CODE (arg0) != NOP_EXPR
6365 && TREE_CODE (arg0) != CONVERT_EXPR)
6368 outer_type = TREE_TYPE (arg0);
6369 arg0_inner = TREE_OPERAND (arg0, 0);
6370 inner_type = TREE_TYPE (arg0_inner);
6372 #ifdef HAVE_canonicalize_funcptr_for_compare
6373 /* Disable this optimization if we're casting a function pointer
6374 type on targets that require function pointer canonicalization. */
6375 if (HAVE_canonicalize_funcptr_for_compare
6376 && TREE_CODE (inner_type) == POINTER_TYPE
6377 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6381 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6384 if (TREE_CODE (arg1) != INTEGER_CST
6385 && !((TREE_CODE (arg1) == NOP_EXPR
6386 || TREE_CODE (arg1) == CONVERT_EXPR)
6387 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6390 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6395 if (TREE_CODE (arg1) == INTEGER_CST)
6397 tmp = build_int_cst_wide (inner_type,
6398 TREE_INT_CST_LOW (arg1),
6399 TREE_INT_CST_HIGH (arg1));
6400 arg1 = force_fit_type (tmp, 0,
6401 TREE_OVERFLOW (arg1),
6402 TREE_CONSTANT_OVERFLOW (arg1));
6405 arg1 = fold_convert (inner_type, arg1);
6407 return fold_build2 (code, type, arg0_inner, arg1);
6410 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6411 step of the array. Reconstructs s and delta in the case of s * delta
6412 being an integer constant (and thus already folded).
6413 ADDR is the address. MULT is the multiplicative expression.
6414 If the function succeeds, the new address expression is returned. Otherwise
6415 NULL_TREE is returned. */
6418 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6420 tree s, delta, step;
6421 tree ref = TREE_OPERAND (addr, 0), pref;
6425 /* Canonicalize op1 into a possibly non-constant delta
6426 and an INTEGER_CST s. */
6427 if (TREE_CODE (op1) == MULT_EXPR)
6429 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6434 if (TREE_CODE (arg0) == INTEGER_CST)
6439 else if (TREE_CODE (arg1) == INTEGER_CST)
6447 else if (TREE_CODE (op1) == INTEGER_CST)
6454 /* Simulate we are delta * 1. */
6456 s = integer_one_node;
6459 for (;; ref = TREE_OPERAND (ref, 0))
6461 if (TREE_CODE (ref) == ARRAY_REF)
6463 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6467 step = array_ref_element_size (ref);
6468 if (TREE_CODE (step) != INTEGER_CST)
6473 if (! tree_int_cst_equal (step, s))
6478 /* Try if delta is a multiple of step. */
6479 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6488 if (!handled_component_p (ref))
6492 /* We found the suitable array reference. So copy everything up to it,
6493 and replace the index. */
6495 pref = TREE_OPERAND (addr, 0);
6496 ret = copy_node (pref);
6501 pref = TREE_OPERAND (pref, 0);
6502 TREE_OPERAND (pos, 0) = copy_node (pref);
6503 pos = TREE_OPERAND (pos, 0);
6506 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6507 fold_convert (itype,
6508 TREE_OPERAND (pos, 1)),
6509 fold_convert (itype, delta));
6511 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6515 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6516 means A >= Y && A != MAX, but in this case we know that
6517 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6520 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6522 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6524 if (TREE_CODE (bound) == LT_EXPR)
6525 a = TREE_OPERAND (bound, 0);
6526 else if (TREE_CODE (bound) == GT_EXPR)
6527 a = TREE_OPERAND (bound, 1);
6531 typea = TREE_TYPE (a);
6532 if (!INTEGRAL_TYPE_P (typea)
6533 && !POINTER_TYPE_P (typea))
6536 if (TREE_CODE (ineq) == LT_EXPR)
6538 a1 = TREE_OPERAND (ineq, 1);
6539 y = TREE_OPERAND (ineq, 0);
6541 else if (TREE_CODE (ineq) == GT_EXPR)
6543 a1 = TREE_OPERAND (ineq, 0);
6544 y = TREE_OPERAND (ineq, 1);
6549 if (TREE_TYPE (a1) != typea)
6552 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6553 if (!integer_onep (diff))
6556 return fold_build2 (GE_EXPR, type, a, y);
6559 /* Fold a sum or difference of at least one multiplication.
6560 Returns the folded tree or NULL if no simplification could be made. */
6563 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6565 tree arg00, arg01, arg10, arg11;
6566 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6568 /* (A * C) +- (B * C) -> (A+-B) * C.
6569 (A * C) +- A -> A * (C+-1).
6570 We are most concerned about the case where C is a constant,
6571 but other combinations show up during loop reduction. Since
6572 it is not difficult, try all four possibilities. */
6574 if (TREE_CODE (arg0) == MULT_EXPR)
6576 arg00 = TREE_OPERAND (arg0, 0);
6577 arg01 = TREE_OPERAND (arg0, 1);
6582 if (!FLOAT_TYPE_P (type))
6583 arg01 = build_int_cst (type, 1);
6585 arg01 = build_real (type, dconst1);
6587 if (TREE_CODE (arg1) == MULT_EXPR)
6589 arg10 = TREE_OPERAND (arg1, 0);
6590 arg11 = TREE_OPERAND (arg1, 1);
6595 if (!FLOAT_TYPE_P (type))
6596 arg11 = build_int_cst (type, 1);
6598 arg11 = build_real (type, dconst1);
6602 if (operand_equal_p (arg01, arg11, 0))
6603 same = arg01, alt0 = arg00, alt1 = arg10;
6604 else if (operand_equal_p (arg00, arg10, 0))
6605 same = arg00, alt0 = arg01, alt1 = arg11;
6606 else if (operand_equal_p (arg00, arg11, 0))
6607 same = arg00, alt0 = arg01, alt1 = arg10;
6608 else if (operand_equal_p (arg01, arg10, 0))
6609 same = arg01, alt0 = arg00, alt1 = arg11;
6611 /* No identical multiplicands; see if we can find a common
6612 power-of-two factor in non-power-of-two multiplies. This
6613 can help in multi-dimensional array access. */
6614 else if (host_integerp (arg01, 0)
6615 && host_integerp (arg11, 0))
6617 HOST_WIDE_INT int01, int11, tmp;
6620 int01 = TREE_INT_CST_LOW (arg01);
6621 int11 = TREE_INT_CST_LOW (arg11);
6623 /* Move min of absolute values to int11. */
6624 if ((int01 >= 0 ? int01 : -int01)
6625 < (int11 >= 0 ? int11 : -int11))
6627 tmp = int01, int01 = int11, int11 = tmp;
6628 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6635 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6637 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6638 build_int_cst (TREE_TYPE (arg00),
6643 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6648 return fold_build2 (MULT_EXPR, type,
6649 fold_build2 (code, type,
6650 fold_convert (type, alt0),
6651 fold_convert (type, alt1)),
6652 fold_convert (type, same));
6657 /* Fold a unary expression of code CODE and type TYPE with operand
6658 OP0. Return the folded expression if folding is successful.
6659 Otherwise, return NULL_TREE. */
6662 fold_unary (enum tree_code code, tree type, tree op0)
6666 enum tree_code_class kind = TREE_CODE_CLASS (code);
6668 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6669 && TREE_CODE_LENGTH (code) == 1);
6674 if (code == NOP_EXPR || code == CONVERT_EXPR
6675 || code == FLOAT_EXPR || code == ABS_EXPR)
6677 /* Don't use STRIP_NOPS, because signedness of argument type
6679 STRIP_SIGN_NOPS (arg0);
6683 /* Strip any conversions that don't change the mode. This
6684 is safe for every expression, except for a comparison
6685 expression because its signedness is derived from its
6688 Note that this is done as an internal manipulation within
6689 the constant folder, in order to find the simplest
6690 representation of the arguments so that their form can be
6691 studied. In any cases, the appropriate type conversions
6692 should be put back in the tree that will get out of the
6698 if (TREE_CODE_CLASS (code) == tcc_unary)
6700 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6701 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6702 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6703 else if (TREE_CODE (arg0) == COND_EXPR)
6705 tree arg01 = TREE_OPERAND (arg0, 1);
6706 tree arg02 = TREE_OPERAND (arg0, 2);
6707 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6708 arg01 = fold_build1 (code, type, arg01);
6709 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6710 arg02 = fold_build1 (code, type, arg02);
6711 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6714 /* If this was a conversion, and all we did was to move into
6715 inside the COND_EXPR, bring it back out. But leave it if
6716 it is a conversion from integer to integer and the
6717 result precision is no wider than a word since such a
6718 conversion is cheap and may be optimized away by combine,
6719 while it couldn't if it were outside the COND_EXPR. Then return
6720 so we don't get into an infinite recursion loop taking the
6721 conversion out and then back in. */
6723 if ((code == NOP_EXPR || code == CONVERT_EXPR
6724 || code == NON_LVALUE_EXPR)
6725 && TREE_CODE (tem) == COND_EXPR
6726 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6727 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6728 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6729 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6730 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6731 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6732 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6734 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6735 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6736 || flag_syntax_only))
6737 tem = build1 (code, type,
6739 TREE_TYPE (TREE_OPERAND
6740 (TREE_OPERAND (tem, 1), 0)),
6741 TREE_OPERAND (tem, 0),
6742 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6743 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6746 else if (COMPARISON_CLASS_P (arg0))
6748 if (TREE_CODE (type) == BOOLEAN_TYPE)
6750 arg0 = copy_node (arg0);
6751 TREE_TYPE (arg0) = type;
6754 else if (TREE_CODE (type) != INTEGER_TYPE)
6755 return fold_build3 (COND_EXPR, type, arg0,
6756 fold_build1 (code, type,
6758 fold_build1 (code, type,
6759 integer_zero_node));
6768 case FIX_TRUNC_EXPR:
6770 case FIX_FLOOR_EXPR:
6771 case FIX_ROUND_EXPR:
6772 if (TREE_TYPE (op0) == type)
6775 /* If we have (type) (a CMP b) and type is an integal type, return
6776 new expression involving the new type. */
6777 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6778 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6779 TREE_OPERAND (op0, 1));
6781 /* Handle cases of two conversions in a row. */
6782 if (TREE_CODE (op0) == NOP_EXPR
6783 || TREE_CODE (op0) == CONVERT_EXPR)
6785 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6786 tree inter_type = TREE_TYPE (op0);
6787 int inside_int = INTEGRAL_TYPE_P (inside_type);
6788 int inside_ptr = POINTER_TYPE_P (inside_type);
6789 int inside_float = FLOAT_TYPE_P (inside_type);
6790 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6791 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6792 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6793 int inter_int = INTEGRAL_TYPE_P (inter_type);
6794 int inter_ptr = POINTER_TYPE_P (inter_type);
6795 int inter_float = FLOAT_TYPE_P (inter_type);
6796 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6797 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6798 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6799 int final_int = INTEGRAL_TYPE_P (type);
6800 int final_ptr = POINTER_TYPE_P (type);
6801 int final_float = FLOAT_TYPE_P (type);
6802 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6803 unsigned int final_prec = TYPE_PRECISION (type);
6804 int final_unsignedp = TYPE_UNSIGNED (type);
6806 /* In addition to the cases of two conversions in a row
6807 handled below, if we are converting something to its own
6808 type via an object of identical or wider precision, neither
6809 conversion is needed. */
6810 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6811 && ((inter_int && final_int) || (inter_float && final_float))
6812 && inter_prec >= final_prec)
6813 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6815 /* Likewise, if the intermediate and final types are either both
6816 float or both integer, we don't need the middle conversion if
6817 it is wider than the final type and doesn't change the signedness
6818 (for integers). Avoid this if the final type is a pointer
6819 since then we sometimes need the inner conversion. Likewise if
6820 the outer has a precision not equal to the size of its mode. */
6821 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6822 || (inter_float && inside_float)
6823 || (inter_vec && inside_vec))
6824 && inter_prec >= inside_prec
6825 && (inter_float || inter_vec
6826 || inter_unsignedp == inside_unsignedp)
6827 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6828 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6830 && (! final_vec || inter_prec == inside_prec))
6831 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6833 /* If we have a sign-extension of a zero-extended value, we can
6834 replace that by a single zero-extension. */
6835 if (inside_int && inter_int && final_int
6836 && inside_prec < inter_prec && inter_prec < final_prec
6837 && inside_unsignedp && !inter_unsignedp)
6838 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6840 /* Two conversions in a row are not needed unless:
6841 - some conversion is floating-point (overstrict for now), or
6842 - some conversion is a vector (overstrict for now), or
6843 - the intermediate type is narrower than both initial and
6845 - the intermediate type and innermost type differ in signedness,
6846 and the outermost type is wider than the intermediate, or
6847 - the initial type is a pointer type and the precisions of the
6848 intermediate and final types differ, or
6849 - the final type is a pointer type and the precisions of the
6850 initial and intermediate types differ. */
6851 if (! inside_float && ! inter_float && ! final_float
6852 && ! inside_vec && ! inter_vec && ! final_vec
6853 && (inter_prec > inside_prec || inter_prec > final_prec)
6854 && ! (inside_int && inter_int
6855 && inter_unsignedp != inside_unsignedp
6856 && inter_prec < final_prec)
6857 && ((inter_unsignedp && inter_prec > inside_prec)
6858 == (final_unsignedp && final_prec > inter_prec))
6859 && ! (inside_ptr && inter_prec != final_prec)
6860 && ! (final_ptr && inside_prec != inter_prec)
6861 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6862 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6864 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6867 /* Handle (T *)&A.B.C for A being of type T and B and C
6868 living at offset zero. This occurs frequently in
6869 C++ upcasting and then accessing the base. */
6870 if (TREE_CODE (op0) == ADDR_EXPR
6871 && POINTER_TYPE_P (type)
6872 && handled_component_p (TREE_OPERAND (op0, 0)))
6874 HOST_WIDE_INT bitsize, bitpos;
6876 enum machine_mode mode;
6877 int unsignedp, volatilep;
6878 tree base = TREE_OPERAND (op0, 0);
6879 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6880 &mode, &unsignedp, &volatilep, false);
6881 /* If the reference was to a (constant) zero offset, we can use
6882 the address of the base if it has the same base type
6883 as the result type. */
6884 if (! offset && bitpos == 0
6885 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6886 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6887 return fold_convert (type, build_fold_addr_expr (base));
6890 if (TREE_CODE (op0) == MODIFY_EXPR
6891 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6892 /* Detect assigning a bitfield. */
6893 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6894 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6896 /* Don't leave an assignment inside a conversion
6897 unless assigning a bitfield. */
6898 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6899 /* First do the assignment, then return converted constant. */
6900 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6901 TREE_NO_WARNING (tem) = 1;
6902 TREE_USED (tem) = 1;
6906 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6907 constants (if x has signed type, the sign bit cannot be set
6908 in c). This folds extension into the BIT_AND_EXPR. */
6909 if (INTEGRAL_TYPE_P (type)
6910 && TREE_CODE (type) != BOOLEAN_TYPE
6911 && TREE_CODE (op0) == BIT_AND_EXPR
6912 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6915 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6918 if (TYPE_UNSIGNED (TREE_TYPE (and))
6919 || (TYPE_PRECISION (type)
6920 <= TYPE_PRECISION (TREE_TYPE (and))))
6922 else if (TYPE_PRECISION (TREE_TYPE (and1))
6923 <= HOST_BITS_PER_WIDE_INT
6924 && host_integerp (and1, 1))
6926 unsigned HOST_WIDE_INT cst;
6928 cst = tree_low_cst (and1, 1);
6929 cst &= (HOST_WIDE_INT) -1
6930 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6931 change = (cst == 0);
6932 #ifdef LOAD_EXTEND_OP
6934 && !flag_syntax_only
6935 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6938 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6939 and0 = fold_convert (uns, and0);
6940 and1 = fold_convert (uns, and1);
6946 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6947 TREE_INT_CST_HIGH (and1));
6948 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6949 TREE_CONSTANT_OVERFLOW (and1));
6950 return fold_build2 (BIT_AND_EXPR, type,
6951 fold_convert (type, and0), tem);
6955 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6956 T2 being pointers to types of the same size. */
6957 if (POINTER_TYPE_P (type)
6958 && BINARY_CLASS_P (arg0)
6959 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6960 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6962 tree arg00 = TREE_OPERAND (arg0, 0);
6964 tree t1 = TREE_TYPE (arg00);
6965 tree tt0 = TREE_TYPE (t0);
6966 tree tt1 = TREE_TYPE (t1);
6967 tree s0 = TYPE_SIZE (tt0);
6968 tree s1 = TYPE_SIZE (tt1);
6970 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6971 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6972 TREE_OPERAND (arg0, 1));
6975 tem = fold_convert_const (code, type, arg0);
6976 return tem ? tem : NULL_TREE;
6978 case VIEW_CONVERT_EXPR:
6979 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6980 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6984 if (negate_expr_p (arg0))
6985 return fold_convert (type, negate_expr (arg0));
6986 /* Convert - (~A) to A + 1. */
6987 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6988 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6989 build_int_cst (type, 1));
6993 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6994 return fold_abs_const (arg0, type);
6995 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6996 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6997 /* Convert fabs((double)float) into (double)fabsf(float). */
6998 else if (TREE_CODE (arg0) == NOP_EXPR
6999 && TREE_CODE (type) == REAL_TYPE)
7001 tree targ0 = strip_float_extensions (arg0);
7003 return fold_convert (type, fold_build1 (ABS_EXPR,
7007 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7008 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7011 /* Strip sign ops from argument. */
7012 if (TREE_CODE (type) == REAL_TYPE)
7014 tem = fold_strip_sign_ops (arg0);
7016 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7021 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7022 return fold_convert (type, arg0);
7023 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7024 return build2 (COMPLEX_EXPR, type,
7025 TREE_OPERAND (arg0, 0),
7026 negate_expr (TREE_OPERAND (arg0, 1)));
7027 else if (TREE_CODE (arg0) == COMPLEX_CST)
7028 return build_complex (type, TREE_REALPART (arg0),
7029 negate_expr (TREE_IMAGPART (arg0)));
7030 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7031 return fold_build2 (TREE_CODE (arg0), type,
7032 fold_build1 (CONJ_EXPR, type,
7033 TREE_OPERAND (arg0, 0)),
7034 fold_build1 (CONJ_EXPR, type,
7035 TREE_OPERAND (arg0, 1)));
7036 else if (TREE_CODE (arg0) == CONJ_EXPR)
7037 return TREE_OPERAND (arg0, 0);
7041 if (TREE_CODE (arg0) == INTEGER_CST)
7042 return fold_not_const (arg0, type);
7043 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7044 return TREE_OPERAND (arg0, 0);
7045 /* Convert ~ (-A) to A - 1. */
7046 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7047 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7048 build_int_cst (type, 1));
7049 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7050 else if (INTEGRAL_TYPE_P (type)
7051 && ((TREE_CODE (arg0) == MINUS_EXPR
7052 && integer_onep (TREE_OPERAND (arg0, 1)))
7053 || (TREE_CODE (arg0) == PLUS_EXPR
7054 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7055 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7056 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7057 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7058 && (tem = fold_unary (BIT_NOT_EXPR, type,
7060 TREE_OPERAND (arg0, 0)))))
7061 return fold_build2 (BIT_XOR_EXPR, type, tem,
7062 fold_convert (type, TREE_OPERAND (arg0, 1)));
7063 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7064 && (tem = fold_unary (BIT_NOT_EXPR, type,
7066 TREE_OPERAND (arg0, 1)))))
7067 return fold_build2 (BIT_XOR_EXPR, type,
7068 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7072 case TRUTH_NOT_EXPR:
7073 /* The argument to invert_truthvalue must have Boolean type. */
7074 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7075 arg0 = fold_convert (boolean_type_node, arg0);
7077 /* Note that the operand of this must be an int
7078 and its values must be 0 or 1.
7079 ("true" is a fixed value perhaps depending on the language,
7080 but we don't handle values other than 1 correctly yet.) */
7081 tem = invert_truthvalue (arg0);
7082 /* Avoid infinite recursion. */
7083 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7085 return fold_convert (type, tem);
7088 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7090 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7091 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7092 TREE_OPERAND (arg0, 1));
7093 else if (TREE_CODE (arg0) == COMPLEX_CST)
7094 return TREE_REALPART (arg0);
7095 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7096 return fold_build2 (TREE_CODE (arg0), type,
7097 fold_build1 (REALPART_EXPR, type,
7098 TREE_OPERAND (arg0, 0)),
7099 fold_build1 (REALPART_EXPR, type,
7100 TREE_OPERAND (arg0, 1)));
7104 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7105 return fold_convert (type, integer_zero_node);
7106 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7107 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7108 TREE_OPERAND (arg0, 0));
7109 else if (TREE_CODE (arg0) == COMPLEX_CST)
7110 return TREE_IMAGPART (arg0);
7111 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7112 return fold_build2 (TREE_CODE (arg0), type,
7113 fold_build1 (IMAGPART_EXPR, type,
7114 TREE_OPERAND (arg0, 0)),
7115 fold_build1 (IMAGPART_EXPR, type,
7116 TREE_OPERAND (arg0, 1)));
7121 } /* switch (code) */
7124 /* Fold a binary expression of code CODE and type TYPE with operands
7125 OP0 and OP1. Return the folded expression if folding is
7126 successful. Otherwise, return NULL_TREE. */
7129 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7131 tree t1 = NULL_TREE;
7133 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7134 enum tree_code_class kind = TREE_CODE_CLASS (code);
7136 /* WINS will be nonzero when the switch is done
7137 if all operands are constant. */
7140 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7141 && TREE_CODE_LENGTH (code) == 2);
7150 /* Strip any conversions that don't change the mode. This is
7151 safe for every expression, except for a comparison expression
7152 because its signedness is derived from its operands. So, in
7153 the latter case, only strip conversions that don't change the
7156 Note that this is done as an internal manipulation within the
7157 constant folder, in order to find the simplest representation
7158 of the arguments so that their form can be studied. In any
7159 cases, the appropriate type conversions should be put back in
7160 the tree that will get out of the constant folder. */
7161 if (kind == tcc_comparison)
7162 STRIP_SIGN_NOPS (arg0);
7166 if (TREE_CODE (arg0) == COMPLEX_CST)
7167 subop = TREE_REALPART (arg0);
7171 if (TREE_CODE (subop) != INTEGER_CST
7172 && TREE_CODE (subop) != REAL_CST)
7173 /* Note that TREE_CONSTANT isn't enough:
7174 static var addresses are constant but we can't
7175 do arithmetic on them. */
7183 /* Strip any conversions that don't change the mode. This is
7184 safe for every expression, except for a comparison expression
7185 because its signedness is derived from its operands. So, in
7186 the latter case, only strip conversions that don't change the
7189 Note that this is done as an internal manipulation within the
7190 constant folder, in order to find the simplest representation
7191 of the arguments so that their form can be studied. In any
7192 cases, the appropriate type conversions should be put back in
7193 the tree that will get out of the constant folder. */
7194 if (kind == tcc_comparison)
7195 STRIP_SIGN_NOPS (arg1);
7199 if (TREE_CODE (arg1) == COMPLEX_CST)
7200 subop = TREE_REALPART (arg1);
7204 if (TREE_CODE (subop) != INTEGER_CST
7205 && TREE_CODE (subop) != REAL_CST)
7206 /* Note that TREE_CONSTANT isn't enough:
7207 static var addresses are constant but we can't
7208 do arithmetic on them. */
7212 /* If this is a commutative operation, and ARG0 is a constant, move it
7213 to ARG1 to reduce the number of tests below. */
7214 if (commutative_tree_code (code)
7215 && tree_swap_operands_p (arg0, arg1, true))
7216 return fold_build2 (code, type, op1, op0);
7218 /* Now WINS is set as described above,
7219 ARG0 is the first operand of EXPR,
7220 and ARG1 is the second operand (if it has more than one operand).
7222 First check for cases where an arithmetic operation is applied to a
7223 compound, conditional, or comparison operation. Push the arithmetic
7224 operation inside the compound or conditional to see if any folding
7225 can then be done. Convert comparison to conditional for this purpose.
7226 The also optimizes non-constant cases that used to be done in
7229 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7230 one of the operands is a comparison and the other is a comparison, a
7231 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7232 code below would make the expression more complex. Change it to a
7233 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7234 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7236 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7237 || code == EQ_EXPR || code == NE_EXPR)
7238 && ((truth_value_p (TREE_CODE (arg0))
7239 && (truth_value_p (TREE_CODE (arg1))
7240 || (TREE_CODE (arg1) == BIT_AND_EXPR
7241 && integer_onep (TREE_OPERAND (arg1, 1)))))
7242 || (truth_value_p (TREE_CODE (arg1))
7243 && (truth_value_p (TREE_CODE (arg0))
7244 || (TREE_CODE (arg0) == BIT_AND_EXPR
7245 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7247 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7248 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7251 fold_convert (boolean_type_node, arg0),
7252 fold_convert (boolean_type_node, arg1));
7254 if (code == EQ_EXPR)
7255 tem = invert_truthvalue (tem);
7257 return fold_convert (type, tem);
7260 if (TREE_CODE_CLASS (code) == tcc_binary
7261 || TREE_CODE_CLASS (code) == tcc_comparison)
7263 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7264 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7265 fold_build2 (code, type,
7266 TREE_OPERAND (arg0, 1), op1));
7267 if (TREE_CODE (arg1) == COMPOUND_EXPR
7268 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7269 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7270 fold_build2 (code, type,
7271 op0, TREE_OPERAND (arg1, 1)));
7273 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7275 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7277 /*cond_first_p=*/1);
7278 if (tem != NULL_TREE)
7282 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7284 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7286 /*cond_first_p=*/0);
7287 if (tem != NULL_TREE)
7295 /* A + (-B) -> A - B */
7296 if (TREE_CODE (arg1) == NEGATE_EXPR)
7297 return fold_build2 (MINUS_EXPR, type,
7298 fold_convert (type, arg0),
7299 fold_convert (type, TREE_OPERAND (arg1, 0)));
7300 /* (-A) + B -> B - A */
7301 if (TREE_CODE (arg0) == NEGATE_EXPR
7302 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7303 return fold_build2 (MINUS_EXPR, type,
7304 fold_convert (type, arg1),
7305 fold_convert (type, TREE_OPERAND (arg0, 0)));
7306 /* Convert ~A + 1 to -A. */
7307 if (INTEGRAL_TYPE_P (type)
7308 && TREE_CODE (arg0) == BIT_NOT_EXPR
7309 && integer_onep (arg1))
7310 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7312 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
7314 if ((TREE_CODE (arg0) == MULT_EXPR
7315 || TREE_CODE (arg1) == MULT_EXPR)
7316 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7318 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7323 if (! FLOAT_TYPE_P (type))
7325 if (integer_zerop (arg1))
7326 return non_lvalue (fold_convert (type, arg0));
7328 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7329 with a constant, and the two constants have no bits in common,
7330 we should treat this as a BIT_IOR_EXPR since this may produce more
7332 if (TREE_CODE (arg0) == BIT_AND_EXPR
7333 && TREE_CODE (arg1) == BIT_AND_EXPR
7334 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7335 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7336 && integer_zerop (const_binop (BIT_AND_EXPR,
7337 TREE_OPERAND (arg0, 1),
7338 TREE_OPERAND (arg1, 1), 0)))
7340 code = BIT_IOR_EXPR;
7344 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7345 (plus (plus (mult) (mult)) (foo)) so that we can
7346 take advantage of the factoring cases below. */
7347 if (((TREE_CODE (arg0) == PLUS_EXPR
7348 || TREE_CODE (arg0) == MINUS_EXPR)
7349 && TREE_CODE (arg1) == MULT_EXPR)
7350 || ((TREE_CODE (arg1) == PLUS_EXPR
7351 || TREE_CODE (arg1) == MINUS_EXPR)
7352 && TREE_CODE (arg0) == MULT_EXPR))
7354 tree parg0, parg1, parg, marg;
7355 enum tree_code pcode;
7357 if (TREE_CODE (arg1) == MULT_EXPR)
7358 parg = arg0, marg = arg1;
7360 parg = arg1, marg = arg0;
7361 pcode = TREE_CODE (parg);
7362 parg0 = TREE_OPERAND (parg, 0);
7363 parg1 = TREE_OPERAND (parg, 1);
7367 if (TREE_CODE (parg0) == MULT_EXPR
7368 && TREE_CODE (parg1) != MULT_EXPR)
7369 return fold_build2 (pcode, type,
7370 fold_build2 (PLUS_EXPR, type,
7371 fold_convert (type, parg0),
7372 fold_convert (type, marg)),
7373 fold_convert (type, parg1));
7374 if (TREE_CODE (parg0) != MULT_EXPR
7375 && TREE_CODE (parg1) == MULT_EXPR)
7376 return fold_build2 (PLUS_EXPR, type,
7377 fold_convert (type, parg0),
7378 fold_build2 (pcode, type,
7379 fold_convert (type, marg),
7384 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7385 of the array. Loop optimizer sometimes produce this type of
7387 if (TREE_CODE (arg0) == ADDR_EXPR)
7389 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7391 return fold_convert (type, tem);
7393 else if (TREE_CODE (arg1) == ADDR_EXPR)
7395 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7397 return fold_convert (type, tem);
7402 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7403 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7404 return non_lvalue (fold_convert (type, arg0));
7406 /* Likewise if the operands are reversed. */
7407 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7408 return non_lvalue (fold_convert (type, arg1));
7410 /* Convert X + -C into X - C. */
7411 if (TREE_CODE (arg1) == REAL_CST
7412 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7414 tem = fold_negate_const (arg1, type);
7415 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7416 return fold_build2 (MINUS_EXPR, type,
7417 fold_convert (type, arg0),
7418 fold_convert (type, tem));
7421 if (flag_unsafe_math_optimizations
7422 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7423 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7424 && (tem = distribute_real_division (code, type, arg0, arg1)))
7427 /* Convert x+x into x*2.0. */
7428 if (operand_equal_p (arg0, arg1, 0)
7429 && SCALAR_FLOAT_TYPE_P (type))
7430 return fold_build2 (MULT_EXPR, type, arg0,
7431 build_real (type, dconst2));
7433 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7434 if (flag_unsafe_math_optimizations
7435 && TREE_CODE (arg1) == PLUS_EXPR
7436 && TREE_CODE (arg0) != MULT_EXPR)
7438 tree tree10 = TREE_OPERAND (arg1, 0);
7439 tree tree11 = TREE_OPERAND (arg1, 1);
7440 if (TREE_CODE (tree11) == MULT_EXPR
7441 && TREE_CODE (tree10) == MULT_EXPR)
7444 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7445 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7448 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7449 if (flag_unsafe_math_optimizations
7450 && TREE_CODE (arg0) == PLUS_EXPR
7451 && TREE_CODE (arg1) != MULT_EXPR)
7453 tree tree00 = TREE_OPERAND (arg0, 0);
7454 tree tree01 = TREE_OPERAND (arg0, 1);
7455 if (TREE_CODE (tree01) == MULT_EXPR
7456 && TREE_CODE (tree00) == MULT_EXPR)
7459 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7460 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7466 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7467 is a rotate of A by C1 bits. */
7468 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7469 is a rotate of A by B bits. */
7471 enum tree_code code0, code1;
7472 code0 = TREE_CODE (arg0);
7473 code1 = TREE_CODE (arg1);
7474 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7475 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7476 && operand_equal_p (TREE_OPERAND (arg0, 0),
7477 TREE_OPERAND (arg1, 0), 0)
7478 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7480 tree tree01, tree11;
7481 enum tree_code code01, code11;
7483 tree01 = TREE_OPERAND (arg0, 1);
7484 tree11 = TREE_OPERAND (arg1, 1);
7485 STRIP_NOPS (tree01);
7486 STRIP_NOPS (tree11);
7487 code01 = TREE_CODE (tree01);
7488 code11 = TREE_CODE (tree11);
7489 if (code01 == INTEGER_CST
7490 && code11 == INTEGER_CST
7491 && TREE_INT_CST_HIGH (tree01) == 0
7492 && TREE_INT_CST_HIGH (tree11) == 0
7493 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7494 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7495 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7496 code0 == LSHIFT_EXPR ? tree01 : tree11);
7497 else if (code11 == MINUS_EXPR)
7499 tree tree110, tree111;
7500 tree110 = TREE_OPERAND (tree11, 0);
7501 tree111 = TREE_OPERAND (tree11, 1);
7502 STRIP_NOPS (tree110);
7503 STRIP_NOPS (tree111);
7504 if (TREE_CODE (tree110) == INTEGER_CST
7505 && 0 == compare_tree_int (tree110,
7507 (TREE_TYPE (TREE_OPERAND
7509 && operand_equal_p (tree01, tree111, 0))
7510 return build2 ((code0 == LSHIFT_EXPR
7513 type, TREE_OPERAND (arg0, 0), tree01);
7515 else if (code01 == MINUS_EXPR)
7517 tree tree010, tree011;
7518 tree010 = TREE_OPERAND (tree01, 0);
7519 tree011 = TREE_OPERAND (tree01, 1);
7520 STRIP_NOPS (tree010);
7521 STRIP_NOPS (tree011);
7522 if (TREE_CODE (tree010) == INTEGER_CST
7523 && 0 == compare_tree_int (tree010,
7525 (TREE_TYPE (TREE_OPERAND
7527 && operand_equal_p (tree11, tree011, 0))
7528 return build2 ((code0 != LSHIFT_EXPR
7531 type, TREE_OPERAND (arg0, 0), tree11);
7537 /* In most languages, can't associate operations on floats through
7538 parentheses. Rather than remember where the parentheses were, we
7539 don't associate floats at all, unless the user has specified
7540 -funsafe-math-optimizations. */
7543 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7545 tree var0, con0, lit0, minus_lit0;
7546 tree var1, con1, lit1, minus_lit1;
7548 /* Split both trees into variables, constants, and literals. Then
7549 associate each group together, the constants with literals,
7550 then the result with variables. This increases the chances of
7551 literals being recombined later and of generating relocatable
7552 expressions for the sum of a constant and literal. */
7553 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7554 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7555 code == MINUS_EXPR);
7557 /* Only do something if we found more than two objects. Otherwise,
7558 nothing has changed and we risk infinite recursion. */
7559 if (2 < ((var0 != 0) + (var1 != 0)
7560 + (con0 != 0) + (con1 != 0)
7561 + (lit0 != 0) + (lit1 != 0)
7562 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7564 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7565 if (code == MINUS_EXPR)
7568 var0 = associate_trees (var0, var1, code, type);
7569 con0 = associate_trees (con0, con1, code, type);
7570 lit0 = associate_trees (lit0, lit1, code, type);
7571 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7573 /* Preserve the MINUS_EXPR if the negative part of the literal is
7574 greater than the positive part. Otherwise, the multiplicative
7575 folding code (i.e extract_muldiv) may be fooled in case
7576 unsigned constants are subtracted, like in the following
7577 example: ((X*2 + 4) - 8U)/2. */
7578 if (minus_lit0 && lit0)
7580 if (TREE_CODE (lit0) == INTEGER_CST
7581 && TREE_CODE (minus_lit0) == INTEGER_CST
7582 && tree_int_cst_lt (lit0, minus_lit0))
7584 minus_lit0 = associate_trees (minus_lit0, lit0,
7590 lit0 = associate_trees (lit0, minus_lit0,
7598 return fold_convert (type,
7599 associate_trees (var0, minus_lit0,
7603 con0 = associate_trees (con0, minus_lit0,
7605 return fold_convert (type,
7606 associate_trees (var0, con0,
7611 con0 = associate_trees (con0, lit0, code, type);
7612 return fold_convert (type, associate_trees (var0, con0,
7619 t1 = const_binop (code, arg0, arg1, 0);
7620 if (t1 != NULL_TREE)
7622 /* The return value should always have
7623 the same type as the original expression. */
7624 if (TREE_TYPE (t1) != type)
7625 t1 = fold_convert (type, t1);
7632 /* A - (-B) -> A + B */
7633 if (TREE_CODE (arg1) == NEGATE_EXPR)
7634 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7635 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7636 if (TREE_CODE (arg0) == NEGATE_EXPR
7637 && (FLOAT_TYPE_P (type)
7638 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7639 && negate_expr_p (arg1)
7640 && reorder_operands_p (arg0, arg1))
7641 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7642 TREE_OPERAND (arg0, 0));
7643 /* Convert -A - 1 to ~A. */
7644 if (INTEGRAL_TYPE_P (type)
7645 && TREE_CODE (arg0) == NEGATE_EXPR
7646 && integer_onep (arg1))
7647 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7649 /* Convert -1 - A to ~A. */
7650 if (INTEGRAL_TYPE_P (type)
7651 && integer_all_onesp (arg0))
7652 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7654 if (! FLOAT_TYPE_P (type))
7656 if (! wins && integer_zerop (arg0))
7657 return negate_expr (fold_convert (type, arg1));
7658 if (integer_zerop (arg1))
7659 return non_lvalue (fold_convert (type, arg0));
7661 /* Fold A - (A & B) into ~B & A. */
7662 if (!TREE_SIDE_EFFECTS (arg0)
7663 && TREE_CODE (arg1) == BIT_AND_EXPR)
7665 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7666 return fold_build2 (BIT_AND_EXPR, type,
7667 fold_build1 (BIT_NOT_EXPR, type,
7668 TREE_OPERAND (arg1, 0)),
7670 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7671 return fold_build2 (BIT_AND_EXPR, type,
7672 fold_build1 (BIT_NOT_EXPR, type,
7673 TREE_OPERAND (arg1, 1)),
7677 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7678 any power of 2 minus 1. */
7679 if (TREE_CODE (arg0) == BIT_AND_EXPR
7680 && TREE_CODE (arg1) == BIT_AND_EXPR
7681 && operand_equal_p (TREE_OPERAND (arg0, 0),
7682 TREE_OPERAND (arg1, 0), 0))
7684 tree mask0 = TREE_OPERAND (arg0, 1);
7685 tree mask1 = TREE_OPERAND (arg1, 1);
7686 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7688 if (operand_equal_p (tem, mask1, 0))
7690 tem = fold_build2 (BIT_XOR_EXPR, type,
7691 TREE_OPERAND (arg0, 0), mask1);
7692 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7697 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7698 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7699 return non_lvalue (fold_convert (type, arg0));
7701 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7702 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7703 (-ARG1 + ARG0) reduces to -ARG1. */
7704 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7705 return negate_expr (fold_convert (type, arg1));
7707 /* Fold &x - &x. This can happen from &x.foo - &x.
7708 This is unsafe for certain floats even in non-IEEE formats.
7709 In IEEE, it is unsafe because it does wrong for NaNs.
7710 Also note that operand_equal_p is always false if an operand
7713 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7714 && operand_equal_p (arg0, arg1, 0))
7715 return fold_convert (type, integer_zero_node);
7717 /* A - B -> A + (-B) if B is easily negatable. */
7718 if (!wins && negate_expr_p (arg1)
7719 && ((FLOAT_TYPE_P (type)
7720 /* Avoid this transformation if B is a positive REAL_CST. */
7721 && (TREE_CODE (arg1) != REAL_CST
7722 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7723 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7724 return fold_build2 (PLUS_EXPR, type,
7725 fold_convert (type, arg0),
7726 fold_convert (type, negate_expr (arg1)));
7728 /* Try folding difference of addresses. */
7732 if ((TREE_CODE (arg0) == ADDR_EXPR
7733 || TREE_CODE (arg1) == ADDR_EXPR)
7734 && ptr_difference_const (arg0, arg1, &diff))
7735 return build_int_cst_type (type, diff);
7738 /* Fold &a[i] - &a[j] to i-j. */
7739 if (TREE_CODE (arg0) == ADDR_EXPR
7740 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7741 && TREE_CODE (arg1) == ADDR_EXPR
7742 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7744 tree aref0 = TREE_OPERAND (arg0, 0);
7745 tree aref1 = TREE_OPERAND (arg1, 0);
7746 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7747 TREE_OPERAND (aref1, 0), 0))
7749 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7750 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7751 tree esz = array_ref_element_size (aref0);
7752 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7753 return fold_build2 (MULT_EXPR, type, diff,
7754 fold_convert (type, esz));
7759 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7760 of the array. Loop optimizer sometimes produce this type of
7762 if (TREE_CODE (arg0) == ADDR_EXPR)
7764 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7766 return fold_convert (type, tem);
7769 if (flag_unsafe_math_optimizations
7770 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7771 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7772 && (tem = distribute_real_division (code, type, arg0, arg1)))
7775 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
7777 if ((TREE_CODE (arg0) == MULT_EXPR
7778 || TREE_CODE (arg1) == MULT_EXPR)
7779 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7781 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7789 /* (-A) * (-B) -> A * B */
7790 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7791 return fold_build2 (MULT_EXPR, type,
7792 TREE_OPERAND (arg0, 0),
7793 negate_expr (arg1));
7794 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7795 return fold_build2 (MULT_EXPR, type,
7797 TREE_OPERAND (arg1, 0));
7799 if (! FLOAT_TYPE_P (type))
7801 if (integer_zerop (arg1))
7802 return omit_one_operand (type, arg1, arg0);
7803 if (integer_onep (arg1))
7804 return non_lvalue (fold_convert (type, arg0));
7805 /* Transform x * -1 into -x. */
7806 if (integer_all_onesp (arg1))
7807 return fold_convert (type, negate_expr (arg0));
7809 /* (a * (1 << b)) is (a << b) */
7810 if (TREE_CODE (arg1) == LSHIFT_EXPR
7811 && integer_onep (TREE_OPERAND (arg1, 0)))
7812 return fold_build2 (LSHIFT_EXPR, type, arg0,
7813 TREE_OPERAND (arg1, 1));
7814 if (TREE_CODE (arg0) == LSHIFT_EXPR
7815 && integer_onep (TREE_OPERAND (arg0, 0)))
7816 return fold_build2 (LSHIFT_EXPR, type, arg1,
7817 TREE_OPERAND (arg0, 1));
7819 if (TREE_CODE (arg1) == INTEGER_CST
7820 && 0 != (tem = extract_muldiv (op0,
7821 fold_convert (type, arg1),
7823 return fold_convert (type, tem);
7828 /* Maybe fold x * 0 to 0. The expressions aren't the same
7829 when x is NaN, since x * 0 is also NaN. Nor are they the
7830 same in modes with signed zeros, since multiplying a
7831 negative value by 0 gives -0, not +0. */
7832 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7833 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7834 && real_zerop (arg1))
7835 return omit_one_operand (type, arg1, arg0);
7836 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7837 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7838 && real_onep (arg1))
7839 return non_lvalue (fold_convert (type, arg0));
7841 /* Transform x * -1.0 into -x. */
7842 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7843 && real_minus_onep (arg1))
7844 return fold_convert (type, negate_expr (arg0));
7846 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7847 if (flag_unsafe_math_optimizations
7848 && TREE_CODE (arg0) == RDIV_EXPR
7849 && TREE_CODE (arg1) == REAL_CST
7850 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7852 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7855 return fold_build2 (RDIV_EXPR, type, tem,
7856 TREE_OPERAND (arg0, 1));
7859 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7860 if (operand_equal_p (arg0, arg1, 0))
7862 tree tem = fold_strip_sign_ops (arg0);
7863 if (tem != NULL_TREE)
7865 tem = fold_convert (type, tem);
7866 return fold_build2 (MULT_EXPR, type, tem, tem);
7870 if (flag_unsafe_math_optimizations)
7872 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7873 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7875 /* Optimizations of root(...)*root(...). */
7876 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7878 tree rootfn, arg, arglist;
7879 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7880 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7882 /* Optimize sqrt(x)*sqrt(x) as x. */
7883 if (BUILTIN_SQRT_P (fcode0)
7884 && operand_equal_p (arg00, arg10, 0)
7885 && ! HONOR_SNANS (TYPE_MODE (type)))
7888 /* Optimize root(x)*root(y) as root(x*y). */
7889 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7890 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7891 arglist = build_tree_list (NULL_TREE, arg);
7892 return build_function_call_expr (rootfn, arglist);
7895 /* Optimize expN(x)*expN(y) as expN(x+y). */
7896 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7898 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7899 tree arg = fold_build2 (PLUS_EXPR, type,
7900 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7901 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7902 tree arglist = build_tree_list (NULL_TREE, arg);
7903 return build_function_call_expr (expfn, arglist);
7906 /* Optimizations of pow(...)*pow(...). */
7907 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7908 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7909 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7911 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7912 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7914 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7915 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7918 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7919 if (operand_equal_p (arg01, arg11, 0))
7921 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7922 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7923 tree arglist = tree_cons (NULL_TREE, arg,
7924 build_tree_list (NULL_TREE,
7926 return build_function_call_expr (powfn, arglist);
7929 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7930 if (operand_equal_p (arg00, arg10, 0))
7932 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7933 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7934 tree arglist = tree_cons (NULL_TREE, arg00,
7935 build_tree_list (NULL_TREE,
7937 return build_function_call_expr (powfn, arglist);
7941 /* Optimize tan(x)*cos(x) as sin(x). */
7942 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7943 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7944 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7945 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7946 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7947 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7948 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7949 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7951 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7953 if (sinfn != NULL_TREE)
7954 return build_function_call_expr (sinfn,
7955 TREE_OPERAND (arg0, 1));
7958 /* Optimize x*pow(x,c) as pow(x,c+1). */
7959 if (fcode1 == BUILT_IN_POW
7960 || fcode1 == BUILT_IN_POWF
7961 || fcode1 == BUILT_IN_POWL)
7963 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7964 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7966 if (TREE_CODE (arg11) == REAL_CST
7967 && ! TREE_CONSTANT_OVERFLOW (arg11)
7968 && operand_equal_p (arg0, arg10, 0))
7970 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7974 c = TREE_REAL_CST (arg11);
7975 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7976 arg = build_real (type, c);
7977 arglist = build_tree_list (NULL_TREE, arg);
7978 arglist = tree_cons (NULL_TREE, arg0, arglist);
7979 return build_function_call_expr (powfn, arglist);
7983 /* Optimize pow(x,c)*x as pow(x,c+1). */
7984 if (fcode0 == BUILT_IN_POW
7985 || fcode0 == BUILT_IN_POWF
7986 || fcode0 == BUILT_IN_POWL)
7988 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7989 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7991 if (TREE_CODE (arg01) == REAL_CST
7992 && ! TREE_CONSTANT_OVERFLOW (arg01)
7993 && operand_equal_p (arg1, arg00, 0))
7995 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7999 c = TREE_REAL_CST (arg01);
8000 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8001 arg = build_real (type, c);
8002 arglist = build_tree_list (NULL_TREE, arg);
8003 arglist = tree_cons (NULL_TREE, arg1, arglist);
8004 return build_function_call_expr (powfn, arglist);
8008 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8010 && operand_equal_p (arg0, arg1, 0))
8012 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8016 tree arg = build_real (type, dconst2);
8017 tree arglist = build_tree_list (NULL_TREE, arg);
8018 arglist = tree_cons (NULL_TREE, arg0, arglist);
8019 return build_function_call_expr (powfn, arglist);
8028 if (integer_all_onesp (arg1))
8029 return omit_one_operand (type, arg1, arg0);
8030 if (integer_zerop (arg1))
8031 return non_lvalue (fold_convert (type, arg0));
8032 if (operand_equal_p (arg0, arg1, 0))
8033 return non_lvalue (fold_convert (type, arg0));
8036 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8037 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8039 t1 = build_int_cst (type, -1);
8040 t1 = force_fit_type (t1, 0, false, false);
8041 return omit_one_operand (type, t1, arg1);
8045 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8046 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8048 t1 = build_int_cst (type, -1);
8049 t1 = force_fit_type (t1, 0, false, false);
8050 return omit_one_operand (type, t1, arg0);
8053 t1 = distribute_bit_expr (code, type, arg0, arg1);
8054 if (t1 != NULL_TREE)
8057 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8059 This results in more efficient code for machines without a NAND
8060 instruction. Combine will canonicalize to the first form
8061 which will allow use of NAND instructions provided by the
8062 backend if they exist. */
8063 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8064 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8066 return fold_build1 (BIT_NOT_EXPR, type,
8067 build2 (BIT_AND_EXPR, type,
8068 TREE_OPERAND (arg0, 0),
8069 TREE_OPERAND (arg1, 0)));
8072 /* See if this can be simplified into a rotate first. If that
8073 is unsuccessful continue in the association code. */
8077 if (integer_zerop (arg1))
8078 return non_lvalue (fold_convert (type, arg0));
8079 if (integer_all_onesp (arg1))
8080 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8081 if (operand_equal_p (arg0, arg1, 0))
8082 return omit_one_operand (type, integer_zero_node, arg0);
8085 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8086 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8088 t1 = build_int_cst (type, -1);
8089 t1 = force_fit_type (t1, 0, false, false);
8090 return omit_one_operand (type, t1, arg1);
8094 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8095 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8097 t1 = build_int_cst (type, -1);
8098 t1 = force_fit_type (t1, 0, false, false);
8099 return omit_one_operand (type, t1, arg0);
8102 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8103 with a constant, and the two constants have no bits in common,
8104 we should treat this as a BIT_IOR_EXPR since this may produce more
8106 if (TREE_CODE (arg0) == BIT_AND_EXPR
8107 && TREE_CODE (arg1) == BIT_AND_EXPR
8108 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8109 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8110 && integer_zerop (const_binop (BIT_AND_EXPR,
8111 TREE_OPERAND (arg0, 1),
8112 TREE_OPERAND (arg1, 1), 0)))
8114 code = BIT_IOR_EXPR;
8118 /* (X | Y) ^ X -> Y & ~ X*/
8119 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8120 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8122 tree t2 = TREE_OPERAND (arg0, 1);
8123 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8125 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8126 fold_convert (type, t1));
8130 /* (Y | X) ^ X -> Y & ~ X*/
8131 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8132 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8134 tree t2 = TREE_OPERAND (arg0, 0);
8135 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8137 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8138 fold_convert (type, t1));
8142 /* X ^ (X | Y) -> Y & ~ X*/
8143 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8144 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8146 tree t2 = TREE_OPERAND (arg1, 1);
8147 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8149 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8150 fold_convert (type, t1));
8154 /* X ^ (Y | X) -> Y & ~ X*/
8155 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8156 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8158 tree t2 = TREE_OPERAND (arg1, 0);
8159 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8161 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8162 fold_convert (type, t1));
8166 /* Convert ~X ^ ~Y to X ^ Y. */
8167 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8168 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8169 return fold_build2 (code, type,
8170 fold_convert (type, TREE_OPERAND (arg0, 0)),
8171 fold_convert (type, TREE_OPERAND (arg1, 0)));
8173 /* See if this can be simplified into a rotate first. If that
8174 is unsuccessful continue in the association code. */
8178 if (integer_all_onesp (arg1))
8179 return non_lvalue (fold_convert (type, arg0));
8180 if (integer_zerop (arg1))
8181 return omit_one_operand (type, arg1, arg0);
8182 if (operand_equal_p (arg0, arg1, 0))
8183 return non_lvalue (fold_convert (type, arg0));
8185 /* ~X & X is always zero. */
8186 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8187 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8188 return omit_one_operand (type, integer_zero_node, arg1);
8190 /* X & ~X is always zero. */
8191 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8192 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8193 return omit_one_operand (type, integer_zero_node, arg0);
8195 t1 = distribute_bit_expr (code, type, arg0, arg1);
8196 if (t1 != NULL_TREE)
8198 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8199 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8200 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8203 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8205 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8206 && (~TREE_INT_CST_LOW (arg1)
8207 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8208 return fold_convert (type, TREE_OPERAND (arg0, 0));
8211 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8213 This results in more efficient code for machines without a NOR
8214 instruction. Combine will canonicalize to the first form
8215 which will allow use of NOR instructions provided by the
8216 backend if they exist. */
8217 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8218 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8220 return fold_build1 (BIT_NOT_EXPR, type,
8221 build2 (BIT_IOR_EXPR, type,
8222 TREE_OPERAND (arg0, 0),
8223 TREE_OPERAND (arg1, 0)));
8229 /* Don't touch a floating-point divide by zero unless the mode
8230 of the constant can represent infinity. */
8231 if (TREE_CODE (arg1) == REAL_CST
8232 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8233 && real_zerop (arg1))
8236 /* Optimize A / A to 1.0 if we don't care about
8237 NaNs or Infinities. */
8238 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8239 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8240 && operand_equal_p (arg0, arg1, 0))
8242 tree r = build_real (TREE_TYPE (arg0), dconst1);
8244 return omit_two_operands (type, r, arg0, arg1);
8247 /* (-A) / (-B) -> A / B */
8248 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8249 return fold_build2 (RDIV_EXPR, type,
8250 TREE_OPERAND (arg0, 0),
8251 negate_expr (arg1));
8252 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8253 return fold_build2 (RDIV_EXPR, type,
8255 TREE_OPERAND (arg1, 0));
8257 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8258 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8259 && real_onep (arg1))
8260 return non_lvalue (fold_convert (type, arg0));
8262 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8263 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8264 && real_minus_onep (arg1))
8265 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8267 /* If ARG1 is a constant, we can convert this to a multiply by the
8268 reciprocal. This does not have the same rounding properties,
8269 so only do this if -funsafe-math-optimizations. We can actually
8270 always safely do it if ARG1 is a power of two, but it's hard to
8271 tell if it is or not in a portable manner. */
8272 if (TREE_CODE (arg1) == REAL_CST)
8274 if (flag_unsafe_math_optimizations
8275 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8277 return fold_build2 (MULT_EXPR, type, arg0, tem);
8278 /* Find the reciprocal if optimizing and the result is exact. */
8282 r = TREE_REAL_CST (arg1);
8283 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8285 tem = build_real (type, r);
8286 return fold_build2 (MULT_EXPR, type,
8287 fold_convert (type, arg0), tem);
8291 /* Convert A/B/C to A/(B*C). */
8292 if (flag_unsafe_math_optimizations
8293 && TREE_CODE (arg0) == RDIV_EXPR)
8294 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8295 fold_build2 (MULT_EXPR, type,
8296 TREE_OPERAND (arg0, 1), arg1));
8298 /* Convert A/(B/C) to (A/B)*C. */
8299 if (flag_unsafe_math_optimizations
8300 && TREE_CODE (arg1) == RDIV_EXPR)
8301 return fold_build2 (MULT_EXPR, type,
8302 fold_build2 (RDIV_EXPR, type, arg0,
8303 TREE_OPERAND (arg1, 0)),
8304 TREE_OPERAND (arg1, 1));
8306 /* Convert C1/(X*C2) into (C1/C2)/X. */
8307 if (flag_unsafe_math_optimizations
8308 && TREE_CODE (arg1) == MULT_EXPR
8309 && TREE_CODE (arg0) == REAL_CST
8310 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8312 tree tem = const_binop (RDIV_EXPR, arg0,
8313 TREE_OPERAND (arg1, 1), 0);
8315 return fold_build2 (RDIV_EXPR, type, tem,
8316 TREE_OPERAND (arg1, 0));
8319 if (flag_unsafe_math_optimizations)
8321 enum built_in_function fcode = builtin_mathfn_code (arg1);
8322 /* Optimize x/expN(y) into x*expN(-y). */
8323 if (BUILTIN_EXPONENT_P (fcode))
8325 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8326 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8327 tree arglist = build_tree_list (NULL_TREE,
8328 fold_convert (type, arg));
8329 arg1 = build_function_call_expr (expfn, arglist);
8330 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8333 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8334 if (fcode == BUILT_IN_POW
8335 || fcode == BUILT_IN_POWF
8336 || fcode == BUILT_IN_POWL)
8338 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8339 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8340 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8341 tree neg11 = fold_convert (type, negate_expr (arg11));
8342 tree arglist = tree_cons(NULL_TREE, arg10,
8343 build_tree_list (NULL_TREE, neg11));
8344 arg1 = build_function_call_expr (powfn, arglist);
8345 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8349 if (flag_unsafe_math_optimizations)
8351 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8352 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8354 /* Optimize sin(x)/cos(x) as tan(x). */
8355 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8356 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8357 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8358 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8359 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8361 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8363 if (tanfn != NULL_TREE)
8364 return build_function_call_expr (tanfn,
8365 TREE_OPERAND (arg0, 1));
8368 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8369 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8370 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8371 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8372 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8373 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8375 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8377 if (tanfn != NULL_TREE)
8379 tree tmp = TREE_OPERAND (arg0, 1);
8380 tmp = build_function_call_expr (tanfn, tmp);
8381 return fold_build2 (RDIV_EXPR, type,
8382 build_real (type, dconst1), tmp);
8386 /* Optimize pow(x,c)/x as pow(x,c-1). */
8387 if (fcode0 == BUILT_IN_POW
8388 || fcode0 == BUILT_IN_POWF
8389 || fcode0 == BUILT_IN_POWL)
8391 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8392 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8393 if (TREE_CODE (arg01) == REAL_CST
8394 && ! TREE_CONSTANT_OVERFLOW (arg01)
8395 && operand_equal_p (arg1, arg00, 0))
8397 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8401 c = TREE_REAL_CST (arg01);
8402 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8403 arg = build_real (type, c);
8404 arglist = build_tree_list (NULL_TREE, arg);
8405 arglist = tree_cons (NULL_TREE, arg1, arglist);
8406 return build_function_call_expr (powfn, arglist);
8412 case TRUNC_DIV_EXPR:
8413 case ROUND_DIV_EXPR:
8414 case FLOOR_DIV_EXPR:
8416 case EXACT_DIV_EXPR:
8417 if (integer_onep (arg1))
8418 return non_lvalue (fold_convert (type, arg0));
8419 if (integer_zerop (arg1))
8422 if (!TYPE_UNSIGNED (type)
8423 && TREE_CODE (arg1) == INTEGER_CST
8424 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8425 && TREE_INT_CST_HIGH (arg1) == -1)
8426 return fold_convert (type, negate_expr (arg0));
8428 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8429 operation, EXACT_DIV_EXPR.
8431 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8432 At one time others generated faster code, it's not clear if they do
8433 after the last round to changes to the DIV code in expmed.c. */
8434 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8435 && multiple_of_p (type, arg0, arg1))
8436 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8438 if (TREE_CODE (arg1) == INTEGER_CST
8439 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8440 return fold_convert (type, tem);
8445 case FLOOR_MOD_EXPR:
8446 case ROUND_MOD_EXPR:
8447 case TRUNC_MOD_EXPR:
8448 /* X % 1 is always zero, but be sure to preserve any side
8450 if (integer_onep (arg1))
8451 return omit_one_operand (type, integer_zero_node, arg0);
8453 /* X % 0, return X % 0 unchanged so that we can get the
8454 proper warnings and errors. */
8455 if (integer_zerop (arg1))
8458 /* 0 % X is always zero, but be sure to preserve any side
8459 effects in X. Place this after checking for X == 0. */
8460 if (integer_zerop (arg0))
8461 return omit_one_operand (type, integer_zero_node, arg1);
8463 /* X % -1 is zero. */
8464 if (!TYPE_UNSIGNED (type)
8465 && TREE_CODE (arg1) == INTEGER_CST
8466 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8467 && TREE_INT_CST_HIGH (arg1) == -1)
8468 return omit_one_operand (type, integer_zero_node, arg0);
8470 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8471 i.e. "X % C" into "X & C2", if X and C are positive. */
8472 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8473 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8474 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8476 unsigned HOST_WIDE_INT high, low;
8480 l = tree_log2 (arg1);
8481 if (l >= HOST_BITS_PER_WIDE_INT)
8483 high = ((unsigned HOST_WIDE_INT) 1
8484 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8490 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8493 mask = build_int_cst_wide (type, low, high);
8494 return fold_build2 (BIT_AND_EXPR, type,
8495 fold_convert (type, arg0), mask);
8498 /* X % -C is the same as X % C. */
8499 if (code == TRUNC_MOD_EXPR
8500 && !TYPE_UNSIGNED (type)
8501 && TREE_CODE (arg1) == INTEGER_CST
8502 && !TREE_CONSTANT_OVERFLOW (arg1)
8503 && TREE_INT_CST_HIGH (arg1) < 0
8505 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8506 && !sign_bit_p (arg1, arg1))
8507 return fold_build2 (code, type, fold_convert (type, arg0),
8508 fold_convert (type, negate_expr (arg1)));
8510 /* X % -Y is the same as X % Y. */
8511 if (code == TRUNC_MOD_EXPR
8512 && !TYPE_UNSIGNED (type)
8513 && TREE_CODE (arg1) == NEGATE_EXPR
8515 return fold_build2 (code, type, fold_convert (type, arg0),
8516 fold_convert (type, TREE_OPERAND (arg1, 0)));
8518 if (TREE_CODE (arg1) == INTEGER_CST
8519 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8520 return fold_convert (type, tem);
8526 if (integer_all_onesp (arg0))
8527 return omit_one_operand (type, arg0, arg1);
8531 /* Optimize -1 >> x for arithmetic right shifts. */
8532 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8533 return omit_one_operand (type, arg0, arg1);
8534 /* ... fall through ... */
8538 if (integer_zerop (arg1))
8539 return non_lvalue (fold_convert (type, arg0));
8540 if (integer_zerop (arg0))
8541 return omit_one_operand (type, arg0, arg1);
8543 /* Since negative shift count is not well-defined,
8544 don't try to compute it in the compiler. */
8545 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8548 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8549 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8550 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8551 && host_integerp (TREE_OPERAND (arg0, 1), false)
8552 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8554 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8555 + TREE_INT_CST_LOW (arg1));
8557 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8558 being well defined. */
8559 if (low >= TYPE_PRECISION (type))
8561 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8562 low = low % TYPE_PRECISION (type);
8563 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8564 return build_int_cst (type, 0);
8566 low = TYPE_PRECISION (type) - 1;
8569 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8570 build_int_cst (type, low));
8573 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8574 into x & ((unsigned)-1 >> c) for unsigned types. */
8575 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8576 || (TYPE_UNSIGNED (type)
8577 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8578 && host_integerp (arg1, false)
8579 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8580 && host_integerp (TREE_OPERAND (arg0, 1), false)
8581 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8583 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8584 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8590 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8592 lshift = build_int_cst (type, -1);
8593 lshift = int_const_binop (code, lshift, arg1, 0);
8595 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8599 /* Rewrite an LROTATE_EXPR by a constant into an
8600 RROTATE_EXPR by a new constant. */
8601 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8603 tree tem = build_int_cst (NULL_TREE,
8604 GET_MODE_BITSIZE (TYPE_MODE (type)));
8605 tem = fold_convert (TREE_TYPE (arg1), tem);
8606 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8607 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8610 /* If we have a rotate of a bit operation with the rotate count and
8611 the second operand of the bit operation both constant,
8612 permute the two operations. */
8613 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8614 && (TREE_CODE (arg0) == BIT_AND_EXPR
8615 || TREE_CODE (arg0) == BIT_IOR_EXPR
8616 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8617 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8618 return fold_build2 (TREE_CODE (arg0), type,
8619 fold_build2 (code, type,
8620 TREE_OPERAND (arg0, 0), arg1),
8621 fold_build2 (code, type,
8622 TREE_OPERAND (arg0, 1), arg1));
8624 /* Two consecutive rotates adding up to the width of the mode can
8626 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8627 && TREE_CODE (arg0) == RROTATE_EXPR
8628 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8629 && TREE_INT_CST_HIGH (arg1) == 0
8630 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8631 && ((TREE_INT_CST_LOW (arg1)
8632 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8633 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8634 return TREE_OPERAND (arg0, 0);
8639 if (operand_equal_p (arg0, arg1, 0))
8640 return omit_one_operand (type, arg0, arg1);
8641 if (INTEGRAL_TYPE_P (type)
8642 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8643 return omit_one_operand (type, arg1, arg0);
8647 if (operand_equal_p (arg0, arg1, 0))
8648 return omit_one_operand (type, arg0, arg1);
8649 if (INTEGRAL_TYPE_P (type)
8650 && TYPE_MAX_VALUE (type)
8651 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8652 return omit_one_operand (type, arg1, arg0);
8655 case TRUTH_ANDIF_EXPR:
8656 /* Note that the operands of this must be ints
8657 and their values must be 0 or 1.
8658 ("true" is a fixed value perhaps depending on the language.) */
8659 /* If first arg is constant zero, return it. */
8660 if (integer_zerop (arg0))
8661 return fold_convert (type, arg0);
8662 case TRUTH_AND_EXPR:
8663 /* If either arg is constant true, drop it. */
8664 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8665 return non_lvalue (fold_convert (type, arg1));
8666 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8667 /* Preserve sequence points. */
8668 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8669 return non_lvalue (fold_convert (type, arg0));
8670 /* If second arg is constant zero, result is zero, but first arg
8671 must be evaluated. */
8672 if (integer_zerop (arg1))
8673 return omit_one_operand (type, arg1, arg0);
8674 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8675 case will be handled here. */
8676 if (integer_zerop (arg0))
8677 return omit_one_operand (type, arg0, arg1);
8679 /* !X && X is always false. */
8680 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8682 return omit_one_operand (type, integer_zero_node, arg1);
8683 /* X && !X is always false. */
8684 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8685 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8686 return omit_one_operand (type, integer_zero_node, arg0);
8688 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8689 means A >= Y && A != MAX, but in this case we know that
8692 if (!TREE_SIDE_EFFECTS (arg0)
8693 && !TREE_SIDE_EFFECTS (arg1))
8695 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8696 if (tem && !operand_equal_p (tem, arg0, 0))
8697 return fold_build2 (code, type, tem, arg1);
8699 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8700 if (tem && !operand_equal_p (tem, arg1, 0))
8701 return fold_build2 (code, type, arg0, tem);
8705 /* We only do these simplifications if we are optimizing. */
8709 /* Check for things like (A || B) && (A || C). We can convert this
8710 to A || (B && C). Note that either operator can be any of the four
8711 truth and/or operations and the transformation will still be
8712 valid. Also note that we only care about order for the
8713 ANDIF and ORIF operators. If B contains side effects, this
8714 might change the truth-value of A. */
8715 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8716 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8717 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8718 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8719 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8720 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8722 tree a00 = TREE_OPERAND (arg0, 0);
8723 tree a01 = TREE_OPERAND (arg0, 1);
8724 tree a10 = TREE_OPERAND (arg1, 0);
8725 tree a11 = TREE_OPERAND (arg1, 1);
8726 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8727 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8728 && (code == TRUTH_AND_EXPR
8729 || code == TRUTH_OR_EXPR));
8731 if (operand_equal_p (a00, a10, 0))
8732 return fold_build2 (TREE_CODE (arg0), type, a00,
8733 fold_build2 (code, type, a01, a11));
8734 else if (commutative && operand_equal_p (a00, a11, 0))
8735 return fold_build2 (TREE_CODE (arg0), type, a00,
8736 fold_build2 (code, type, a01, a10));
8737 else if (commutative && operand_equal_p (a01, a10, 0))
8738 return fold_build2 (TREE_CODE (arg0), type, a01,
8739 fold_build2 (code, type, a00, a11));
8741 /* This case if tricky because we must either have commutative
8742 operators or else A10 must not have side-effects. */
8744 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8745 && operand_equal_p (a01, a11, 0))
8746 return fold_build2 (TREE_CODE (arg0), type,
8747 fold_build2 (code, type, a00, a10),
8751 /* See if we can build a range comparison. */
8752 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8755 /* Check for the possibility of merging component references. If our
8756 lhs is another similar operation, try to merge its rhs with our
8757 rhs. Then try to merge our lhs and rhs. */
8758 if (TREE_CODE (arg0) == code
8759 && 0 != (tem = fold_truthop (code, type,
8760 TREE_OPERAND (arg0, 1), arg1)))
8761 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8763 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8768 case TRUTH_ORIF_EXPR:
8769 /* Note that the operands of this must be ints
8770 and their values must be 0 or true.
8771 ("true" is a fixed value perhaps depending on the language.) */
8772 /* If first arg is constant true, return it. */
8773 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8774 return fold_convert (type, arg0);
8776 /* If either arg is constant zero, drop it. */
8777 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8778 return non_lvalue (fold_convert (type, arg1));
8779 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8780 /* Preserve sequence points. */
8781 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8782 return non_lvalue (fold_convert (type, arg0));
8783 /* If second arg is constant true, result is true, but we must
8784 evaluate first arg. */
8785 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8786 return omit_one_operand (type, arg1, arg0);
8787 /* Likewise for first arg, but note this only occurs here for
8789 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8790 return omit_one_operand (type, arg0, arg1);
8792 /* !X || X is always true. */
8793 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8794 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8795 return omit_one_operand (type, integer_one_node, arg1);
8796 /* X || !X is always true. */
8797 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8798 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8799 return omit_one_operand (type, integer_one_node, arg0);
8803 case TRUTH_XOR_EXPR:
8804 /* If the second arg is constant zero, drop it. */
8805 if (integer_zerop (arg1))
8806 return non_lvalue (fold_convert (type, arg0));
8807 /* If the second arg is constant true, this is a logical inversion. */
8808 if (integer_onep (arg1))
8810 /* Only call invert_truthvalue if operand is a truth value. */
8811 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8812 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8814 tem = invert_truthvalue (arg0);
8815 return non_lvalue (fold_convert (type, tem));
8817 /* Identical arguments cancel to zero. */
8818 if (operand_equal_p (arg0, arg1, 0))
8819 return omit_one_operand (type, integer_zero_node, arg0);
8821 /* !X ^ X is always true. */
8822 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8823 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8824 return omit_one_operand (type, integer_one_node, arg1);
8826 /* X ^ !X is always true. */
8827 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8828 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8829 return omit_one_operand (type, integer_one_node, arg0);
8839 /* If one arg is a real or integer constant, put it last. */
8840 if (tree_swap_operands_p (arg0, arg1, true))
8841 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8843 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8844 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8845 && (code == NE_EXPR || code == EQ_EXPR))
8846 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8847 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8850 /* bool_var != 0 becomes bool_var. */
8851 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8853 return non_lvalue (fold_convert (type, arg0));
8855 /* bool_var == 1 becomes bool_var. */
8856 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8858 return non_lvalue (fold_convert (type, arg0));
8860 /* If this is an equality comparison of the address of a non-weak
8861 object against zero, then we know the result. */
8862 if ((code == EQ_EXPR || code == NE_EXPR)
8863 && TREE_CODE (arg0) == ADDR_EXPR
8864 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8865 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8866 && integer_zerop (arg1))
8867 return constant_boolean_node (code != EQ_EXPR, type);
8869 /* If this is an equality comparison of the address of two non-weak,
8870 unaliased symbols neither of which are extern (since we do not
8871 have access to attributes for externs), then we know the result. */
8872 if ((code == EQ_EXPR || code == NE_EXPR)
8873 && TREE_CODE (arg0) == ADDR_EXPR
8874 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8875 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8876 && ! lookup_attribute ("alias",
8877 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8878 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8879 && TREE_CODE (arg1) == ADDR_EXPR
8880 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8881 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8882 && ! lookup_attribute ("alias",
8883 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8884 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8886 /* We know that we're looking at the address of two
8887 non-weak, unaliased, static _DECL nodes.
8889 It is both wasteful and incorrect to call operand_equal_p
8890 to compare the two ADDR_EXPR nodes. It is wasteful in that
8891 all we need to do is test pointer equality for the arguments
8892 to the two ADDR_EXPR nodes. It is incorrect to use
8893 operand_equal_p as that function is NOT equivalent to a
8894 C equality test. It can in fact return false for two
8895 objects which would test as equal using the C equality
8897 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8898 return constant_boolean_node (equal
8899 ? code == EQ_EXPR : code != EQ_EXPR,
8903 /* If this is a comparison of two exprs that look like an
8904 ARRAY_REF of the same object, then we can fold this to a
8905 comparison of the two offsets. */
8906 if (TREE_CODE_CLASS (code) == tcc_comparison)
8908 tree base0, offset0, base1, offset1;
8910 if (extract_array_ref (arg0, &base0, &offset0)
8911 && extract_array_ref (arg1, &base1, &offset1)
8912 && operand_equal_p (base0, base1, 0))
8914 /* Handle no offsets on both sides specially. */
8915 if (offset0 == NULL_TREE
8916 && offset1 == NULL_TREE)
8917 return fold_build2 (code, type, integer_zero_node,
8920 if (!offset0 || !offset1
8921 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8923 if (offset0 == NULL_TREE)
8924 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8925 if (offset1 == NULL_TREE)
8926 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8927 return fold_build2 (code, type, offset0, offset1);
8932 /* Transform comparisons of the form X +- C CMP X. */
8933 if ((code != EQ_EXPR && code != NE_EXPR)
8934 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8935 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8936 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8937 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8938 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8939 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8940 && !(flag_wrapv || flag_trapv))))
8942 tree arg01 = TREE_OPERAND (arg0, 1);
8943 enum tree_code code0 = TREE_CODE (arg0);
8946 if (TREE_CODE (arg01) == REAL_CST)
8947 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8949 is_positive = tree_int_cst_sgn (arg01);
8951 /* (X - c) > X becomes false. */
8953 && ((code0 == MINUS_EXPR && is_positive >= 0)
8954 || (code0 == PLUS_EXPR && is_positive <= 0)))
8955 return constant_boolean_node (0, type);
8957 /* Likewise (X + c) < X becomes false. */
8959 && ((code0 == PLUS_EXPR && is_positive >= 0)
8960 || (code0 == MINUS_EXPR && is_positive <= 0)))
8961 return constant_boolean_node (0, type);
8963 /* Convert (X - c) <= X to true. */
8964 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8966 && ((code0 == MINUS_EXPR && is_positive >= 0)
8967 || (code0 == PLUS_EXPR && is_positive <= 0)))
8968 return constant_boolean_node (1, type);
8970 /* Convert (X + c) >= X to true. */
8971 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8973 && ((code0 == PLUS_EXPR && is_positive >= 0)
8974 || (code0 == MINUS_EXPR && is_positive <= 0)))
8975 return constant_boolean_node (1, type);
8977 if (TREE_CODE (arg01) == INTEGER_CST)
8979 /* Convert X + c > X and X - c < X to true for integers. */
8981 && ((code0 == PLUS_EXPR && is_positive > 0)
8982 || (code0 == MINUS_EXPR && is_positive < 0)))
8983 return constant_boolean_node (1, type);
8986 && ((code0 == MINUS_EXPR && is_positive > 0)
8987 || (code0 == PLUS_EXPR && is_positive < 0)))
8988 return constant_boolean_node (1, type);
8990 /* Convert X + c <= X and X - c >= X to false for integers. */
8992 && ((code0 == PLUS_EXPR && is_positive > 0)
8993 || (code0 == MINUS_EXPR && is_positive < 0)))
8994 return constant_boolean_node (0, type);
8997 && ((code0 == MINUS_EXPR && is_positive > 0)
8998 || (code0 == PLUS_EXPR && is_positive < 0)))
8999 return constant_boolean_node (0, type);
9003 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9004 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9005 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9006 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9007 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9008 && !(flag_wrapv || flag_trapv))
9009 && (TREE_CODE (arg1) == INTEGER_CST
9010 && !TREE_OVERFLOW (arg1)))
9012 tree const1 = TREE_OPERAND (arg0, 1);
9014 tree variable = TREE_OPERAND (arg0, 0);
9017 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9019 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9020 TREE_TYPE (arg1), const2, const1);
9021 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9022 && (TREE_CODE (lhs) != INTEGER_CST
9023 || !TREE_OVERFLOW (lhs)))
9024 return fold_build2 (code, type, variable, lhs);
9027 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9029 tree targ0 = strip_float_extensions (arg0);
9030 tree targ1 = strip_float_extensions (arg1);
9031 tree newtype = TREE_TYPE (targ0);
9033 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9034 newtype = TREE_TYPE (targ1);
9036 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9037 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9038 return fold_build2 (code, type, fold_convert (newtype, targ0),
9039 fold_convert (newtype, targ1));
9041 /* (-a) CMP (-b) -> b CMP a */
9042 if (TREE_CODE (arg0) == NEGATE_EXPR
9043 && TREE_CODE (arg1) == NEGATE_EXPR)
9044 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9045 TREE_OPERAND (arg0, 0));
9047 if (TREE_CODE (arg1) == REAL_CST)
9049 REAL_VALUE_TYPE cst;
9050 cst = TREE_REAL_CST (arg1);
9052 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9053 if (TREE_CODE (arg0) == NEGATE_EXPR)
9055 fold_build2 (swap_tree_comparison (code), type,
9056 TREE_OPERAND (arg0, 0),
9057 build_real (TREE_TYPE (arg1),
9058 REAL_VALUE_NEGATE (cst)));
9060 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9061 /* a CMP (-0) -> a CMP 0 */
9062 if (REAL_VALUE_MINUS_ZERO (cst))
9063 return fold_build2 (code, type, arg0,
9064 build_real (TREE_TYPE (arg1), dconst0));
9066 /* x != NaN is always true, other ops are always false. */
9067 if (REAL_VALUE_ISNAN (cst)
9068 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9070 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9071 return omit_one_operand (type, tem, arg0);
9074 /* Fold comparisons against infinity. */
9075 if (REAL_VALUE_ISINF (cst))
9077 tem = fold_inf_compare (code, type, arg0, arg1);
9078 if (tem != NULL_TREE)
9083 /* If this is a comparison of a real constant with a PLUS_EXPR
9084 or a MINUS_EXPR of a real constant, we can convert it into a
9085 comparison with a revised real constant as long as no overflow
9086 occurs when unsafe_math_optimizations are enabled. */
9087 if (flag_unsafe_math_optimizations
9088 && TREE_CODE (arg1) == REAL_CST
9089 && (TREE_CODE (arg0) == PLUS_EXPR
9090 || TREE_CODE (arg0) == MINUS_EXPR)
9091 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9092 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9093 ? MINUS_EXPR : PLUS_EXPR,
9094 arg1, TREE_OPERAND (arg0, 1), 0))
9095 && ! TREE_CONSTANT_OVERFLOW (tem))
9096 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9098 /* Likewise, we can simplify a comparison of a real constant with
9099 a MINUS_EXPR whose first operand is also a real constant, i.e.
9100 (c1 - x) < c2 becomes x > c1-c2. */
9101 if (flag_unsafe_math_optimizations
9102 && TREE_CODE (arg1) == REAL_CST
9103 && TREE_CODE (arg0) == MINUS_EXPR
9104 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9105 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9107 && ! TREE_CONSTANT_OVERFLOW (tem))
9108 return fold_build2 (swap_tree_comparison (code), type,
9109 TREE_OPERAND (arg0, 1), tem);
9111 /* Fold comparisons against built-in math functions. */
9112 if (TREE_CODE (arg1) == REAL_CST
9113 && flag_unsafe_math_optimizations
9114 && ! flag_errno_math)
9116 enum built_in_function fcode = builtin_mathfn_code (arg0);
9118 if (fcode != END_BUILTINS)
9120 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9121 if (tem != NULL_TREE)
9127 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9128 if (TREE_CONSTANT (arg1)
9129 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9130 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9131 /* This optimization is invalid for ordered comparisons
9132 if CONST+INCR overflows or if foo+incr might overflow.
9133 This optimization is invalid for floating point due to rounding.
9134 For pointer types we assume overflow doesn't happen. */
9135 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9136 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9137 && (code == EQ_EXPR || code == NE_EXPR))))
9139 tree varop, newconst;
9141 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9143 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9144 arg1, TREE_OPERAND (arg0, 1));
9145 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9146 TREE_OPERAND (arg0, 0),
9147 TREE_OPERAND (arg0, 1));
9151 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9152 arg1, TREE_OPERAND (arg0, 1));
9153 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9154 TREE_OPERAND (arg0, 0),
9155 TREE_OPERAND (arg0, 1));
9159 /* If VAROP is a reference to a bitfield, we must mask
9160 the constant by the width of the field. */
9161 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9162 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9163 && host_integerp (DECL_SIZE (TREE_OPERAND
9164 (TREE_OPERAND (varop, 0), 1)), 1))
9166 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9167 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9168 tree folded_compare, shift;
9170 /* First check whether the comparison would come out
9171 always the same. If we don't do that we would
9172 change the meaning with the masking. */
9173 folded_compare = fold_build2 (code, type,
9174 TREE_OPERAND (varop, 0), arg1);
9175 if (integer_zerop (folded_compare)
9176 || integer_onep (folded_compare))
9177 return omit_one_operand (type, folded_compare, varop);
9179 shift = build_int_cst (NULL_TREE,
9180 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9181 shift = fold_convert (TREE_TYPE (varop), shift);
9182 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9184 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9188 return fold_build2 (code, type, varop, newconst);
9191 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9192 This transformation affects the cases which are handled in later
9193 optimizations involving comparisons with non-negative constants. */
9194 if (TREE_CODE (arg1) == INTEGER_CST
9195 && TREE_CODE (arg0) != INTEGER_CST
9196 && tree_int_cst_sgn (arg1) > 0)
9201 arg1 = const_binop (MINUS_EXPR, arg1,
9202 build_int_cst (TREE_TYPE (arg1), 1), 0);
9203 return fold_build2 (GT_EXPR, type, arg0,
9204 fold_convert (TREE_TYPE (arg0), arg1));
9207 arg1 = const_binop (MINUS_EXPR, arg1,
9208 build_int_cst (TREE_TYPE (arg1), 1), 0);
9209 return fold_build2 (LE_EXPR, type, arg0,
9210 fold_convert (TREE_TYPE (arg0), arg1));
9217 /* Comparisons with the highest or lowest possible integer of
9218 the specified size will have known values. */
9220 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9222 if (TREE_CODE (arg1) == INTEGER_CST
9223 && ! TREE_CONSTANT_OVERFLOW (arg1)
9224 && width <= 2 * HOST_BITS_PER_WIDE_INT
9225 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9226 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9228 HOST_WIDE_INT signed_max_hi;
9229 unsigned HOST_WIDE_INT signed_max_lo;
9230 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9232 if (width <= HOST_BITS_PER_WIDE_INT)
9234 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9239 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9241 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9247 max_lo = signed_max_lo;
9248 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9254 width -= HOST_BITS_PER_WIDE_INT;
9256 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9261 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9263 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9268 max_hi = signed_max_hi;
9269 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9273 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9274 && TREE_INT_CST_LOW (arg1) == max_lo)
9278 return omit_one_operand (type, integer_zero_node, arg0);
9281 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9284 return omit_one_operand (type, integer_one_node, arg0);
9287 return fold_build2 (NE_EXPR, type, arg0, arg1);
9289 /* The GE_EXPR and LT_EXPR cases above are not normally
9290 reached because of previous transformations. */
9295 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9297 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9301 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9302 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9304 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9305 return fold_build2 (NE_EXPR, type, arg0, arg1);
9309 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9311 && TREE_INT_CST_LOW (arg1) == min_lo)
9315 return omit_one_operand (type, integer_zero_node, arg0);
9318 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9321 return omit_one_operand (type, integer_one_node, arg0);
9324 return fold_build2 (NE_EXPR, type, op0, op1);
9329 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9331 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9335 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9336 return fold_build2 (NE_EXPR, type, arg0, arg1);
9338 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9339 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9344 else if (!in_gimple_form
9345 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9346 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9347 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9348 /* signed_type does not work on pointer types. */
9349 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9351 /* The following case also applies to X < signed_max+1
9352 and X >= signed_max+1 because previous transformations. */
9353 if (code == LE_EXPR || code == GT_EXPR)
9356 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9357 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9358 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9359 type, fold_convert (st0, arg0),
9360 build_int_cst (st1, 0));
9366 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9367 a MINUS_EXPR of a constant, we can convert it into a comparison with
9368 a revised constant as long as no overflow occurs. */
9369 if ((code == EQ_EXPR || code == NE_EXPR)
9370 && TREE_CODE (arg1) == INTEGER_CST
9371 && (TREE_CODE (arg0) == PLUS_EXPR
9372 || TREE_CODE (arg0) == MINUS_EXPR)
9373 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9374 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9375 ? MINUS_EXPR : PLUS_EXPR,
9376 arg1, TREE_OPERAND (arg0, 1), 0))
9377 && ! TREE_CONSTANT_OVERFLOW (tem))
9378 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9380 /* Similarly for a NEGATE_EXPR. */
9381 else if ((code == EQ_EXPR || code == NE_EXPR)
9382 && TREE_CODE (arg0) == NEGATE_EXPR
9383 && TREE_CODE (arg1) == INTEGER_CST
9384 && 0 != (tem = negate_expr (arg1))
9385 && TREE_CODE (tem) == INTEGER_CST
9386 && ! TREE_CONSTANT_OVERFLOW (tem))
9387 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9389 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9390 for !=. Don't do this for ordered comparisons due to overflow. */
9391 else if ((code == NE_EXPR || code == EQ_EXPR)
9392 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9393 return fold_build2 (code, type,
9394 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9396 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9397 && (TREE_CODE (arg0) == NOP_EXPR
9398 || TREE_CODE (arg0) == CONVERT_EXPR))
9400 /* If we are widening one operand of an integer comparison,
9401 see if the other operand is similarly being widened. Perhaps we
9402 can do the comparison in the narrower type. */
9403 tem = fold_widened_comparison (code, type, arg0, arg1);
9407 /* Or if we are changing signedness. */
9408 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9413 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9414 constant, we can simplify it. */
9415 else if (TREE_CODE (arg1) == INTEGER_CST
9416 && (TREE_CODE (arg0) == MIN_EXPR
9417 || TREE_CODE (arg0) == MAX_EXPR)
9418 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9420 tem = optimize_minmax_comparison (code, type, op0, op1);
9427 /* If we are comparing an ABS_EXPR with a constant, we can
9428 convert all the cases into explicit comparisons, but they may
9429 well not be faster than doing the ABS and one comparison.
9430 But ABS (X) <= C is a range comparison, which becomes a subtraction
9431 and a comparison, and is probably faster. */
9432 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9433 && TREE_CODE (arg0) == ABS_EXPR
9434 && ! TREE_SIDE_EFFECTS (arg0)
9435 && (0 != (tem = negate_expr (arg1)))
9436 && TREE_CODE (tem) == INTEGER_CST
9437 && ! TREE_CONSTANT_OVERFLOW (tem))
9438 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9439 build2 (GE_EXPR, type,
9440 TREE_OPERAND (arg0, 0), tem),
9441 build2 (LE_EXPR, type,
9442 TREE_OPERAND (arg0, 0), arg1));
9444 /* Convert ABS_EXPR<x> >= 0 to true. */
9445 else if (code == GE_EXPR
9446 && tree_expr_nonnegative_p (arg0)
9447 && (integer_zerop (arg1)
9448 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9449 && real_zerop (arg1))))
9450 return omit_one_operand (type, integer_one_node, arg0);
9452 /* Convert ABS_EXPR<x> < 0 to false. */
9453 else if (code == LT_EXPR
9454 && tree_expr_nonnegative_p (arg0)
9455 && (integer_zerop (arg1) || real_zerop (arg1)))
9456 return omit_one_operand (type, integer_zero_node, arg0);
9458 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9459 else if ((code == EQ_EXPR || code == NE_EXPR)
9460 && TREE_CODE (arg0) == ABS_EXPR
9461 && (integer_zerop (arg1) || real_zerop (arg1)))
9462 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9464 /* If this is an EQ or NE comparison with zero and ARG0 is
9465 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9466 two operations, but the latter can be done in one less insn
9467 on machines that have only two-operand insns or on which a
9468 constant cannot be the first operand. */
9469 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9470 && TREE_CODE (arg0) == BIT_AND_EXPR)
9472 tree arg00 = TREE_OPERAND (arg0, 0);
9473 tree arg01 = TREE_OPERAND (arg0, 1);
9474 if (TREE_CODE (arg00) == LSHIFT_EXPR
9475 && integer_onep (TREE_OPERAND (arg00, 0)))
9477 fold_build2 (code, type,
9478 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9479 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9480 arg01, TREE_OPERAND (arg00, 1)),
9481 fold_convert (TREE_TYPE (arg0),
9484 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9485 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9487 fold_build2 (code, type,
9488 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9489 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9490 arg00, TREE_OPERAND (arg01, 1)),
9491 fold_convert (TREE_TYPE (arg0),
9496 /* If this is an NE or EQ comparison of zero against the result of a
9497 signed MOD operation whose second operand is a power of 2, make
9498 the MOD operation unsigned since it is simpler and equivalent. */
9499 if ((code == NE_EXPR || code == EQ_EXPR)
9500 && integer_zerop (arg1)
9501 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9502 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9503 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9504 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9505 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9506 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9508 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9509 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9510 fold_convert (newtype,
9511 TREE_OPERAND (arg0, 0)),
9512 fold_convert (newtype,
9513 TREE_OPERAND (arg0, 1)));
9515 return fold_build2 (code, type, newmod,
9516 fold_convert (newtype, arg1));
9519 /* If this is an NE comparison of zero with an AND of one, remove the
9520 comparison since the AND will give the correct value. */
9521 if (code == NE_EXPR && integer_zerop (arg1)
9522 && TREE_CODE (arg0) == BIT_AND_EXPR
9523 && integer_onep (TREE_OPERAND (arg0, 1)))
9524 return fold_convert (type, arg0);
9526 /* If we have (A & C) == C where C is a power of 2, convert this into
9527 (A & C) != 0. Similarly for NE_EXPR. */
9528 if ((code == EQ_EXPR || code == NE_EXPR)
9529 && TREE_CODE (arg0) == BIT_AND_EXPR
9530 && integer_pow2p (TREE_OPERAND (arg0, 1))
9531 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9532 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9533 arg0, fold_convert (TREE_TYPE (arg0),
9534 integer_zero_node));
9536 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9537 bit, then fold the expression into A < 0 or A >= 0. */
9538 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9542 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9543 Similarly for NE_EXPR. */
9544 if ((code == EQ_EXPR || code == NE_EXPR)
9545 && TREE_CODE (arg0) == BIT_AND_EXPR
9546 && TREE_CODE (arg1) == INTEGER_CST
9547 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9549 tree notc = fold_build1 (BIT_NOT_EXPR,
9550 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9551 TREE_OPERAND (arg0, 1));
9552 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9554 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9555 if (integer_nonzerop (dandnotc))
9556 return omit_one_operand (type, rslt, arg0);
9559 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9560 Similarly for NE_EXPR. */
9561 if ((code == EQ_EXPR || code == NE_EXPR)
9562 && TREE_CODE (arg0) == BIT_IOR_EXPR
9563 && TREE_CODE (arg1) == INTEGER_CST
9564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9566 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9567 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9568 TREE_OPERAND (arg0, 1), notd);
9569 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9570 if (integer_nonzerop (candnotd))
9571 return omit_one_operand (type, rslt, arg0);
9574 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9575 and similarly for >= into !=. */
9576 if ((code == LT_EXPR || code == GE_EXPR)
9577 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9578 && TREE_CODE (arg1) == LSHIFT_EXPR
9579 && integer_onep (TREE_OPERAND (arg1, 0)))
9580 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9581 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9582 TREE_OPERAND (arg1, 1)),
9583 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9585 else if ((code == LT_EXPR || code == GE_EXPR)
9586 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9587 && (TREE_CODE (arg1) == NOP_EXPR
9588 || TREE_CODE (arg1) == CONVERT_EXPR)
9589 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9590 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9592 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9593 fold_convert (TREE_TYPE (arg0),
9594 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9595 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9597 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9599 /* Simplify comparison of something with itself. (For IEEE
9600 floating-point, we can only do some of these simplifications.) */
9601 if (operand_equal_p (arg0, arg1, 0))
9606 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9607 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9608 return constant_boolean_node (1, type);
9613 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9614 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9615 return constant_boolean_node (1, type);
9616 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9619 /* For NE, we can only do this simplification if integer
9620 or we don't honor IEEE floating point NaNs. */
9621 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9622 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9624 /* ... fall through ... */
9627 return constant_boolean_node (0, type);
9633 /* If we are comparing an expression that just has comparisons
9634 of two integer values, arithmetic expressions of those comparisons,
9635 and constants, we can simplify it. There are only three cases
9636 to check: the two values can either be equal, the first can be
9637 greater, or the second can be greater. Fold the expression for
9638 those three values. Since each value must be 0 or 1, we have
9639 eight possibilities, each of which corresponds to the constant 0
9640 or 1 or one of the six possible comparisons.
9642 This handles common cases like (a > b) == 0 but also handles
9643 expressions like ((x > y) - (y > x)) > 0, which supposedly
9644 occur in macroized code. */
9646 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9648 tree cval1 = 0, cval2 = 0;
9651 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9652 /* Don't handle degenerate cases here; they should already
9653 have been handled anyway. */
9654 && cval1 != 0 && cval2 != 0
9655 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9656 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9657 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9658 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9659 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9660 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9661 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9663 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9664 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9666 /* We can't just pass T to eval_subst in case cval1 or cval2
9667 was the same as ARG1. */
9670 = fold_build2 (code, type,
9671 eval_subst (arg0, cval1, maxval,
9675 = fold_build2 (code, type,
9676 eval_subst (arg0, cval1, maxval,
9680 = fold_build2 (code, type,
9681 eval_subst (arg0, cval1, minval,
9685 /* All three of these results should be 0 or 1. Confirm they
9686 are. Then use those values to select the proper code
9689 if ((integer_zerop (high_result)
9690 || integer_onep (high_result))
9691 && (integer_zerop (equal_result)
9692 || integer_onep (equal_result))
9693 && (integer_zerop (low_result)
9694 || integer_onep (low_result)))
9696 /* Make a 3-bit mask with the high-order bit being the
9697 value for `>', the next for '=', and the low for '<'. */
9698 switch ((integer_onep (high_result) * 4)
9699 + (integer_onep (equal_result) * 2)
9700 + integer_onep (low_result))
9704 return omit_one_operand (type, integer_zero_node, arg0);
9725 return omit_one_operand (type, integer_one_node, arg0);
9729 return save_expr (build2 (code, type, cval1, cval2));
9731 return fold_build2 (code, type, cval1, cval2);
9736 /* If this is a comparison of a field, we may be able to simplify it. */
9737 if (((TREE_CODE (arg0) == COMPONENT_REF
9738 && lang_hooks.can_use_bit_fields_p ())
9739 || TREE_CODE (arg0) == BIT_FIELD_REF)
9740 && (code == EQ_EXPR || code == NE_EXPR)
9741 /* Handle the constant case even without -O
9742 to make sure the warnings are given. */
9743 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9745 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9750 /* Fold a comparison of the address of COMPONENT_REFs with the same
9751 type and component to a comparison of the address of the base
9752 object. In short, &x->a OP &y->a to x OP y and
9753 &x->a OP &y.a to x OP &y */
9754 if (TREE_CODE (arg0) == ADDR_EXPR
9755 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9756 && TREE_CODE (arg1) == ADDR_EXPR
9757 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9759 tree cref0 = TREE_OPERAND (arg0, 0);
9760 tree cref1 = TREE_OPERAND (arg1, 0);
9761 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9763 tree op0 = TREE_OPERAND (cref0, 0);
9764 tree op1 = TREE_OPERAND (cref1, 0);
9765 return fold_build2 (code, type,
9766 build_fold_addr_expr (op0),
9767 build_fold_addr_expr (op1));
9771 /* Optimize comparisons of strlen vs zero to a compare of the
9772 first character of the string vs zero. To wit,
9773 strlen(ptr) == 0 => *ptr == 0
9774 strlen(ptr) != 0 => *ptr != 0
9775 Other cases should reduce to one of these two (or a constant)
9776 due to the return value of strlen being unsigned. */
9777 if ((code == EQ_EXPR || code == NE_EXPR)
9778 && integer_zerop (arg1)
9779 && TREE_CODE (arg0) == CALL_EXPR)
9781 tree fndecl = get_callee_fndecl (arg0);
9785 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9786 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9787 && (arglist = TREE_OPERAND (arg0, 1))
9788 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9789 && ! TREE_CHAIN (arglist))
9791 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9792 return fold_build2 (code, type, iref,
9793 build_int_cst (TREE_TYPE (iref), 0));
9797 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9798 into a single range test. */
9799 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9800 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9801 && TREE_CODE (arg1) == INTEGER_CST
9802 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9803 && !integer_zerop (TREE_OPERAND (arg0, 1))
9804 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9805 && !TREE_OVERFLOW (arg1))
9807 t1 = fold_div_compare (code, type, arg0, arg1);
9808 if (t1 != NULL_TREE)
9812 if ((code == EQ_EXPR || code == NE_EXPR)
9813 && integer_zerop (arg1)
9814 && tree_expr_nonzero_p (arg0))
9816 tree res = constant_boolean_node (code==NE_EXPR, type);
9817 return omit_one_operand (type, res, arg0);
9820 t1 = fold_relational_const (code, type, arg0, arg1);
9821 return t1 == NULL_TREE ? NULL_TREE : t1;
9823 case UNORDERED_EXPR:
9831 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9833 t1 = fold_relational_const (code, type, arg0, arg1);
9834 if (t1 != NULL_TREE)
9838 /* If the first operand is NaN, the result is constant. */
9839 if (TREE_CODE (arg0) == REAL_CST
9840 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9841 && (code != LTGT_EXPR || ! flag_trapping_math))
9843 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9846 return omit_one_operand (type, t1, arg1);
9849 /* If the second operand is NaN, the result is constant. */
9850 if (TREE_CODE (arg1) == REAL_CST
9851 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9852 && (code != LTGT_EXPR || ! flag_trapping_math))
9854 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9857 return omit_one_operand (type, t1, arg0);
9860 /* Simplify unordered comparison of something with itself. */
9861 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9862 && operand_equal_p (arg0, arg1, 0))
9863 return constant_boolean_node (1, type);
9865 if (code == LTGT_EXPR
9866 && !flag_trapping_math
9867 && operand_equal_p (arg0, arg1, 0))
9868 return constant_boolean_node (0, type);
9870 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9872 tree targ0 = strip_float_extensions (arg0);
9873 tree targ1 = strip_float_extensions (arg1);
9874 tree newtype = TREE_TYPE (targ0);
9876 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9877 newtype = TREE_TYPE (targ1);
9879 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9880 return fold_build2 (code, type, fold_convert (newtype, targ0),
9881 fold_convert (newtype, targ1));
9887 /* When pedantic, a compound expression can be neither an lvalue
9888 nor an integer constant expression. */
9889 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9891 /* Don't let (0, 0) be null pointer constant. */
9892 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9893 : fold_convert (type, arg1);
9894 return pedantic_non_lvalue (tem);
9898 return build_complex (type, arg0, arg1);
9902 /* An ASSERT_EXPR should never be passed to fold_binary. */
9907 } /* switch (code) */
9910 /* Callback for walk_tree, looking for LABEL_EXPR.
9911 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
9912 Do not check the sub-tree of GOTO_EXPR. */
9915 contains_label_1 (tree *tp,
9917 void *data ATTRIBUTE_UNUSED)
9919 switch (TREE_CODE (*tp))
9931 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9932 accessible from outside the sub-tree. Returns NULL_TREE if no
9933 addressable label is found. */
9936 contains_label_p (tree st)
9938 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9941 /* Fold a ternary expression of code CODE and type TYPE with operands
9942 OP0, OP1, and OP2. Return the folded expression if folding is
9943 successful. Otherwise, return NULL_TREE. */
9946 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9949 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9950 enum tree_code_class kind = TREE_CODE_CLASS (code);
9952 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9953 && TREE_CODE_LENGTH (code) == 3);
9955 /* Strip any conversions that don't change the mode. This is safe
9956 for every expression, except for a comparison expression because
9957 its signedness is derived from its operands. So, in the latter
9958 case, only strip conversions that don't change the signedness.
9960 Note that this is done as an internal manipulation within the
9961 constant folder, in order to find the simplest representation of
9962 the arguments so that their form can be studied. In any cases,
9963 the appropriate type conversions should be put back in the tree
9964 that will get out of the constant folder. */
9980 if (TREE_CODE (arg0) == CONSTRUCTOR
9981 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9983 unsigned HOST_WIDE_INT idx;
9985 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9992 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9993 so all simple results must be passed through pedantic_non_lvalue. */
9994 if (TREE_CODE (arg0) == INTEGER_CST)
9996 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9997 tem = integer_zerop (arg0) ? op2 : op1;
9998 /* Only optimize constant conditions when the selected branch
9999 has the same type as the COND_EXPR. This avoids optimizing
10000 away "c ? x : throw", where the throw has a void type.
10001 Avoid throwing away that operand which contains label. */
10002 if ((!TREE_SIDE_EFFECTS (unused_op)
10003 || !contains_label_p (unused_op))
10004 && (! VOID_TYPE_P (TREE_TYPE (tem))
10005 || VOID_TYPE_P (type)))
10006 return pedantic_non_lvalue (tem);
10009 if (operand_equal_p (arg1, op2, 0))
10010 return pedantic_omit_one_operand (type, arg1, arg0);
10012 /* If we have A op B ? A : C, we may be able to convert this to a
10013 simpler expression, depending on the operation and the values
10014 of B and C. Signed zeros prevent all of these transformations,
10015 for reasons given above each one.
10017 Also try swapping the arguments and inverting the conditional. */
10018 if (COMPARISON_CLASS_P (arg0)
10019 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10020 arg1, TREE_OPERAND (arg0, 1))
10021 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10023 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10028 if (COMPARISON_CLASS_P (arg0)
10029 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10031 TREE_OPERAND (arg0, 1))
10032 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10034 tem = invert_truthvalue (arg0);
10035 if (COMPARISON_CLASS_P (tem))
10037 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10043 /* If the second operand is simpler than the third, swap them
10044 since that produces better jump optimization results. */
10045 if (truth_value_p (TREE_CODE (arg0))
10046 && tree_swap_operands_p (op1, op2, false))
10048 /* See if this can be inverted. If it can't, possibly because
10049 it was a floating-point inequality comparison, don't do
10051 tem = invert_truthvalue (arg0);
10053 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10054 return fold_build3 (code, type, tem, op2, op1);
10057 /* Convert A ? 1 : 0 to simply A. */
10058 if (integer_onep (op1)
10059 && integer_zerop (op2)
10060 /* If we try to convert OP0 to our type, the
10061 call to fold will try to move the conversion inside
10062 a COND, which will recurse. In that case, the COND_EXPR
10063 is probably the best choice, so leave it alone. */
10064 && type == TREE_TYPE (arg0))
10065 return pedantic_non_lvalue (arg0);
10067 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10068 over COND_EXPR in cases such as floating point comparisons. */
10069 if (integer_zerop (op1)
10070 && integer_onep (op2)
10071 && truth_value_p (TREE_CODE (arg0)))
10072 return pedantic_non_lvalue (fold_convert (type,
10073 invert_truthvalue (arg0)));
10075 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10076 if (TREE_CODE (arg0) == LT_EXPR
10077 && integer_zerop (TREE_OPERAND (arg0, 1))
10078 && integer_zerop (op2)
10079 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10080 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10081 TREE_TYPE (tem), tem, arg1));
10083 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10084 already handled above. */
10085 if (TREE_CODE (arg0) == BIT_AND_EXPR
10086 && integer_onep (TREE_OPERAND (arg0, 1))
10087 && integer_zerop (op2)
10088 && integer_pow2p (arg1))
10090 tree tem = TREE_OPERAND (arg0, 0);
10092 if (TREE_CODE (tem) == RSHIFT_EXPR
10093 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10094 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10095 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10096 return fold_build2 (BIT_AND_EXPR, type,
10097 TREE_OPERAND (tem, 0), arg1);
10100 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10101 is probably obsolete because the first operand should be a
10102 truth value (that's why we have the two cases above), but let's
10103 leave it in until we can confirm this for all front-ends. */
10104 if (integer_zerop (op2)
10105 && TREE_CODE (arg0) == NE_EXPR
10106 && integer_zerop (TREE_OPERAND (arg0, 1))
10107 && integer_pow2p (arg1)
10108 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10109 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10110 arg1, OEP_ONLY_CONST))
10111 return pedantic_non_lvalue (fold_convert (type,
10112 TREE_OPERAND (arg0, 0)));
10114 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10115 if (integer_zerop (op2)
10116 && truth_value_p (TREE_CODE (arg0))
10117 && truth_value_p (TREE_CODE (arg1)))
10118 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10120 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10121 if (integer_onep (op2)
10122 && truth_value_p (TREE_CODE (arg0))
10123 && truth_value_p (TREE_CODE (arg1)))
10125 /* Only perform transformation if ARG0 is easily inverted. */
10126 tem = invert_truthvalue (arg0);
10127 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10128 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10131 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10132 if (integer_zerop (arg1)
10133 && truth_value_p (TREE_CODE (arg0))
10134 && truth_value_p (TREE_CODE (op2)))
10136 /* Only perform transformation if ARG0 is easily inverted. */
10137 tem = invert_truthvalue (arg0);
10138 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10139 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10142 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10143 if (integer_onep (arg1)
10144 && truth_value_p (TREE_CODE (arg0))
10145 && truth_value_p (TREE_CODE (op2)))
10146 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10151 /* Check for a built-in function. */
10152 if (TREE_CODE (op0) == ADDR_EXPR
10153 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10154 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10155 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10158 case BIT_FIELD_REF:
10159 if (TREE_CODE (arg0) == VECTOR_CST
10160 && type == TREE_TYPE (TREE_TYPE (arg0))
10161 && host_integerp (arg1, 1)
10162 && host_integerp (op2, 1))
10164 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10165 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10168 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10169 && (idx % width) == 0
10170 && (idx = idx / width)
10171 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10173 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10174 while (idx-- > 0 && elements)
10175 elements = TREE_CHAIN (elements);
10177 return TREE_VALUE (elements);
10179 return fold_convert (type, integer_zero_node);
10186 } /* switch (code) */
10189 /* Perform constant folding and related simplification of EXPR.
10190 The related simplifications include x*1 => x, x*0 => 0, etc.,
10191 and application of the associative law.
10192 NOP_EXPR conversions may be removed freely (as long as we
10193 are careful not to change the type of the overall expression).
10194 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10195 but we can constant-fold them if they have constant operands. */
10197 #ifdef ENABLE_FOLD_CHECKING
10198 # define fold(x) fold_1 (x)
10199 static tree fold_1 (tree);
10205 const tree t = expr;
10206 enum tree_code code = TREE_CODE (t);
10207 enum tree_code_class kind = TREE_CODE_CLASS (code);
10210 /* Return right away if a constant. */
10211 if (kind == tcc_constant)
10214 if (IS_EXPR_CODE_CLASS (kind))
10216 tree type = TREE_TYPE (t);
10217 tree op0, op1, op2;
10219 switch (TREE_CODE_LENGTH (code))
10222 op0 = TREE_OPERAND (t, 0);
10223 tem = fold_unary (code, type, op0);
10224 return tem ? tem : expr;
10226 op0 = TREE_OPERAND (t, 0);
10227 op1 = TREE_OPERAND (t, 1);
10228 tem = fold_binary (code, type, op0, op1);
10229 return tem ? tem : expr;
10231 op0 = TREE_OPERAND (t, 0);
10232 op1 = TREE_OPERAND (t, 1);
10233 op2 = TREE_OPERAND (t, 2);
10234 tem = fold_ternary (code, type, op0, op1, op2);
10235 return tem ? tem : expr;
10244 return fold (DECL_INITIAL (t));
10248 } /* switch (code) */
10251 #ifdef ENABLE_FOLD_CHECKING
10254 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10255 static void fold_check_failed (tree, tree);
10256 void print_fold_checksum (tree);
10258 /* When --enable-checking=fold, compute a digest of expr before
10259 and after actual fold call to see if fold did not accidentally
10260 change original expr. */
10266 struct md5_ctx ctx;
10267 unsigned char checksum_before[16], checksum_after[16];
10270 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10271 md5_init_ctx (&ctx);
10272 fold_checksum_tree (expr, &ctx, ht);
10273 md5_finish_ctx (&ctx, checksum_before);
10276 ret = fold_1 (expr);
10278 md5_init_ctx (&ctx);
10279 fold_checksum_tree (expr, &ctx, ht);
10280 md5_finish_ctx (&ctx, checksum_after);
10283 if (memcmp (checksum_before, checksum_after, 16))
10284 fold_check_failed (expr, ret);
10290 print_fold_checksum (tree expr)
10292 struct md5_ctx ctx;
10293 unsigned char checksum[16], cnt;
10296 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10297 md5_init_ctx (&ctx);
10298 fold_checksum_tree (expr, &ctx, ht);
10299 md5_finish_ctx (&ctx, checksum);
10301 for (cnt = 0; cnt < 16; ++cnt)
10302 fprintf (stderr, "%02x", checksum[cnt]);
10303 putc ('\n', stderr);
10307 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10309 internal_error ("fold check: original tree changed by fold");
10313 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10316 enum tree_code code;
10317 char buf[sizeof (struct tree_function_decl)];
10322 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10323 <= sizeof (struct tree_function_decl))
10324 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10327 slot = htab_find_slot (ht, expr, INSERT);
10331 code = TREE_CODE (expr);
10332 if (TREE_CODE_CLASS (code) == tcc_declaration
10333 && DECL_ASSEMBLER_NAME_SET_P (expr))
10335 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10336 memcpy (buf, expr, tree_size (expr));
10338 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10340 else if (TREE_CODE_CLASS (code) == tcc_type
10341 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10342 || TYPE_CACHED_VALUES_P (expr)
10343 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10345 /* Allow these fields to be modified. */
10346 memcpy (buf, expr, tree_size (expr));
10348 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10349 TYPE_POINTER_TO (expr) = NULL;
10350 TYPE_REFERENCE_TO (expr) = NULL;
10351 if (TYPE_CACHED_VALUES_P (expr))
10353 TYPE_CACHED_VALUES_P (expr) = 0;
10354 TYPE_CACHED_VALUES (expr) = NULL;
10357 md5_process_bytes (expr, tree_size (expr), ctx);
10358 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10359 if (TREE_CODE_CLASS (code) != tcc_type
10360 && TREE_CODE_CLASS (code) != tcc_declaration
10361 && code != TREE_LIST)
10362 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10363 switch (TREE_CODE_CLASS (code))
10369 md5_process_bytes (TREE_STRING_POINTER (expr),
10370 TREE_STRING_LENGTH (expr), ctx);
10373 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10374 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10377 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10383 case tcc_exceptional:
10387 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10388 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10389 expr = TREE_CHAIN (expr);
10390 goto recursive_label;
10393 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10394 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10400 case tcc_expression:
10401 case tcc_reference:
10402 case tcc_comparison:
10405 case tcc_statement:
10406 len = TREE_CODE_LENGTH (code);
10407 for (i = 0; i < len; ++i)
10408 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10410 case tcc_declaration:
10411 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10412 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10413 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10414 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10415 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10416 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10417 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10418 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10419 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10421 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10423 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10424 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10425 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10429 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10430 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10431 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10432 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10433 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10434 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10435 if (INTEGRAL_TYPE_P (expr)
10436 || SCALAR_FLOAT_TYPE_P (expr))
10438 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10439 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10441 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10442 if (TREE_CODE (expr) == RECORD_TYPE
10443 || TREE_CODE (expr) == UNION_TYPE
10444 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10445 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10446 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10455 /* Fold a unary tree expression with code CODE of type TYPE with an
10456 operand OP0. Return a folded expression if successful. Otherwise,
10457 return a tree expression with code CODE of type TYPE with an
10461 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10464 #ifdef ENABLE_FOLD_CHECKING
10465 unsigned char checksum_before[16], checksum_after[16];
10466 struct md5_ctx ctx;
10469 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10470 md5_init_ctx (&ctx);
10471 fold_checksum_tree (op0, &ctx, ht);
10472 md5_finish_ctx (&ctx, checksum_before);
10476 tem = fold_unary (code, type, op0);
10478 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10480 #ifdef ENABLE_FOLD_CHECKING
10481 md5_init_ctx (&ctx);
10482 fold_checksum_tree (op0, &ctx, ht);
10483 md5_finish_ctx (&ctx, checksum_after);
10486 if (memcmp (checksum_before, checksum_after, 16))
10487 fold_check_failed (op0, tem);
10492 /* Fold a binary tree expression with code CODE of type TYPE with
10493 operands OP0 and OP1. Return a folded expression if successful.
10494 Otherwise, return a tree expression with code CODE of type TYPE
10495 with operands OP0 and OP1. */
10498 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10502 #ifdef ENABLE_FOLD_CHECKING
10503 unsigned char checksum_before_op0[16],
10504 checksum_before_op1[16],
10505 checksum_after_op0[16],
10506 checksum_after_op1[16];
10507 struct md5_ctx ctx;
10510 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10511 md5_init_ctx (&ctx);
10512 fold_checksum_tree (op0, &ctx, ht);
10513 md5_finish_ctx (&ctx, checksum_before_op0);
10516 md5_init_ctx (&ctx);
10517 fold_checksum_tree (op1, &ctx, ht);
10518 md5_finish_ctx (&ctx, checksum_before_op1);
10522 tem = fold_binary (code, type, op0, op1);
10524 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10526 #ifdef ENABLE_FOLD_CHECKING
10527 md5_init_ctx (&ctx);
10528 fold_checksum_tree (op0, &ctx, ht);
10529 md5_finish_ctx (&ctx, checksum_after_op0);
10532 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10533 fold_check_failed (op0, tem);
10535 md5_init_ctx (&ctx);
10536 fold_checksum_tree (op1, &ctx, ht);
10537 md5_finish_ctx (&ctx, checksum_after_op1);
10540 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10541 fold_check_failed (op1, tem);
10546 /* Fold a ternary tree expression with code CODE of type TYPE with
10547 operands OP0, OP1, and OP2. Return a folded expression if
10548 successful. Otherwise, return a tree expression with code CODE of
10549 type TYPE with operands OP0, OP1, and OP2. */
10552 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10556 #ifdef ENABLE_FOLD_CHECKING
10557 unsigned char checksum_before_op0[16],
10558 checksum_before_op1[16],
10559 checksum_before_op2[16],
10560 checksum_after_op0[16],
10561 checksum_after_op1[16],
10562 checksum_after_op2[16];
10563 struct md5_ctx ctx;
10566 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10567 md5_init_ctx (&ctx);
10568 fold_checksum_tree (op0, &ctx, ht);
10569 md5_finish_ctx (&ctx, checksum_before_op0);
10572 md5_init_ctx (&ctx);
10573 fold_checksum_tree (op1, &ctx, ht);
10574 md5_finish_ctx (&ctx, checksum_before_op1);
10577 md5_init_ctx (&ctx);
10578 fold_checksum_tree (op2, &ctx, ht);
10579 md5_finish_ctx (&ctx, checksum_before_op2);
10583 tem = fold_ternary (code, type, op0, op1, op2);
10585 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10587 #ifdef ENABLE_FOLD_CHECKING
10588 md5_init_ctx (&ctx);
10589 fold_checksum_tree (op0, &ctx, ht);
10590 md5_finish_ctx (&ctx, checksum_after_op0);
10593 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10594 fold_check_failed (op0, tem);
10596 md5_init_ctx (&ctx);
10597 fold_checksum_tree (op1, &ctx, ht);
10598 md5_finish_ctx (&ctx, checksum_after_op1);
10601 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10602 fold_check_failed (op1, tem);
10604 md5_init_ctx (&ctx);
10605 fold_checksum_tree (op2, &ctx, ht);
10606 md5_finish_ctx (&ctx, checksum_after_op2);
10609 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10610 fold_check_failed (op2, tem);
10615 /* Perform constant folding and related simplification of initializer
10616 expression EXPR. These behave identically to "fold_buildN" but ignore
10617 potential run-time traps and exceptions that fold must preserve. */
10619 #define START_FOLD_INIT \
10620 int saved_signaling_nans = flag_signaling_nans;\
10621 int saved_trapping_math = flag_trapping_math;\
10622 int saved_rounding_math = flag_rounding_math;\
10623 int saved_trapv = flag_trapv;\
10624 flag_signaling_nans = 0;\
10625 flag_trapping_math = 0;\
10626 flag_rounding_math = 0;\
10629 #define END_FOLD_INIT \
10630 flag_signaling_nans = saved_signaling_nans;\
10631 flag_trapping_math = saved_trapping_math;\
10632 flag_rounding_math = saved_rounding_math;\
10633 flag_trapv = saved_trapv
10636 fold_build1_initializer (enum tree_code code, tree type, tree op)
10641 result = fold_build1 (code, type, op);
10648 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10653 result = fold_build2 (code, type, op0, op1);
10660 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10666 result = fold_build3 (code, type, op0, op1, op2);
10672 #undef START_FOLD_INIT
10673 #undef END_FOLD_INIT
10675 /* Determine if first argument is a multiple of second argument. Return 0 if
10676 it is not, or we cannot easily determined it to be.
10678 An example of the sort of thing we care about (at this point; this routine
10679 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10680 fold cases do now) is discovering that
10682 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10688 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10690 This code also handles discovering that
10692 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10694 is a multiple of 8 so we don't have to worry about dealing with a
10695 possible remainder.
10697 Note that we *look* inside a SAVE_EXPR only to determine how it was
10698 calculated; it is not safe for fold to do much of anything else with the
10699 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10700 at run time. For example, the latter example above *cannot* be implemented
10701 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10702 evaluation time of the original SAVE_EXPR is not necessarily the same at
10703 the time the new expression is evaluated. The only optimization of this
10704 sort that would be valid is changing
10706 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10710 SAVE_EXPR (I) * SAVE_EXPR (J)
10712 (where the same SAVE_EXPR (J) is used in the original and the
10713 transformed version). */
10716 multiple_of_p (tree type, tree top, tree bottom)
10718 if (operand_equal_p (top, bottom, 0))
10721 if (TREE_CODE (type) != INTEGER_TYPE)
10724 switch (TREE_CODE (top))
10727 /* Bitwise and provides a power of two multiple. If the mask is
10728 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10729 if (!integer_pow2p (bottom))
10734 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10735 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10739 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10740 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10743 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10747 op1 = TREE_OPERAND (top, 1);
10748 /* const_binop may not detect overflow correctly,
10749 so check for it explicitly here. */
10750 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10751 > TREE_INT_CST_LOW (op1)
10752 && TREE_INT_CST_HIGH (op1) == 0
10753 && 0 != (t1 = fold_convert (type,
10754 const_binop (LSHIFT_EXPR,
10757 && ! TREE_OVERFLOW (t1))
10758 return multiple_of_p (type, t1, bottom);
10763 /* Can't handle conversions from non-integral or wider integral type. */
10764 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10765 || (TYPE_PRECISION (type)
10766 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10769 /* .. fall through ... */
10772 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10775 if (TREE_CODE (bottom) != INTEGER_CST
10776 || (TYPE_UNSIGNED (type)
10777 && (tree_int_cst_sgn (top) < 0
10778 || tree_int_cst_sgn (bottom) < 0)))
10780 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10788 /* Return true if `t' is known to be non-negative. */
10791 tree_expr_nonnegative_p (tree t)
10793 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10796 switch (TREE_CODE (t))
10799 /* We can't return 1 if flag_wrapv is set because
10800 ABS_EXPR<INT_MIN> = INT_MIN. */
10801 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10806 return tree_int_cst_sgn (t) >= 0;
10809 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10812 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10813 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10814 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10816 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10817 both unsigned and at least 2 bits shorter than the result. */
10818 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10819 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10820 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10822 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10823 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10824 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10825 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10827 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10828 TYPE_PRECISION (inner2)) + 1;
10829 return prec < TYPE_PRECISION (TREE_TYPE (t));
10835 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10837 /* x * x for floating point x is always non-negative. */
10838 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10840 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10841 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10844 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10845 both unsigned and their total bits is shorter than the result. */
10846 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10847 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10848 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10850 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10851 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10852 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10853 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10854 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10855 < TYPE_PRECISION (TREE_TYPE (t));
10861 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10862 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10868 case TRUNC_DIV_EXPR:
10869 case CEIL_DIV_EXPR:
10870 case FLOOR_DIV_EXPR:
10871 case ROUND_DIV_EXPR:
10872 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10873 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10875 case TRUNC_MOD_EXPR:
10876 case CEIL_MOD_EXPR:
10877 case FLOOR_MOD_EXPR:
10878 case ROUND_MOD_EXPR:
10880 case NON_LVALUE_EXPR:
10882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10884 case COMPOUND_EXPR:
10886 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10889 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10892 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10893 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10897 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10898 tree outer_type = TREE_TYPE (t);
10900 if (TREE_CODE (outer_type) == REAL_TYPE)
10902 if (TREE_CODE (inner_type) == REAL_TYPE)
10903 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10904 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10906 if (TYPE_UNSIGNED (inner_type))
10908 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10911 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10913 if (TREE_CODE (inner_type) == REAL_TYPE)
10914 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10915 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10916 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10917 && TYPE_UNSIGNED (inner_type);
10924 tree temp = TARGET_EXPR_SLOT (t);
10925 t = TARGET_EXPR_INITIAL (t);
10927 /* If the initializer is non-void, then it's a normal expression
10928 that will be assigned to the slot. */
10929 if (!VOID_TYPE_P (t))
10930 return tree_expr_nonnegative_p (t);
10932 /* Otherwise, the initializer sets the slot in some way. One common
10933 way is an assignment statement at the end of the initializer. */
10936 if (TREE_CODE (t) == BIND_EXPR)
10937 t = expr_last (BIND_EXPR_BODY (t));
10938 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10939 || TREE_CODE (t) == TRY_CATCH_EXPR)
10940 t = expr_last (TREE_OPERAND (t, 0));
10941 else if (TREE_CODE (t) == STATEMENT_LIST)
10946 if (TREE_CODE (t) == MODIFY_EXPR
10947 && TREE_OPERAND (t, 0) == temp)
10948 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10955 tree fndecl = get_callee_fndecl (t);
10956 tree arglist = TREE_OPERAND (t, 1);
10957 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10958 switch (DECL_FUNCTION_CODE (fndecl))
10960 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10961 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10962 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10963 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10965 CASE_BUILTIN_F (BUILT_IN_ACOS)
10966 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10967 CASE_BUILTIN_F (BUILT_IN_CABS)
10968 CASE_BUILTIN_F (BUILT_IN_COSH)
10969 CASE_BUILTIN_F (BUILT_IN_ERFC)
10970 CASE_BUILTIN_F (BUILT_IN_EXP)
10971 CASE_BUILTIN_F (BUILT_IN_EXP10)
10972 CASE_BUILTIN_F (BUILT_IN_EXP2)
10973 CASE_BUILTIN_F (BUILT_IN_FABS)
10974 CASE_BUILTIN_F (BUILT_IN_FDIM)
10975 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10976 CASE_BUILTIN_F (BUILT_IN_POW10)
10977 CASE_BUILTIN_I (BUILT_IN_FFS)
10978 CASE_BUILTIN_I (BUILT_IN_PARITY)
10979 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10983 CASE_BUILTIN_F (BUILT_IN_SQRT)
10984 /* sqrt(-0.0) is -0.0. */
10985 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10987 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10989 CASE_BUILTIN_F (BUILT_IN_ASINH)
10990 CASE_BUILTIN_F (BUILT_IN_ATAN)
10991 CASE_BUILTIN_F (BUILT_IN_ATANH)
10992 CASE_BUILTIN_F (BUILT_IN_CBRT)
10993 CASE_BUILTIN_F (BUILT_IN_CEIL)
10994 CASE_BUILTIN_F (BUILT_IN_ERF)
10995 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10996 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10997 CASE_BUILTIN_F (BUILT_IN_FMOD)
10998 CASE_BUILTIN_F (BUILT_IN_FREXP)
10999 CASE_BUILTIN_F (BUILT_IN_LCEIL)
11000 CASE_BUILTIN_F (BUILT_IN_LDEXP)
11001 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
11002 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
11003 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
11004 CASE_BUILTIN_F (BUILT_IN_LLRINT)
11005 CASE_BUILTIN_F (BUILT_IN_LLROUND)
11006 CASE_BUILTIN_F (BUILT_IN_LRINT)
11007 CASE_BUILTIN_F (BUILT_IN_LROUND)
11008 CASE_BUILTIN_F (BUILT_IN_MODF)
11009 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
11010 CASE_BUILTIN_F (BUILT_IN_POW)
11011 CASE_BUILTIN_F (BUILT_IN_RINT)
11012 CASE_BUILTIN_F (BUILT_IN_ROUND)
11013 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11014 CASE_BUILTIN_F (BUILT_IN_SINH)
11015 CASE_BUILTIN_F (BUILT_IN_TANH)
11016 CASE_BUILTIN_F (BUILT_IN_TRUNC)
11017 /* True if the 1st argument is nonnegative. */
11018 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11020 CASE_BUILTIN_F (BUILT_IN_FMAX)
11021 /* True if the 1st OR 2nd arguments are nonnegative. */
11022 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11023 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11025 CASE_BUILTIN_F (BUILT_IN_FMIN)
11026 /* True if the 1st AND 2nd arguments are nonnegative. */
11027 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11028 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11030 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11031 /* True if the 2nd argument is nonnegative. */
11032 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11036 #undef CASE_BUILTIN_F
11037 #undef CASE_BUILTIN_I
11041 /* ... fall through ... */
11044 if (truth_value_p (TREE_CODE (t)))
11045 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11049 /* We don't know sign of `t', so be conservative and return false. */
11053 /* Return true when T is an address and is known to be nonzero.
11054 For floating point we further ensure that T is not denormal.
11055 Similar logic is present in nonzero_address in rtlanal.h. */
11058 tree_expr_nonzero_p (tree t)
11060 tree type = TREE_TYPE (t);
11062 /* Doing something useful for floating point would need more work. */
11063 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11066 switch (TREE_CODE (t))
11069 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11072 /* We used to test for !integer_zerop here. This does not work correctly
11073 if TREE_CONSTANT_OVERFLOW (t). */
11074 return (TREE_INT_CST_LOW (t) != 0
11075 || TREE_INT_CST_HIGH (t) != 0);
11078 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11080 /* With the presence of negative values it is hard
11081 to say something. */
11082 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11083 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11085 /* One of operands must be positive and the other non-negative. */
11086 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11087 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11092 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11094 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11095 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11101 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11102 tree outer_type = TREE_TYPE (t);
11104 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11105 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11111 tree base = get_base_address (TREE_OPERAND (t, 0));
11116 /* Weak declarations may link to NULL. */
11117 if (VAR_OR_FUNCTION_DECL_P (base))
11118 return !DECL_WEAK (base);
11120 /* Constants are never weak. */
11121 if (CONSTANT_CLASS_P (base))
11128 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11129 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11132 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11133 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11136 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11138 /* When both operands are nonzero, then MAX must be too. */
11139 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11142 /* MAX where operand 0 is positive is positive. */
11143 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11145 /* MAX where operand 1 is positive is positive. */
11146 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11147 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11151 case COMPOUND_EXPR:
11154 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11157 case NON_LVALUE_EXPR:
11158 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11161 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11162 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11165 return alloca_call_p (t);
11173 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11174 attempt to fold the expression to a constant without modifying TYPE,
11177 If the expression could be simplified to a constant, then return
11178 the constant. If the expression would not be simplified to a
11179 constant, then return NULL_TREE. */
11182 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11184 tree tem = fold_binary (code, type, op0, op1);
11185 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11188 /* Given the components of a unary expression CODE, TYPE and OP0,
11189 attempt to fold the expression to a constant without modifying
11192 If the expression could be simplified to a constant, then return
11193 the constant. If the expression would not be simplified to a
11194 constant, then return NULL_TREE. */
11197 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11199 tree tem = fold_unary (code, type, op0);
11200 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11203 /* If EXP represents referencing an element in a constant string
11204 (either via pointer arithmetic or array indexing), return the
11205 tree representing the value accessed, otherwise return NULL. */
11208 fold_read_from_constant_string (tree exp)
11210 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11212 tree exp1 = TREE_OPERAND (exp, 0);
11216 if (TREE_CODE (exp) == INDIRECT_REF)
11217 string = string_constant (exp1, &index);
11220 tree low_bound = array_ref_low_bound (exp);
11221 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11223 /* Optimize the special-case of a zero lower bound.
11225 We convert the low_bound to sizetype to avoid some problems
11226 with constant folding. (E.g. suppose the lower bound is 1,
11227 and its mode is QI. Without the conversion,l (ARRAY
11228 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11229 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
11230 if (! integer_zerop (low_bound))
11231 index = size_diffop (index, fold_convert (sizetype, low_bound));
11237 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11238 && TREE_CODE (string) == STRING_CST
11239 && TREE_CODE (index) == INTEGER_CST
11240 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11241 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11243 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11244 return fold_convert (TREE_TYPE (exp),
11245 build_int_cst (NULL_TREE,
11246 (TREE_STRING_POINTER (string)
11247 [TREE_INT_CST_LOW (index)])));
11252 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11253 an integer constant or real constant.
11255 TYPE is the type of the result. */
11258 fold_negate_const (tree arg0, tree type)
11260 tree t = NULL_TREE;
11262 switch (TREE_CODE (arg0))
11266 unsigned HOST_WIDE_INT low;
11267 HOST_WIDE_INT high;
11268 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11269 TREE_INT_CST_HIGH (arg0),
11271 t = build_int_cst_wide (type, low, high);
11272 t = force_fit_type (t, 1,
11273 (overflow | TREE_OVERFLOW (arg0))
11274 && !TYPE_UNSIGNED (type),
11275 TREE_CONSTANT_OVERFLOW (arg0));
11280 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11284 gcc_unreachable ();
11290 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11291 an integer constant or real constant.
11293 TYPE is the type of the result. */
11296 fold_abs_const (tree arg0, tree type)
11298 tree t = NULL_TREE;
11300 switch (TREE_CODE (arg0))
11303 /* If the value is unsigned, then the absolute value is
11304 the same as the ordinary value. */
11305 if (TYPE_UNSIGNED (type))
11307 /* Similarly, if the value is non-negative. */
11308 else if (INT_CST_LT (integer_minus_one_node, arg0))
11310 /* If the value is negative, then the absolute value is
11314 unsigned HOST_WIDE_INT low;
11315 HOST_WIDE_INT high;
11316 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11317 TREE_INT_CST_HIGH (arg0),
11319 t = build_int_cst_wide (type, low, high);
11320 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11321 TREE_CONSTANT_OVERFLOW (arg0));
11326 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11327 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11333 gcc_unreachable ();
11339 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11340 constant. TYPE is the type of the result. */
11343 fold_not_const (tree arg0, tree type)
11345 tree t = NULL_TREE;
11347 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11349 t = build_int_cst_wide (type,
11350 ~ TREE_INT_CST_LOW (arg0),
11351 ~ TREE_INT_CST_HIGH (arg0));
11352 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11353 TREE_CONSTANT_OVERFLOW (arg0));
11358 /* Given CODE, a relational operator, the target type, TYPE and two
11359 constant operands OP0 and OP1, return the result of the
11360 relational operation. If the result is not a compile time
11361 constant, then return NULL_TREE. */
11364 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11366 int result, invert;
11368 /* From here on, the only cases we handle are when the result is
11369 known to be a constant. */
11371 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11373 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11374 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11376 /* Handle the cases where either operand is a NaN. */
11377 if (real_isnan (c0) || real_isnan (c1))
11387 case UNORDERED_EXPR:
11401 if (flag_trapping_math)
11407 gcc_unreachable ();
11410 return constant_boolean_node (result, type);
11413 return constant_boolean_node (real_compare (code, c0, c1), type);
11416 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11418 To compute GT, swap the arguments and do LT.
11419 To compute GE, do LT and invert the result.
11420 To compute LE, swap the arguments, do LT and invert the result.
11421 To compute NE, do EQ and invert the result.
11423 Therefore, the code below must handle only EQ and LT. */
11425 if (code == LE_EXPR || code == GT_EXPR)
11430 code = swap_tree_comparison (code);
11433 /* Note that it is safe to invert for real values here because we
11434 have already handled the one case that it matters. */
11437 if (code == NE_EXPR || code == GE_EXPR)
11440 code = invert_tree_comparison (code, false);
11443 /* Compute a result for LT or EQ if args permit;
11444 Otherwise return T. */
11445 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11447 if (code == EQ_EXPR)
11448 result = tree_int_cst_equal (op0, op1);
11449 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11450 result = INT_CST_LT_UNSIGNED (op0, op1);
11452 result = INT_CST_LT (op0, op1);
11459 return constant_boolean_node (result, type);
11462 /* Build an expression for the a clean point containing EXPR with type TYPE.
11463 Don't build a cleanup point expression for EXPR which don't have side
11467 fold_build_cleanup_point_expr (tree type, tree expr)
11469 /* If the expression does not have side effects then we don't have to wrap
11470 it with a cleanup point expression. */
11471 if (!TREE_SIDE_EFFECTS (expr))
11474 /* If the expression is a return, check to see if the expression inside the
11475 return has no side effects or the right hand side of the modify expression
11476 inside the return. If either don't have side effects set we don't need to
11477 wrap the expression in a cleanup point expression. Note we don't check the
11478 left hand side of the modify because it should always be a return decl. */
11479 if (TREE_CODE (expr) == RETURN_EXPR)
11481 tree op = TREE_OPERAND (expr, 0);
11482 if (!op || !TREE_SIDE_EFFECTS (op))
11484 op = TREE_OPERAND (op, 1);
11485 if (!TREE_SIDE_EFFECTS (op))
11489 return build1 (CLEANUP_POINT_EXPR, type, expr);
11492 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11493 avoid confusing the gimplify process. */
11496 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11498 /* The size of the object is not relevant when talking about its address. */
11499 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11500 t = TREE_OPERAND (t, 0);
11502 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11503 if (TREE_CODE (t) == INDIRECT_REF
11504 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11506 t = TREE_OPERAND (t, 0);
11507 if (TREE_TYPE (t) != ptrtype)
11508 t = build1 (NOP_EXPR, ptrtype, t);
11514 while (handled_component_p (base))
11515 base = TREE_OPERAND (base, 0);
11517 TREE_ADDRESSABLE (base) = 1;
11519 t = build1 (ADDR_EXPR, ptrtype, t);
11526 build_fold_addr_expr (tree t)
11528 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11531 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11532 of an indirection through OP0, or NULL_TREE if no simplification is
11536 fold_indirect_ref_1 (tree type, tree op0)
11542 subtype = TREE_TYPE (sub);
11543 if (!POINTER_TYPE_P (subtype))
11546 if (TREE_CODE (sub) == ADDR_EXPR)
11548 tree op = TREE_OPERAND (sub, 0);
11549 tree optype = TREE_TYPE (op);
11550 /* *&p => p; make sure to handle *&"str"[cst] here. */
11551 if (type == optype)
11553 tree fop = fold_read_from_constant_string (op);
11559 /* *(foo *)&fooarray => fooarray[0] */
11560 else if (TREE_CODE (optype) == ARRAY_TYPE
11561 && type == TREE_TYPE (optype))
11563 tree type_domain = TYPE_DOMAIN (optype);
11564 tree min_val = size_zero_node;
11565 if (type_domain && TYPE_MIN_VALUE (type_domain))
11566 min_val = TYPE_MIN_VALUE (type_domain);
11567 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11571 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11572 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11573 && type == TREE_TYPE (TREE_TYPE (subtype)))
11576 tree min_val = size_zero_node;
11577 sub = build_fold_indirect_ref (sub);
11578 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11579 if (type_domain && TYPE_MIN_VALUE (type_domain))
11580 min_val = TYPE_MIN_VALUE (type_domain);
11581 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11587 /* Builds an expression for an indirection through T, simplifying some
11591 build_fold_indirect_ref (tree t)
11593 tree type = TREE_TYPE (TREE_TYPE (t));
11594 tree sub = fold_indirect_ref_1 (type, t);
11599 return build1 (INDIRECT_REF, type, t);
11602 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11605 fold_indirect_ref (tree t)
11607 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11615 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11616 whose result is ignored. The type of the returned tree need not be
11617 the same as the original expression. */
11620 fold_ignored_result (tree t)
11622 if (!TREE_SIDE_EFFECTS (t))
11623 return integer_zero_node;
11626 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11629 t = TREE_OPERAND (t, 0);
11633 case tcc_comparison:
11634 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11635 t = TREE_OPERAND (t, 0);
11636 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11637 t = TREE_OPERAND (t, 1);
11642 case tcc_expression:
11643 switch (TREE_CODE (t))
11645 case COMPOUND_EXPR:
11646 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11648 t = TREE_OPERAND (t, 0);
11652 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11653 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11655 t = TREE_OPERAND (t, 0);
11668 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11669 This can only be applied to objects of a sizetype. */
11672 round_up (tree value, int divisor)
11674 tree div = NULL_TREE;
11676 gcc_assert (divisor > 0);
11680 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11681 have to do anything. Only do this when we are not given a const,
11682 because in that case, this check is more expensive than just
11684 if (TREE_CODE (value) != INTEGER_CST)
11686 div = build_int_cst (TREE_TYPE (value), divisor);
11688 if (multiple_of_p (TREE_TYPE (value), value, div))
11692 /* If divisor is a power of two, simplify this to bit manipulation. */
11693 if (divisor == (divisor & -divisor))
11697 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11698 value = size_binop (PLUS_EXPR, value, t);
11699 t = build_int_cst (TREE_TYPE (value), -divisor);
11700 value = size_binop (BIT_AND_EXPR, value, t);
11705 div = build_int_cst (TREE_TYPE (value), divisor);
11706 value = size_binop (CEIL_DIV_EXPR, value, div);
11707 value = size_binop (MULT_EXPR, value, div);
11713 /* Likewise, but round down. */
11716 round_down (tree value, int divisor)
11718 tree div = NULL_TREE;
11720 gcc_assert (divisor > 0);
11724 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11725 have to do anything. Only do this when we are not given a const,
11726 because in that case, this check is more expensive than just
11728 if (TREE_CODE (value) != INTEGER_CST)
11730 div = build_int_cst (TREE_TYPE (value), divisor);
11732 if (multiple_of_p (TREE_TYPE (value), value, div))
11736 /* If divisor is a power of two, simplify this to bit manipulation. */
11737 if (divisor == (divisor & -divisor))
11741 t = build_int_cst (TREE_TYPE (value), -divisor);
11742 value = size_binop (BIT_AND_EXPR, value, t);
11747 div = build_int_cst (TREE_TYPE (value), divisor);
11748 value = size_binop (FLOOR_DIV_EXPR, value, div);
11749 value = size_binop (MULT_EXPR, value, div);
11755 /* Returns the pointer to the base of the object addressed by EXP and
11756 extracts the information about the offset of the access, storing it
11757 to PBITPOS and POFFSET. */
11760 split_address_to_core_and_offset (tree exp,
11761 HOST_WIDE_INT *pbitpos, tree *poffset)
11764 enum machine_mode mode;
11765 int unsignedp, volatilep;
11766 HOST_WIDE_INT bitsize;
11768 if (TREE_CODE (exp) == ADDR_EXPR)
11770 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11771 poffset, &mode, &unsignedp, &volatilep,
11773 core = build_fold_addr_expr (core);
11779 *poffset = NULL_TREE;
11785 /* Returns true if addresses of E1 and E2 differ by a constant, false
11786 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11789 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11792 HOST_WIDE_INT bitpos1, bitpos2;
11793 tree toffset1, toffset2, tdiff, type;
11795 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11796 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11798 if (bitpos1 % BITS_PER_UNIT != 0
11799 || bitpos2 % BITS_PER_UNIT != 0
11800 || !operand_equal_p (core1, core2, 0))
11803 if (toffset1 && toffset2)
11805 type = TREE_TYPE (toffset1);
11806 if (type != TREE_TYPE (toffset2))
11807 toffset2 = fold_convert (type, toffset2);
11809 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11810 if (!cst_and_fits_in_hwi (tdiff))
11813 *diff = int_cst_value (tdiff);
11815 else if (toffset1 || toffset2)
11817 /* If only one of the offsets is non-constant, the difference cannot
11824 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11828 /* Simplify the floating point expression EXP when the sign of the
11829 result is not significant. Return NULL_TREE if no simplification
11833 fold_strip_sign_ops (tree exp)
11837 switch (TREE_CODE (exp))
11841 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11842 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11846 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11848 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11849 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11850 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11851 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11852 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11853 arg1 ? arg1 : TREE_OPERAND (exp, 1));