1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
/* Forward declarations for the file-static helpers defined below.
   NOTE(review): this extract drops several original lines, so some
   prototypes here are visibly truncated mid-parameter-list (e.g. the
   decode_field_reference and merge_ranges declarations) — compare
   against the full upstream file before editing.  */
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1. Then this yields nonzero if overflow occurred during the
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* NOTE(review): the `#define LOWPART(x) \' header line (original line 151)
   was lost in this extract; the expression below is LOWPART's body.
   LOWPART keeps the low half-word of X, HIGHPART the high half-word,
   and BASE is 2 ** (HOST_BITS_PER_WIDE_INT / 2).  */
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOWED if,
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs
191 or OVERFLOWABLE is <0 and any overflow occurs
192 We set TREE_CONSTANT_OVERFLOWED if,
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOWED.
195 We return either the original T, or a copy. */
/* force_fit_type: clamp the INTEGER_CST T so its value fits the precision
   of TREE_TYPE (t), zero- or sign-extending as the type's signedness
   requires, and set TREE_OVERFLOW / TREE_CONSTANT_OVERFLOW according to
   OVERFLOWABLE / OVERFLOWED / OVERFLOWED_CONST (see the comment block
   above this function).  Returns T itself or a freshly built node.
   NOTE(review): this extract is missing several original lines (return
   type, some declarations, brace lines); read against upstream GCC.  */
198 force_fit_type (tree t, int overflowable,
199 bool overflowed, bool overflowed_const)
201 unsigned HOST_WIDE_INT low;
204 int sign_extended_type;
206 gcc_assert (TREE_CODE (t) == INTEGER_CST);
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
/* Pointer and offset types are handled via the branch below; the body of
   that branch (original lines 213-214) is missing from this extract.  */
211 if (POINTER_TYPE_P (TREE_TYPE (t))
212 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* Size types *are* sign extended.  */
217 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221 /* First clear all bits that are beyond the type's precision.  */
223 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 low &= ~((HOST_WIDE_INT) (-1) << prec);
/* Then sign-extend the now-truncated value if the type is signed
   (or is a sizetype).  */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already.  */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half?  */
241 if (high & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)low < 0)
252 /* Sign extend bottom half?  */
253 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 low |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value changed, return a new node.  */
261 if (overflowed || overflowed_const
262 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264 t = build_int_cst_wide (TREE_TYPE (t), low, high);
/* NOTE(review): the condition head for the overflow-flag setting (original
   lines 266-267) is missing here; only its tail clause survives.  */
268 || (overflowable > 0 && sign_extended_type))
271 TREE_OVERFLOW (t) = 1;
272 TREE_CONSTANT_OVERFLOW (t) = 1;
274 else if (overflowed_const)
277 TREE_CONSTANT_OVERFLOW (t) = 1;
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
294 unsigned HOST_WIDE_INT l;
298 h = h1 + h2 + (l < l1);
302 return OVERFLOW_SUM_SIGN (h1, h2, h);
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
311 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
318 return (*hv & h1) < 0;
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): several original lines are missing from this extract
   (declarations of i/j/k, the prod[k] accumulation line, the loop that
   propagates the final carry, and the sign-adjustment conditionals
   around the neg_double calls).  Read against upstream GCC.  */
335 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
336 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
337 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
339 HOST_WIDE_INT arg1[4];
340 HOST_WIDE_INT arg2[4];
341 HOST_WIDE_INT prod[4 * 2];
342 unsigned HOST_WIDE_INT carry;
344 unsigned HOST_WIDE_INT toplow, neglow;
345 HOST_WIDE_INT tophigh, neghigh;
/* Split both operands into half-word digits and do schoolbook
   multiplication on the digit arrays.  */
347 encode (arg1, l1, h1);
348 encode (arg2, l2, h2);
350 memset (prod, 0, sizeof prod);
352 for (i = 0; i < 4; i++)
355 for (j = 0; j < 4; j++)
358 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
359 carry += arg1[i] * arg2[j];
360 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 prod[k] = LOWPART (carry);
363 carry = HIGHPART (carry);
368 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
370 /* Check for overflow by calculating the top half of the answer in full;
371 it should agree with the low half's sign bit. */
372 decode (prod + 4, &toplow, &tophigh);
/* If an operand was negative, correct the (unsigned) top half by
   subtracting the other operand — the guards for these adjustments
   (h1 < 0 / h2 < 0) are among the elided lines.  */
375 neg_double (l2, h2, &neglow, &neghigh);
376 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
380 neg_double (l1, h1, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* For a non-negative result the top half must be all zeros; for a
   negative result, all ones.  */
383 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the `count < 0' guard before the rshift_double
   delegation, the SHIFT_COUNT_TRUNCATED masking, and the bodies of the
   count >= 2*HBPWI / count >= HBPWI special cases are partially elided
   in this extract.  */
393 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
394 HOST_WIDE_INT count, unsigned int prec,
395 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
397 unsigned HOST_WIDE_INT signmask;
/* A negative COUNT is delegated to the right-shift routine.  */
401 rshift_double (l1, h1, -count, prec, lv, hv, arith);
405 if (SHIFT_COUNT_TRUNCATED)
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
415 else if (count >= HOST_BITS_PER_WIDE_INT)
417 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* General case: the double shift avoids shifting by a full word width
   (undefined behavior) via the `>> (w - count - 1) >> 1' idiom.  */
422 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
427 /* Sign extend all bits that are beyond the precision. */
/* signmask is all-ones if the result's sign bit (bit PREC-1) is set,
   else all-zeros.  */
429 signmask = -((prec > HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT) *hv
431 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432 : (*lv >> (prec - 1))) & 1);
434 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
436 else if (prec >= HOST_BITS_PER_WIDE_INT)
438 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
/* PREC fits in the low word: extend into both words (the `*hv =
   signmask;' line for this branch is among the elided lines).  */
444 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445 *lv |= signmask << prec;
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the fourth parameter (`int arith'), the head of the
   signmask conditional, and several branch bodies are elided in this
   extract.  */
455 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
456 HOST_WIDE_INT count, unsigned int prec,
457 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
460 unsigned HOST_WIDE_INT signmask;
/* signmask replicates the incoming sign bit when doing an arithmetic
   shift (the `arith ?' head of this expression is elided).  */
463 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
466 if (SHIFT_COUNT_TRUNCATED)
469 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
471 /* Shifting by the host word size is undefined according to the
472 ANSI standard, so we must handle this as a special case. */
476 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
/* General case; same two-step shift idiom as lshift_double to avoid a
   full-word shift.  */
483 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
488 /* Zero / sign extend all bits that are beyond the precision. */
490 if (count >= (HOST_WIDE_INT)prec)
495 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
497 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
499 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
500 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
506 *lv |= signmask << (prec - count);
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517 HOST_WIDE_INT count, unsigned int prec,
518 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
520 unsigned HOST_WIDE_INT s1l, s2l;
521 HOST_WIDE_INT s1h, s2h;
527 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 /* Rotate the doubleword integer in L1, H1 left by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
/* NOTE(review): this extract is missing many original lines throughout
   (declarations of i/j/overflow/quo_neg, brace lines, several branch
   bodies, the switch head, and most case-exit lines).  Do not edit this
   routine without the complete upstream source.  */
565 div_and_round_double (enum tree_code code, int uns,
566 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig,
568 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig,
570 unsigned HOST_WIDE_INT *lquo,
571 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
575 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den[4], quo[4];
578 unsigned HOST_WIDE_INT work;
579 unsigned HOST_WIDE_INT carry = 0;
580 unsigned HOST_WIDE_INT lnum = lnum_orig;
581 HOST_WIDE_INT hnum = hnum_orig;
582 unsigned HOST_WIDE_INT lden = lden_orig;
583 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and divide by 1 instead of trapping.  */
586 if (hden == 0 && lden == 0)
587 overflow = 1, lden = 1;
589 /* Calculate quotient sign and convert operands to unsigned. */
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum, hnum, &lnum, &hnum)
597 && ((HOST_WIDE_INT) lden & hden) == -1)
603 neg_double (lden, hden, &lden, &hden);
607 if (hnum == 0 && hden == 0)
608 { /* single precision */
610 /* This unsigned division rounds toward zero. */
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
624 memset (quo, 0, sizeof quo);
626 memset (num, 0, sizeof num); /* to zero 9th element */
627 memset (den, 0, sizeof den);
629 encode (num, lnum, hnum);
630 encode (den, lden, hden);
632 /* Special code for when the divisor < BASE. */
633 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
635 /* hnum != 0 already checked. */
636 for (i = 4 - 1; i >= 0; i--)
638 work = num[i] + carry * BASE;
639 quo[i] = work / lden;
/* Else: Knuth's Algorithm D (TAOCP vol. 2) on half-word digits.  */
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig, den_hi_sig;
648 unsigned HOST_WIDE_INT quo_est, scale;
650 /* Find the highest nonzero divisor digit. */
651 for (i = 4 - 1;; i--)
658 /* Insure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
661 scale = BASE / (den[den_hi_sig] + 1);
663 { /* scale divisor and dividend */
665 for (i = 0; i <= 4 - 1; i++)
667 work = (num[i] * scale) + carry;
668 num[i] = LOWPART (work);
669 carry = HIGHPART (work);
674 for (i = 0; i <= 4 - 1; i++)
676 work = (den[i] * scale) + carry;
677 den[i] = LOWPART (work);
678 carry = HIGHPART (work);
679 if (den[i] != 0) den_hi_sig = i;
/* Main quotient loop: one digit of the quotient per iteration.  */
686 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order quotient digit.
690 quo_est is never low and is at most 2 high. */
691 unsigned HOST_WIDE_INT tmp;
693 num_hi_sig = i + den_hi_sig + 1;
694 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695 if (num[num_hi_sig] != den[den_hi_sig])
696 quo_est = work / den[den_hi_sig];
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp = work - quo_est * den[den_hi_sig];
703 && (den[den_hi_sig - 1] * quo_est
704 > (tmp * BASE + num[num_hi_sig - 2])))
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is the I - 1st digit. */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = quo_est * den[j] + carry;
715 carry = HIGHPART (work);
716 work = num[i + j] - LOWPART (work);
717 num[i + j] = LOWPART (work);
718 carry += HIGHPART (work) != 0;
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 carry = 0; /* add divisor back in */
727 for (j = 0; j <= den_hi_sig; j++)
729 work = num[i + j] + den[j] + carry;
730 carry = HIGHPART (work);
731 num[i + j] = LOWPART (work);
734 num [num_hi_sig] += carry;
737 /* Store the quotient digit. */
742 decode (quo, lquo, hquo);
745 /* If result is negative, make it so. */
747 neg_double (*lquo, *hquo, lquo, hquo);
749 /* Compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751 neg_double (*lrem, *hrem, lrem, hrem);
752 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding adjustment per CODE (the `switch (code)' head is elided).  */
757 case TRUNC_MOD_EXPR: /* round toward zero */
758 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762 case FLOOR_MOD_EXPR: /* round toward negative infinity */
763 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 case CEIL_MOD_EXPR: /* round toward positive infinity */
775 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 case ROUND_MOD_EXPR: /* round to closest integer */
787 unsigned HOST_WIDE_INT labs_rem = *lrem;
788 HOST_WIDE_INT habs_rem = *hrem;
789 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790 HOST_WIDE_INT habs_den = hden, htwice;
792 /* Get absolute values. */
794 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 neg_double (lden, hden, &labs_den, &habs_den);
798 /* If (2 * abs (lrem) >= abs (lden)) */
/* NOTE(review): `<wice' below is an HTML-entity mangling of
   `&ltwice' (address-of ltwice) introduced by the extraction —
   confirm against upstream before compiling.  */
799 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
800 labs_rem, habs_rem, <wice, &htwice);
802 if (((unsigned HOST_WIDE_INT) habs_den
803 < (unsigned HOST_WIDE_INT) htwice)
804 || (((unsigned HOST_WIDE_INT) habs_den
805 == (unsigned HOST_WIDE_INT) htwice)
806 && (labs_den < ltwice)))
810 add_double (*lquo, *hquo,
811 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
814 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 /* Compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828 neg_double (*lrem, *hrem, lrem, hrem);
829 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
838 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
840 unsigned HOST_WIDE_INT int1l, int2l;
841 HOST_WIDE_INT int1h, int2h;
842 unsigned HOST_WIDE_INT quol, reml;
843 HOST_WIDE_INT quoh, remh;
844 tree type = TREE_TYPE (arg1);
845 int uns = TYPE_UNSIGNED (type);
847 int1l = TREE_INT_CST_LOW (arg1);
848 int1h = TREE_INT_CST_HIGH (arg1);
849 int2l = TREE_INT_CST_LOW (arg2);
850 int2h = TREE_INT_CST_HIGH (arg2);
852 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853 &quol, &quoh, &reml, &remh);
854 if (remh != 0 || reml != 0)
857 return build_int_cst_wide (type, quol, quoh);
860 /* Return true if built-in mathematical function specified by CODE
861 preserves the sign of its argument, i.e. -f(x) == f(-x). */
/* NOTE(review): the body of this predicate (a switch over odd built-ins
   such as sin/tan/asin, original lines 865-885) is entirely missing
   from this extract.  */
864 negate_mathfn_p (enum built_in_function code)
888 /* Check whether we may negate an integer constant T without causing
/* (continuation of the comment above: ... overflow; i.e. T is not the
   most negative value of its signed type.  The `static bool' line and
   some brace/return lines are elided in this extract.)  */
892 may_negate_without_overflow_p (tree t)
894 unsigned HOST_WIDE_INT val;
898 gcc_assert (TREE_CODE (t) == INTEGER_CST);
900 type = TREE_TYPE (t);
/* Unsigned negation wraps, so it can always be performed.  */
901 if (TYPE_UNSIGNED (type))
904 prec = TYPE_PRECISION (type);
905 if (prec > HOST_BITS_PER_WIDE_INT)
/* Wide case: if any low-word bit is set the value cannot be the
   minimum; otherwise test the high word below.  */
907 if (TREE_INT_CST_LOW (t) != 0)
909 prec -= HOST_BITS_PER_WIDE_INT;
910 val = TREE_INT_CST_HIGH (t);
913 val = TREE_INT_CST_LOW (t);
914 if (prec < HOST_BITS_PER_WIDE_INT)
915 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows only for the single value 1 << (prec - 1),
   the type's minimum.  */
916 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
919 /* Determine whether an expression T can be cheaply negated using
920 the function negate_expr. */
/* NOTE(review): the case labels of the switch (INTEGER_CST, REAL_CST,
   COMPLEX_CST, PLUS_EXPR, MINUS_EXPR, MULT_EXPR, NOP_EXPR, CALL_EXPR,
   RSHIFT_EXPR per the visible bodies), several returns, and brace lines
   are elided in this extract.  */
923 negate_expr_p (tree t)
930 type = TREE_TYPE (t);
933 switch (TREE_CODE (t))
/* INTEGER_CST: safe unless -ftrapv could trap on signed overflow.  */
936 if (TYPE_UNSIGNED (type) || ! flag_trapv)
939 /* Check that -CST will not overflow type. */
940 return may_negate_without_overflow_p (t);
/* COMPLEX_CST: both parts must be negatable.  */
947 return negate_expr_p (TREE_REALPART (t))
948 && negate_expr_p (TREE_IMAGPART (t));
/* PLUS_EXPR: only under -funsafe-math-optimizations for floats.  */
951 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
953 /* -(A + B) -> (-B) - A. */
954 if (negate_expr_p (TREE_OPERAND (t, 1))
955 && reorder_operands_p (TREE_OPERAND (t, 0),
956 TREE_OPERAND (t, 1)))
958 /* -(A + B) -> (-A) - B. */
959 return negate_expr_p (TREE_OPERAND (t, 0));
962 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
963 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
964 && reorder_operands_p (TREE_OPERAND (t, 0),
965 TREE_OPERAND (t, 1));
/* MULT_EXPR / DIV family: unsigned types are excluded, then either
   operand being negatable suffices.  */
968 if (TYPE_UNSIGNED (TREE_TYPE (t)))
974 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
975 return negate_expr_p (TREE_OPERAND (t, 1))
976 || negate_expr_p (TREE_OPERAND (t, 0));
980 /* Negate -((double)float) as (double)(-float). */
981 if (TREE_CODE (type) == REAL_TYPE)
983 tree tem = strip_float_extensions (t);
985 return negate_expr_p (tem);
990 /* Negate -f(x) as f(-x). */
991 if (negate_mathfn_p (builtin_mathfn_code (t)))
992 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
996 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
997 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
999 tree op1 = TREE_OPERAND (t, 1);
/* Matches only a shift by exactly (precision - 1), i.e. a sign-bit
   extraction.  */
1000 if (TREE_INT_CST_HIGH (op1) == 0
1001 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1002 == TREE_INT_CST_LOW (op1))
1013 /* Given T, an expression, return the negation of T. Allow for T to be
1014 null, in which case return null. */
/* NOTE(review): the `static tree' line, the null-T early return, the
   switch's case labels and many brace lines are elided in this extract;
   the visible bodies mirror the predicate negate_expr_p above.  */
1017 negate_expr (tree t)
1025 type = TREE_TYPE (t);
1026 STRIP_SIGN_NOPS (t);
1028 switch (TREE_CODE (t))
/* INTEGER_CST: fold the negated constant unless that overflows a
   trapping signed type.  */
1031 tem = fold_negate_const (t, type);
1032 if (! TREE_OVERFLOW (tem)
1033 || TYPE_UNSIGNED (type)
/* REAL_CST:  */
1039 tem = fold_negate_const (t, type);
1040 /* Two's complement FP formats, such as c4x, may overflow. */
1041 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1042 return fold_convert (type, tem);
/* COMPLEX_CST: negate both parts if both stay constant.  */
1047 tree rpart = negate_expr (TREE_REALPART (t));
1048 tree ipart = negate_expr (TREE_IMAGPART (t));
1050 if ((TREE_CODE (rpart) == REAL_CST
1051 && TREE_CODE (ipart) == REAL_CST)
1052 || (TREE_CODE (rpart) == INTEGER_CST
1053 && TREE_CODE (ipart) == INTEGER_CST))
1054 return build_complex (type, rpart, ipart);
/* NEGATE_EXPR: - -A is just A.  */
1059 return fold_convert (type, TREE_OPERAND (t, 0));
/* PLUS_EXPR:  */
1062 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1064 /* -(A + B) -> (-B) - A. */
1065 if (negate_expr_p (TREE_OPERAND (t, 1))
1066 && reorder_operands_p (TREE_OPERAND (t, 0),
1067 TREE_OPERAND (t, 1)))
1069 tem = negate_expr (TREE_OPERAND (t, 1));
1070 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1071 tem, TREE_OPERAND (t, 0));
1072 return fold_convert (type, tem);
1075 /* -(A + B) -> (-A) - B. */
1076 if (negate_expr_p (TREE_OPERAND (t, 0)))
1078 tem = negate_expr (TREE_OPERAND (t, 0));
1079 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1080 tem, TREE_OPERAND (t, 1));
1081 return fold_convert (type, tem);
1087 /* - (A - B) -> B - A */
1088 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1089 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1090 return fold_convert (type,
1091 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 TREE_OPERAND (t, 1),
1093 TREE_OPERAND (t, 0)));
/* MULT_EXPR / DIV family: push the negation into whichever operand
   accepts it.  */
1097 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1103 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1105 tem = TREE_OPERAND (t, 1);
1106 if (negate_expr_p (tem))
1107 return fold_convert (type,
1108 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1109 TREE_OPERAND (t, 0),
1110 negate_expr (tem)));
1111 tem = TREE_OPERAND (t, 0);
1112 if (negate_expr_p (tem))
1113 return fold_convert (type,
1114 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1116 TREE_OPERAND (t, 1)));
1121 /* Convert -((double)float) into (double)(-float). */
1122 if (TREE_CODE (type) == REAL_TYPE)
1124 tem = strip_float_extensions (t);
1125 if (tem != t && negate_expr_p (tem))
1126 return fold_convert (type, negate_expr (tem));
1131 /* Negate -f(x) as f(-x). */
1132 if (negate_mathfn_p (builtin_mathfn_code (t))
1133 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1135 tree fndecl, arg, arglist;
1137 fndecl = get_callee_fndecl (t);
1138 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1139 arglist = build_tree_list (NULL_TREE, arg);
1140 return build_function_call_expr (fndecl, arglist);
1145 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1146 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1148 tree op1 = TREE_OPERAND (t, 1);
1149 if (TREE_INT_CST_HIGH (op1) == 0
1150 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1151 == TREE_INT_CST_LOW (op1))
/* Flip the shiftee's signedness so the arithmetic/logical shift
   produces the negated sign-bit value directly.  */
1153 tree ntype = TYPE_UNSIGNED (type)
1154 ? lang_hooks.types.signed_type (type)
1155 : lang_hooks.types.unsigned_type (type);
1156 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1157 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1158 return fold_convert (type, temp);
/* Fallback: build an explicit NEGATE_EXPR.  */
1167 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1168 return fold_convert (type, tem);
1171 /* Split a tree IN into a constant, literal and variable parts that could be
1172 combined with CODE to make IN. "constant" means an expression with
1173 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1174 commutative arithmetic operation. Store the constant part into *CONP,
1175 the literal in *LITP and return the variable part. If a part isn't
1176 present, set it to null. If the tree does not decompose in this way,
1177 return the entire tree as the variable part and the other parts as null.
1179 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1180 case, we negate an operand that was subtracted. Except if it is a
1181 literal for which we use *MINUS_LITP instead.
1183 If NEGATE_P is true, we are negating all of IN, again except a literal
1184 for which we use *MINUS_LITP instead.
1186 If IN is itself a literal or constant, return it as appropriate.
1188 Note that we do not guarantee that any of the three values will be the
1189 same type as IN, but they will have the same signedness and mode. */
/* split_tree: decompose IN into literal (*LITP / *MINUS_LITP), constant
   (*CONP) and variable parts combinable with CODE — see the full contract
   in the comment block above.  NOTE(review): the `static tree' line, the
   initializations of *conp/*litp/*minus_litp/var, and several brace and
   guard lines are elided in this extract.  */
1192 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1193 tree *minus_litp, int negate_p)
1201 /* Strip any conversions that don't change the machine mode or signedness. */
1202 STRIP_SIGN_NOPS (in);
1204 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1206 else if (TREE_CODE (in) == code
1207 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1208 /* We can associate addition and subtraction together (even
1209 though the C standard doesn't say so) for integers because
1210 the value is not affected. For reals, the value might be
1211 affected, so we can't. */
1212 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1213 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1215 tree op0 = TREE_OPERAND (in, 0);
1216 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that op1 is subtracted, so whichever part it lands
   in must be negated.  */
1217 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1218 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1220 /* First see if either of the operands is a literal, then a constant. */
1221 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1222 *litp = op0, op0 = 0;
1223 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1224 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1226 if (op0 != 0 && TREE_CONSTANT (op0))
1227 *conp = op0, op0 = 0;
1228 else if (op1 != 0 && TREE_CONSTANT (op1))
1229 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1231 /* If we haven't dealt with either operand, this is not a case we can
1232 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1233 if (op0 != 0 && op1 != 0)
1238 var = op1, neg_var_p = neg1_p;
1240 /* Now do any needed negations. */
/* A subtracted literal is reported via *MINUS_LITP rather than being
   negated in place.  */
1242 *minus_litp = *litp, *litp = 0;
1244 *conp = negate_expr (*conp);
1246 var = negate_expr (var);
1248 else if (TREE_CONSTANT (in))
/* NEGATE_P: negate everything, swapping *LITP and *MINUS_LITP.  */
1256 *minus_litp = *litp, *litp = 0;
1257 else if (*minus_litp)
1258 *litp = *minus_litp, *minus_litp = 0;
1259 *conp = negate_expr (*conp);
1260 var = negate_expr (var);
1266 /* Re-associate trees split by the above function. T1 and T2 are either
1267 expressions to associate or null. Return the new expression, if any. If
1268 we build an operation, do it in TYPE and with CODE. */
1271 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1278 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1279 try to fold this since we will have infinite recursion. But do
1280 deal with any NEGATE_EXPRs. */
1281 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1282 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1284 if (code == PLUS_EXPR)
/* Rewrite x + (-y) as x - y so no NEGATE_EXPR survives.  */
1286 if (TREE_CODE (t1) == NEGATE_EXPR)
1287 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1288 fold_convert (type, TREE_OPERAND (t1, 0)));
1289 else if (TREE_CODE (t2) == NEGATE_EXPR)
1290 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1291 fold_convert (type, TREE_OPERAND (t2, 0)));
1292 else if (integer_zerop (t2))
1293 return fold_convert (type, t1);
1295 else if (code == MINUS_EXPR)
/* x - 0 is just x (after conversion to TYPE).  */
1297 if (integer_zerop (t2))
1298 return fold_convert (type, t1);
/* Build without folding, to avoid the infinite recursion noted above.  */
1301 return build2 (code, type, fold_convert (type, t1),
1302 fold_convert (type, t2));
/* Safe case: fold the combined expression normally.  */
1305 return fold_build2 (code, type, fold_convert (type, t1),
1306 fold_convert (type, t2));
1309 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1310 to produce a new constant.
1312 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1315 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Operands and result are kept as double-word (high/low) pairs; the
   *_double helpers implement the wide arithmetic.  NOTE(review): the
   switch's case labels are partially elided in this view; comments on
   the arithmetic below are inferred from the visible operators.  */
1317 unsigned HOST_WIDE_INT int1l, int2l;
1318 HOST_WIDE_INT int1h, int2h;
1319 unsigned HOST_WIDE_INT low;
/* Receives the unwanted half of a divmod result (quotient or
   remainder, whichever the operation does not return).  */
1321 unsigned HOST_WIDE_INT garbagel;
1322 HOST_WIDE_INT garbageh;
1324 tree type = TREE_TYPE (arg1);
1325 int uns = TYPE_UNSIGNED (type);
/* Sizetypes get their overflow propagated even though they are
   unsigned; see the overflow handling at the end.  */
1327 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1330 int1l = TREE_INT_CST_LOW (arg1);
1331 int1h = TREE_INT_CST_HIGH (arg1);
1332 int2l = TREE_INT_CST_LOW (arg2);
1333 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise OR of the two double-word values.  */
1338 low = int1l | int2l, hi = int1h | int2h;
/* Bitwise XOR.  */
1342 low = int1l ^ int2l, hi = int1h ^ int2h;
/* Bitwise AND.  */
1346 low = int1l & int2l, hi = int1h & int2h;
1352 /* It's unclear from the C standard whether shifts can overflow.
1353 The following code ignores overflow; perhaps a C standard
1354 interpretation ruling is needed. */
1355 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1362 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
/* Addition with overflow detection.  */
1367 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction implemented as addition of the negation; the overflow
   test is then done on the signs of the addends.  */
1371 neg_double (int2l, int2h, &low, &hi);
1372 add_double (int1l, int1h, low, hi, &low, &hi);
1373 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1377 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1380 case TRUNC_DIV_EXPR:
1381 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1382 case EXACT_DIV_EXPR:
1383 /* This is a shortcut for a common special case. */
1384 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1385 && ! TREE_CONSTANT_OVERFLOW (arg1)
1386 && ! TREE_CONSTANT_OVERFLOW (arg2)
1387 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
/* Both operands fit in one positive word, so a single host division
   suffices (CEIL adjusts first; adjustment line elided here).  */
1389 if (code == CEIL_DIV_EXPR)
1392 low = int1l / int2l, hi = 0;
1396 /* ... fall through ... */
1398 case ROUND_DIV_EXPR:
/* Division by one is the identity.  */
1399 if (int2h == 0 && int2l == 1)
1401 low = int1l, hi = int1h;
/* Dividing a nonzero value by itself gives one (handled in elided
   lines following this test).  */
1404 if (int1l == int2l && int1h == int2h
1405 && ! (int1l == 0 && int1h == 0))
1410 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1411 &low, &hi, &garbagel, &garbageh);
1414 case TRUNC_MOD_EXPR:
1415 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1416 /* This is a shortcut for a common special case. */
1417 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1418 && ! TREE_CONSTANT_OVERFLOW (arg1)
1419 && ! TREE_CONSTANT_OVERFLOW (arg2)
1420 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1422 if (code == CEIL_MOD_EXPR)
1424 low = int1l % int2l, hi = 0;
1428 /* ... fall through ... */
1430 case ROUND_MOD_EXPR:
/* For MOD the remainder is the wanted half; the quotient goes to the
   garbage variables.  */
1431 overflow = div_and_round_double (code, uns,
1432 int1l, int1h, int2l, int2h,
1433 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: first compute "arg1 < arg2" (unsigned or signed compare on
   the high word, low word breaking ties), then pick the operand.  */
1439 low = (((unsigned HOST_WIDE_INT) int1h
1440 < (unsigned HOST_WIDE_INT) int2h)
1441 || (((unsigned HOST_WIDE_INT) int1h
1442 == (unsigned HOST_WIDE_INT) int2h)
1445 low = (int1h < int2h
1446 || (int1h == int2h && int1l < int2l));
1448 if (low == (code == MIN_EXPR))
1449 low = int1l, hi = int1h;
1451 low = int2l, hi = int2h;
1458 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1462 /* Propagate overflow flags ourselves. */
/* NOTRUNC path: set the flags directly on the new constant instead of
   letting force_fit_type do it.  */
1463 if (((!uns || is_sizetype) && overflow)
1464 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1467 TREE_OVERFLOW (t) = 1;
1468 TREE_CONSTANT_OVERFLOW (t) = 1;
1470 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1473 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Normal path: truncate to the type and merge overflow indications.  */
1477 t = force_fit_type (t, 1,
1478 ((!uns || is_sizetype) && overflow)
1479 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1480 TREE_CONSTANT_OVERFLOW (arg1)
1481 | TREE_CONSTANT_OVERFLOW (arg2));
1486 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1487 constant. We assume ARG1 and ARG2 have the same data type, or at least
1488 are the same kind of constant and the same machine mode.
1490 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1493 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Dispatch on the kind of constant: INTEGER_CST is delegated to
   int_const_binop, REAL_CST is folded with the software FP emulator,
   COMPLEX_CST is folded componentwise by recursive calls.  */
1498 if (TREE_CODE (arg1) == INTEGER_CST)
1499 return int_const_binop (code, arg1, arg2, notrunc);
1501 if (TREE_CODE (arg1) == REAL_CST)
1503 enum machine_mode mode;
1506 REAL_VALUE_TYPE value;
1507 REAL_VALUE_TYPE result;
1511 d1 = TREE_REAL_CST (arg1);
1512 d2 = TREE_REAL_CST (arg2);
1514 type = TREE_TYPE (arg1);
1515 mode = TYPE_MODE (type);
1517 /* Don't perform operation if we honor signaling NaNs and
1518 either operand is a NaN. */
1519 if (HONOR_SNANS (mode)
1520 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1523 /* Don't perform operation if it would raise a division
1524 by zero exception. */
1525 if (code == RDIV_EXPR
1526 && REAL_VALUES_EQUAL (d2, dconst0)
1527 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1530 /* If either operand is a NaN, just return it. Otherwise, set up
1531 for floating-point trap; we return an overflow. */
1532 if (REAL_VALUE_ISNAN (d1))
1534 else if (REAL_VALUE_ISNAN (d2))
/* Do the arithmetic in full internal precision, then round to the
   type's mode; INEXACT records whether precision was lost.  */
1537 inexact = real_arithmetic (&value, code, &d1, &d2);
1538 real_convert (&result, mode, &value);
1540 /* Don't constant fold this floating point operation if the
1541 result may dependent upon the run-time rounding mode and
1542 flag_rounding_math is set, or if GCC's software emulation
1543 is unable to accurately represent the result. */
1545 if ((flag_rounding_math
1546 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1547 && !flag_unsafe_math_optimizations))
1548 && (inexact || !real_identical (&result, &value)))
1551 t = build_real (type, result);
/* Merge the operands' overflow flags into the new constant.  */
1553 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1554 TREE_CONSTANT_OVERFLOW (t)
1556 | TREE_CONSTANT_OVERFLOW (arg1)
1557 | TREE_CONSTANT_OVERFLOW (arg2);
1560 if (TREE_CODE (arg1) == COMPLEX_CST)
1562 tree type = TREE_TYPE (arg1);
1563 tree r1 = TREE_REALPART (arg1);
1564 tree i1 = TREE_IMAGPART (arg1);
1565 tree r2 = TREE_REALPART (arg2);
1566 tree i2 = TREE_IMAGPART (arg2);
/* Complex addition: (r1+r2) + (i1+i2)i.  */
1572 t = build_complex (type,
1573 const_binop (PLUS_EXPR, r1, r2, notrunc),
1574 const_binop (PLUS_EXPR, i1, i2, notrunc));
/* Complex subtraction: (r1-r2) + (i1-i2)i.  */
1578 t = build_complex (type,
1579 const_binop (MINUS_EXPR, r1, r2, notrunc),
1580 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiplication: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.
   NOTE(review): the inner operand lines are partially elided here.  */
1584 t = build_complex (type,
1585 const_binop (MINUS_EXPR,
1586 const_binop (MULT_EXPR,
1588 const_binop (MULT_EXPR,
1591 const_binop (PLUS_EXPR,
1592 const_binop (MULT_EXPR,
1594 const_binop (MULT_EXPR,
/* Complex division: multiply by the conjugate and divide both parts
   by |arg2|^2 = r2*r2 + i2*i2.  */
1601 tree t1, t2, real, imag;
1603 = const_binop (PLUS_EXPR,
1604 const_binop (MULT_EXPR, r2, r2, notrunc),
1605 const_binop (MULT_EXPR, i2, i2, notrunc),
1608 t1 = const_binop (PLUS_EXPR,
1609 const_binop (MULT_EXPR, r1, r2, notrunc),
1610 const_binop (MULT_EXPR, i1, i2, notrunc),
1612 t2 = const_binop (MINUS_EXPR,
1613 const_binop (MULT_EXPR, i1, r2, notrunc),
1614 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integral complex types divide with TRUNC_DIV; floating ones with
   RDIV.  */
1617 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1619 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1620 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1624 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1625 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1630 t = build_complex (type, real, imag);
1642 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1643 indicates which particular sizetype to create. */
1646 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* sizetype_tab maps the KIND enumerator to the corresponding
   sizetype/bitsizetype tree; build_int_cst makes the constant.  */
1648 return build_int_cst (sizetype_tab[(int) kind], number);
1651 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1652 is a tree code. The type of the result is taken from the operands.
1653 Both must be the same type integer type and it must be a size type.
1654 If the operands are constant, so is the result. */
1657 size_binop (enum tree_code code, tree arg0, tree arg1)
1659 tree type = TREE_TYPE (arg0);
/* Both operands must share the same sizetype; anything else is a
   caller bug.  */
1661 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1662 && type == TREE_TYPE (arg1));
1664 /* Handle the special case of two integer constants faster. */
1665 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1667 /* And some specific cases even faster than that. */
/* 0 + x, x +/- 0 and 1 * x need no arithmetic at all (the returns
   for these identities are in elided lines).  */
1668 if (code == PLUS_EXPR && integer_zerop (arg0))
1670 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1671 && integer_zerop (arg1))
1673 else if (code == MULT_EXPR && integer_onep (arg0))
1676 /* Handle general case of two integer constants. */
1677 return int_const_binop (code, arg0, arg1, 0);
1680 if (arg0 == error_mark_node || arg1 == error_mark_node)
1681 return error_mark_node;
/* Non-constant operands: build a (possibly folded) expression.  */
1683 return fold_build2 (code, type, arg0, arg1);
1686 /* Given two values, either both of sizetype or both of bitsizetype,
1687 compute the difference between the two values. Return the value
1688 in signed type corresponding to the type of the operands. */
1691 size_diffop (tree arg0, tree arg1)
1693 tree type = TREE_TYPE (arg0);
1696 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1697 && type == TREE_TYPE (arg1));
1699 /* If the type is already signed, just do the simple thing. */
1700 if (!TYPE_UNSIGNED (type))
1701 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operands' sizetype.  */
1703 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract. The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1709 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1710 fold_convert (ctype, arg1));
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either). Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0, arg1))
1717 return fold_convert (ctype, integer_zero_node);
1718 else if (tree_int_cst_lt (arg1, arg0))
1719 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute 0 - (arg1 - arg0) in the signed type.
   NOTE(review): the inner MINUS operands are on an elided line.  */
1721 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1722 fold_convert (ctype, size_binop (MINUS_EXPR,
1726 /* A subroutine of fold_convert_const handling conversions of an
1727 INTEGER_CST to another integer type. */
1730 fold_convert_const_int_from_int (tree type, tree arg1)
1734 /* Given an integer constant, make new constant with new type,
1735 appropriately sign-extended or truncated. */
1736 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1737 TREE_INT_CST_HIGH (arg1));
/* force_fit_type truncates/extends to TYPE and computes the overflow
   flags.  The second argument suppresses overflow reporting for
   pointer sources; the third flags a negative value converted from a
   signed to an unsigned type, merged with ARG1's own overflow.  */
1739 t = force_fit_type (t,
1740 /* Don't set the overflow when
1741 converting a pointer */
1742 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1743 (TREE_INT_CST_HIGH (arg1) < 0
1744 && (TYPE_UNSIGNED (type)
1745 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1746 | TREE_OVERFLOW (arg1),
1747 TREE_CONSTANT_OVERFLOW (arg1));
1752 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1753 to an integer type. */
1756 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1761 /* The following code implements the floating point to integer
1762 conversion rules required by the Java Language Specification,
1763 that IEEE NaNs are mapped to zero and values that overflow
1764 the target precision saturate, i.e. values greater than
1765 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1766 are mapped to INT_MIN. These semantics are allowed by the
1767 C and C++ standards that simply state that the behavior of
1768 FP-to-integer conversion is unspecified upon overflow. */
1770 HOST_WIDE_INT high, low;
1772 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding implied by the conversion code before the
   actual FP-to-integer step.  */
1776 case FIX_TRUNC_EXPR:
1777 real_trunc (&r, VOIDmode, &x);
1781 real_ceil (&r, VOIDmode, &x);
1784 case FIX_FLOOR_EXPR:
1785 real_floor (&r, VOIDmode, &x);
1788 case FIX_ROUND_EXPR:
1789 real_round (&r, VOIDmode, &x);
1796 /* If R is NaN, return zero and show we have an overflow. */
1797 if (REAL_VALUE_ISNAN (r))
1804 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's minimum when R underflows it.  */
1809 tree lt = TYPE_MIN_VALUE (type);
1810 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1811 if (REAL_VALUES_LESS (r, l))
1814 high = TREE_INT_CST_HIGH (lt);
1815 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE's maximum when R overflows it.  */
1821 tree ut = TYPE_MAX_VALUE (type);
1824 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1825 if (REAL_VALUES_LESS (u, r))
1828 high = TREE_INT_CST_HIGH (ut);
1829 low = TREE_INT_CST_LOW (ut);
/* In range: do the actual conversion.  */
1835 REAL_VALUE_TO_INT (&low, &high, r);
1837 t = build_int_cst_wide (type, low, high);
1839 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1840 TREE_CONSTANT_OVERFLOW (arg1));
1844 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1845 to another floating point type. */
1848 fold_convert_const_real_from_real (tree type, tree arg1)
1850 REAL_VALUE_TYPE value;
/* Round ARG1's value to the target type's mode and build a new
   REAL_CST, propagating the source's overflow flags.  */
1853 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1854 t = build_real (type, value);
1856 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1857 TREE_CONSTANT_OVERFLOW (t)
1858 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1862 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1863 type TYPE. If no simplification can be done return NULL_TREE. */
1866 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Identity conversion needs no work (the return is on an elided line).  */
1868 if (TREE_TYPE (arg1) == type)
/* Dispatch to the per-kind helpers above based on the target type and
   the constant's kind.  */
1871 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1873 if (TREE_CODE (arg1) == INTEGER_CST)
1874 return fold_convert_const_int_from_int (type, arg1);
1875 else if (TREE_CODE (arg1) == REAL_CST)
1876 return fold_convert_const_int_from_real (code, type, arg1);
1878 else if (TREE_CODE (type) == REAL_TYPE)
1880 if (TREE_CODE (arg1) == INTEGER_CST)
1881 return build_real_from_int_cst (type, arg1);
1882 if (TREE_CODE (arg1) == REAL_CST)
1883 return fold_convert_const_real_from_real (type, arg1);
1888 /* Construct a vector of zero elements of vector type TYPE. */
1891 build_zero_vector (tree type)
/* Build one zero of the element type, replicate it into a TREE_LIST
   of TYPE_VECTOR_SUBPARTS entries, and wrap it in a VECTOR_CST.  */
1896 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1897 units = TYPE_VECTOR_SUBPARTS (type);
1900 for (i = 0; i < units; i++)
1901 list = tree_cons (NULL_TREE, elem, list);
1902 return build_vector (type, list);
1905 /* Convert expression ARG to type TYPE. Used by the middle-end for
1906 simple conversions in preference to calling the front-end's convert. */
1909 fold_convert (tree type, tree arg)
1911 tree orig = TREE_TYPE (arg);
/* Bail out immediately on erroneous operands.  */
1917 if (TREE_CODE (arg) == ERROR_MARK
1918 || TREE_CODE (type) == ERROR_MARK
1919 || TREE_CODE (orig) == ERROR_MARK)
1920 return error_mark_node;
/* Same (or language-compatible) type: a plain NOP conversion.  */
1922 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1923 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1924 TYPE_MAIN_VARIANT (orig)))
1925 return fold_build1 (NOP_EXPR, type, arg);
1927 switch (TREE_CODE (type))
1929 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
/* Target is integral or pointer-like.  Constants fold directly.  */
1932 if (TREE_CODE (arg) == INTEGER_CST)
1934 tem = fold_convert_const (NOP_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1938 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1939 || TREE_CODE (orig) == OFFSET_TYPE)
1940 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: take the real part, then convert that.  */
1941 if (TREE_CODE (orig) == COMPLEX_TYPE)
1943 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert (type, tem);
1946 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1947 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1948 return fold_build1 (NOP_EXPR, type, arg);
/* Target is a floating type (REAL_TYPE case label elided here).  */
1951 if (TREE_CODE (arg) == INTEGER_CST)
1953 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1954 if (tem != NULL_TREE)
1957 else if (TREE_CODE (arg) == REAL_CST)
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1964 switch (TREE_CODE (orig))
1966 case INTEGER_TYPE: case CHAR_TYPE:
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1969 return fold_build1 (FLOAT_EXPR, type, arg);
/* Real-to-real: with -ffloat-store use CONVERT_EXPR so excess
   precision is dropped.  */
1972 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1976 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1977 return fold_convert (type, tem);
/* Target is a complex type (case label elided here).  */
1984 switch (TREE_CODE (orig))
1986 case INTEGER_TYPE: case CHAR_TYPE:
1987 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar to complex: real part is the converted scalar, imaginary
   part is zero.  */
1990 return build2 (COMPLEX_EXPR, type,
1991 fold_convert (TREE_TYPE (type), arg),
1992 fold_convert (TREE_TYPE (type), integer_zero_node));
/* Complex to complex: convert each part.  A literal COMPLEX_EXPR can
   be rebuilt directly; otherwise ARG is wrapped in a SAVE_EXPR so it
   is evaluated only once for both parts.  */
1997 if (TREE_CODE (arg) == COMPLEX_EXPR)
1999 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2000 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2001 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2004 arg = save_expr (arg);
2005 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert (TREE_TYPE (type), rpart);
2008 ipart = fold_convert (TREE_TYPE (type), ipart);
2009 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a vector type (case label elided): same-size conversions
   become VIEW_CONVERT_EXPRs; zero gets a dedicated builder.  */
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Conversion to void: keep ARG only for its side effects.  */
2025 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2032 /* Return false if expr can be assumed not to be an lvalue, true
2036 maybe_lvalue_p (tree x)
2038 /* We only need to wrap lvalue tree codes. */
/* The listed codes are those that can denote (or produce) lvalues;
   NOTE(review): several case labels and the return statements are on
   elided lines, so this list is not exhaustive as shown.  */
2039 switch (TREE_CODE (x))
2050 case ALIGN_INDIRECT_REF:
2051 case MISALIGNED_INDIRECT_REF:
2053 case ARRAY_RANGE_REF:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2074 /* Assume the worst for front-end tree codes. */
/* Codes beyond NUM_TREE_CODES are language-specific and unknown here,
   so treat them as possibly being lvalues.  */
2075 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2083 /* Return an expr equal to X but certainly not valid as an lvalue. */
2088 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* If X cannot be an lvalue anyway, no wrapper is needed (the early
   return is on an elided line); otherwise wrap it.  */
2093 if (! maybe_lvalue_p (x))
2095 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2099 Zero means allow extended lvalues. */
2101 int pedantic_lvalues;
2103 /* When pedantic, return an expr equal to X but certainly not valid as a
2104 pedantic lvalue. Otherwise, return X. */
2107 pedantic_non_lvalue (tree x)
/* Only strip lvalue-ness when the front end asked for pedantic
   lvalue rules; otherwise X passes through unchanged.  */
2109 if (pedantic_lvalues)
2110 return non_lvalue (x);
2115 /* Given a tree comparison code, return the code that is the logical inverse
2116 of the given code. It is not safe to do this for floating-point
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2121 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inverting could change which inputs
   trap, so refuse (the ERROR_MARK return is on an elided line).  */
2123 if (honor_nans && flag_trapping_math)
/* Each ordered comparison inverts to its unordered counterpart when
   NaNs must be honored (GT -> UNLE, etc.); the case labels are on
   elided lines.  */
2133 return honor_nans ? UNLE_EXPR : LE_EXPR;
2135 return honor_nans ? UNLT_EXPR : LT_EXPR;
2137 return honor_nans ? UNGE_EXPR : GE_EXPR;
2139 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are each other's inverses.  */
2153 return UNORDERED_EXPR;
2154 case UNORDERED_EXPR:
2155 return ORDERED_EXPR;
2161 /* Similar, but return the comparison that results if the operands are
2162 swapped. This is safe for floating-point. */
2165 swap_tree_comparison (enum tree_code code)
/* NOTE(review): the switch body is almost entirely elided in this
   view; only the UNORDERED_EXPR label is visible (it is symmetric, so
   presumably it maps to itself — confirm against the full source).  */
2172 case UNORDERED_EXPR:
2198 /* Convert a comparison tree code from an enum tree_code representation
2199 into a compcode bit-based encoding. This function is the inverse of
2200 compcode_to_comparison. */
2202 static enum comparison_code
2203 comparison_to_compcode (enum tree_code code)
/* Straight table mapping; the case labels for the ordered codes are
   on elided lines.  */
2220 return COMPCODE_ORD;
2221 case UNORDERED_EXPR:
2222 return COMPCODE_UNORD;
2224 return COMPCODE_UNLT;
2226 return COMPCODE_UNEQ;
2228 return COMPCODE_UNLE;
2230 return COMPCODE_UNGT;
2232 return COMPCODE_LTGT;
2234 return COMPCODE_UNGE;
2240 /* Convert a compcode bit-based encoding of a comparison operator back
2241 to GCC's enum tree_code representation. This function is the
2242 inverse of comparison_to_compcode. */
2244 static enum tree_code
2245 compcode_to_comparison (enum comparison_code code)
/* Straight table mapping; most case labels are on elided lines.  */
2262 return ORDERED_EXPR;
2263 case COMPCODE_UNORD:
2264 return UNORDERED_EXPR;
2282 /* Return a tree for the comparison which is the combination of
2283 doing the AND or OR (depending on CODE) of the two operations LCODE
2284 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2285 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2286 if this makes the transformation invalid. */
2289 combine_comparisons (enum tree_code code, enum tree_code lcode,
2290 enum tree_code rcode, tree truth_type,
2291 tree ll_arg, tree lr_arg)
2293 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2294 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2295 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 enum comparison_code compcode;
/* In the bit encoding, AND of two predicates is bitwise AND of their
   codes, and OR is bitwise OR.  */
2300 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2301 compcode = lcompcode & rcompcode;
2304 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2305 compcode = lcompcode | rcompcode;
2314 /* Eliminate unordered comparisons, as well as LTGT and ORD
2315 which are not used unless the mode has NaNs. */
2316 compcode &= ~COMPCODE_UNORD;
2317 if (compcode == COMPCODE_LTGT)
2318 compcode = COMPCODE_NE;
2319 else if (compcode == COMPCODE_ORD)
2320 compcode = COMPCODE_TRUE;
2322 else if (flag_trapping_math)
2324 /* Check that the original operation and the optimized ones will trap
2325 under the same condition. */
2326 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2327 && (lcompcode != COMPCODE_EQ)
2328 && (lcompcode != COMPCODE_ORD);
2329 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2330 && (rcompcode != COMPCODE_EQ)
2331 && (rcompcode != COMPCODE_ORD);
2332 bool trap = (compcode & COMPCODE_UNORD) == 0
2333 && (compcode != COMPCODE_EQ)
2334 && (compcode != COMPCODE_ORD);
2336 /* In a short-circuited boolean expression the LHS might be
2337 such that the RHS, if evaluated, will never trap. For
2338 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2339 if neither x nor y is NaN. (This is a mixed blessing: for
2340 example, the expression above will never trap, hence
2341 optimizing it to x < y would be invalid). */
2342 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2343 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2346 /* If the comparison was short-circuited, and only the RHS
2347 trapped, we may now generate a spurious trap. */
2349 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2352 /* If we changed the conditions that cause a trap, we lose. */
2353 if ((ltrap || rtrap) != trap)
/* Emit the combined comparison, or a constant when the combination is
   trivially true/false.  */
2357 if (compcode == COMPCODE_TRUE)
2358 return constant_boolean_node (true, truth_type);
2359 else if (compcode == COMPCODE_FALSE)
2360 return constant_boolean_node (false, truth_type);
2362 return fold_build2 (compcode_to_comparison (compcode),
2363 truth_type, ll_arg, lr_arg);
2366 /* Return nonzero if CODE is a tree code that represents a truth value. */
2369 truth_value_p (enum tree_code code)
/* True for any comparison class code plus the explicit boolean
   connectives.  */
2371 return (TREE_CODE_CLASS (code) == tcc_comparison
2372 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2373 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2374 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2377 /* Return nonzero if two operands (typically of the same tree node)
2378 are necessarily equal. If either argument has side-effects this
2379 function returns zero. FLAGS modifies behavior as follows:
2381 If OEP_ONLY_CONST is set, only return nonzero for constants.
2382 This function tests whether the operands are indistinguishable;
2383 it does not test whether they are equal using C's == operation.
2384 The distinction is important for IEEE floating point, because
2385 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2386 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2388 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2389 even though it may hold multiple values during a function.
2390 This is because a GCC tree node guarantees that nothing else is
2391 executed between the evaluation of its "operands" (which may often
2392 be evaluated in arbitrary order). Hence if the operands themselves
2393 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2394 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2395 unset means assuming isochronic (or instantaneous) tree equivalence.
2396 Unless comparing arbitrary expression trees, such as from different
2397 statements, this flag can usually be left unset.
2399 If OEP_PURE_SAME is set, then pure functions with identical arguments
2400 are considered the same. It is used when the caller has other ways
2401 to ensure that global memory is unchanged in between. */
2404 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2406 /* If either is ERROR_MARK, they aren't equal. */
2407 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2410 /* If both types don't have the same signedness, then we can't consider
2411 them equal. We must check this before the STRIP_NOPS calls
2412 because they may change the signedness of the arguments. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2419 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2420 /* This is needed for conversions and for COMPONENT_REF.
2421 Might as well play it safe and always test this. */
2422 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2423 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2424 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2427 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2428 We don't care about side effects in that case because the SAVE_EXPR
2429 takes care of that for us. In all other cases, two expressions are
2430 equal if they have no side effects. If we have two identical
2431 expressions with side effects that should be treated the same due
2432 to the only side effects being identical SAVE_EXPR's, that will
2433 be detected in the recursive calls below. */
2434 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2435 && (TREE_CODE (arg0) == SAVE_EXPR
2436 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2439 /* Next handle constant cases, those for which we can return 1 even
2440 if ONLY_CONST is set. */
2441 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2442 switch (TREE_CODE (arg0))
/* Overflowed constants never compare equal; the exact comparison is
   then done by the kind-specific predicate.  */
2445 return (! TREE_CONSTANT_OVERFLOW (arg0)
2446 && ! TREE_CONSTANT_OVERFLOW (arg1)
2447 && tree_int_cst_equal (arg0, arg1));
2450 return (! TREE_CONSTANT_OVERFLOW (arg0)
2451 && ! TREE_CONSTANT_OVERFLOW (arg1)
2452 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2453 TREE_REAL_CST (arg1)));
/* VECTOR_CST: walk both element lists in lockstep.  */
2459 if (TREE_CONSTANT_OVERFLOW (arg0)
2460 || TREE_CONSTANT_OVERFLOW (arg1))
2463 v1 = TREE_VECTOR_CST_ELTS (arg0);
2464 v2 = TREE_VECTOR_CST_ELTS (arg1);
2467 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2470 v1 = TREE_CHAIN (v1);
2471 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: compare real and imaginary parts.  */
2478 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2480 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: same length and identical bytes.  */
2484 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2485 && ! memcmp (TREE_STRING_POINTER (arg0),
2486 TREE_STRING_POINTER (arg1),
2487 TREE_STRING_LENGTH (arg0)));
/* ADDR_EXPR of constants: compare the addressed objects.  */
2490 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2496 if (flags & OEP_ONLY_CONST)
2499 /* Define macros to test an operand from arg0 and arg1 for equality and a
2500 variant that allows null and views null as being different from any
2501 non-null value. In the latter case, if either is null, the both
2502 must be; otherwise, do the normal comparison. */
2503 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2504 TREE_OPERAND (arg1, N), flags)
2506 #define OP_SAME_WITH_NULL(N) \
2507 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2508 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2510 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2513 /* Two conversions are equal only if signedness and modes match. */
2514 switch (TREE_CODE (arg0))
2519 case FIX_TRUNC_EXPR:
2520 case FIX_FLOOR_EXPR:
2521 case FIX_ROUND_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2533 case tcc_comparison:
/* Binary/comparison class: operands pairwise equal, or swapped for a
   commutative code.  */
2535 if (OP_SAME (0) && OP_SAME (1))
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2552 switch (TREE_CODE (arg0))
2555 case ALIGN_INDIRECT_REF:
2556 case MISALIGNED_INDIRECT_REF:
2562 case ARRAY_RANGE_REF:
2563 /* Operands 2 and 3 may be null. */
2566 && OP_SAME_WITH_NULL (2)
2567 && OP_SAME_WITH_NULL (3));
2570 /* Handle operand 2 the same as for ARRAY_REF. */
2571 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2574 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2580 case tcc_expression:
2581 switch (TREE_CODE (arg0))
2584 case TRUTH_NOT_EXPR:
2587 case TRUTH_ANDIF_EXPR:
2588 case TRUTH_ORIF_EXPR:
/* Short-circuit forms are order-sensitive: operands must match in
   the same positions.  */
2589 return OP_SAME (0) && OP_SAME (1);
2591 case TRUTH_AND_EXPR:
2593 case TRUTH_XOR_EXPR:
2594 if (OP_SAME (0) && OP_SAME (1))
2597 /* Otherwise take into account this is a commutative operation. */
2598 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2599 TREE_OPERAND (arg1, 1), flags)
2600 && operand_equal_p (TREE_OPERAND (arg0, 1),
2601 TREE_OPERAND (arg1, 0), flags));
2604 /* If the CALL_EXPRs call different functions, then they
2605 clearly can not be equal. */
/* With OEP_PURE_SAME, only const/pure calls may be considered equal;
   the flag test that uses CEF is on elided lines.  */
2610 unsigned int cef = call_expr_flags (arg0);
2611 if (flags & OEP_PURE_SAME)
2612 cef &= ECF_CONST | ECF_PURE;
2619 /* Now see if all the arguments are the same. operand_equal_p
2620 does not handle TREE_LIST, so we walk the operands here
2621 feeding them to operand_equal_p. */
2622 arg0 = TREE_OPERAND (arg0, 1);
2623 arg1 = TREE_OPERAND (arg1, 1);
2624 while (arg0 && arg1)
2626 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2630 arg0 = TREE_CHAIN (arg0);
2631 arg1 = TREE_CHAIN (arg1);
2634 /* If we get here and both argument lists are exhausted
2635 then the CALL_EXPRs are equal. */
2636 return ! (arg0 || arg1);
2642 case tcc_declaration:
2643 /* Consider __builtin_sqrt equal to sqrt. */
2644 return (TREE_CODE (arg0) == FUNCTION_DECL
2645 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2646 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2647 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2654 #undef OP_SAME_WITH_NULL
2657 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2658 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2660 When in doubt, return 0. */
2663 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2665 int unsignedp1, unsignedpo;
2666 tree primarg0, primarg1, primother;
2667 unsigned int correct_width;
/* Exact equality is the easy win.  */
2669 if (operand_equal_p (arg0, arg1, 0))
/* Only integral operands can have been shortened.  */
2672 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2673 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2676 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2677 and see if the inner values are the same. This removes any
2678 signedness comparison, which doesn't matter here. */
2679 primarg0 = arg0, primarg1 = arg1;
2680 STRIP_NOPS (primarg0);
2681 STRIP_NOPS (primarg1);
2682 if (operand_equal_p (primarg0, primarg1, 0))
2685 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2686 actual comparison operand, ARG0.
2688 First throw away any conversions to wider types
2689 already present in the operands. */
2691 primarg1 = get_narrower (arg1, &unsignedp1);
2692 primother = get_narrower (other, &unsignedpo);
2694 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
/* Shortening only applies when both narrowed operands are narrower
   than ARG1's width and agree in signedness.  */
2695 if (unsignedp1 == unsignedpo
2696 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2697 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2699 tree type = TREE_TYPE (arg0);
2701 /* Make sure shorter operand is extended the right way
2702 to match the longer operand. */
2703 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2704 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2706 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2713 /* See if ARG is an expression that is either a comparison or is performing
2714 arithmetic on comparisons. The comparisons must only be comparing
2715 two different values, which will be stored in *CVAL1 and *CVAL2; if
2716 they are nonzero it means that some operands have already been found.
2717 No variables may be used anywhere else in the expression except in the
2718 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2719 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2721 If this is true, return 1. Otherwise, return zero. */
2724 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2726 enum tree_code code = TREE_CODE (arg);
2727 enum tree_code_class class = TREE_CODE_CLASS (code);
2729 /* We can handle some of the tcc_expression cases here. */
2730 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2732 else if (class == tcc_expression
2733 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2734 || code == COMPOUND_EXPR))
2737 else if (class == tcc_expression && code == SAVE_EXPR
2738 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2740 /* If we've already found a CVAL1 or CVAL2, this expression is
2741 two complex to handle. */
2742 if (*cval1 || *cval2)
2752 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2755 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2756 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2757 cval1, cval2, save_p));
2762 case tcc_expression:
2763 if (code == COND_EXPR)
2764 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2765 cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p)
2768 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2769 cval1, cval2, save_p));
2772 case tcc_comparison:
2773 /* First see if we can handle the first operand, then the second. For
2774 the second operand, we know *CVAL1 can't be zero. It must be that
2775 one side of the comparison is each of the values; test for the
2776 case where this isn't true by failing if the two operands
2779 if (operand_equal_p (TREE_OPERAND (arg, 0),
2780 TREE_OPERAND (arg, 1), 0))
2784 *cval1 = TREE_OPERAND (arg, 0);
2785 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2787 else if (*cval2 == 0)
2788 *cval2 = TREE_OPERAND (arg, 0);
2789 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2794 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2796 else if (*cval2 == 0)
2797 *cval2 = TREE_OPERAND (arg, 1);
2798 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2810 /* ARG is a tree that is known to contain just arithmetic operations and
2811 comparisons. Evaluate the operations in the tree substituting NEW0 for
2812 any occurrence of OLD0 as an operand of a comparison and likewise for
2816 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2818 tree type = TREE_TYPE (arg);
2819 enum tree_code code = TREE_CODE (arg);
2820 enum tree_code_class class = TREE_CODE_CLASS (code);
2822 /* We can handle some of the tcc_expression cases here. */
2823 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2825 else if (class == tcc_expression
2826 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2832 return fold_build1 (code, type,
2833 eval_subst (TREE_OPERAND (arg, 0),
2834 old0, new0, old1, new1));
2837 return fold_build2 (code, type,
2838 eval_subst (TREE_OPERAND (arg, 0),
2839 old0, new0, old1, new1),
2840 eval_subst (TREE_OPERAND (arg, 1),
2841 old0, new0, old1, new1));
2843 case tcc_expression:
2847 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2850 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2853 return fold_build3 (code, type,
2854 eval_subst (TREE_OPERAND (arg, 0),
2855 old0, new0, old1, new1),
2856 eval_subst (TREE_OPERAND (arg, 1),
2857 old0, new0, old1, new1),
2858 eval_subst (TREE_OPERAND (arg, 2),
2859 old0, new0, old1, new1));
2863 /* Fall through - ??? */
2865 case tcc_comparison:
2867 tree arg0 = TREE_OPERAND (arg, 0);
2868 tree arg1 = TREE_OPERAND (arg, 1);
2870 /* We need to check both for exact equality and tree equality. The
2871 former will be true if the operand has a side-effect. In that
2872 case, we know the operand occurred exactly once. */
2874 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2876 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2879 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2881 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2884 return fold_build2 (code, type, arg0, arg1);
2892 /* Return a tree for the case when the result of an expression is RESULT
2893 converted to TYPE and OMITTED was previously an operand of the expression
2894 but is now not needed (e.g., we folded OMITTED * 0).
2896 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2897 the conversion of RESULT to TYPE. */
2900 omit_one_operand (tree type, tree result, tree omitted)
2902 tree t = fold_convert (type, result);
2904 if (TREE_SIDE_EFFECTS (omitted))
2905 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2907 return non_lvalue (t);
2910 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2913 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2915 tree t = fold_convert (type, result);
2917 if (TREE_SIDE_EFFECTS (omitted))
2918 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2920 return pedantic_non_lvalue (t);
2923 /* Return a tree for the case when the result of an expression is RESULT
2924 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2925 of the expression but are now not needed.
2927 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2928 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2929 evaluated before OMITTED2. Otherwise, if neither has side effects,
2930 just do the conversion of RESULT to TYPE. */
2933 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2935 tree t = fold_convert (type, result);
2937 if (TREE_SIDE_EFFECTS (omitted2))
2938 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2939 if (TREE_SIDE_EFFECTS (omitted1))
2940 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2942 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2946 /* Return a simplified tree node for the truth-negation of ARG. This
2947 never alters ARG itself. We assume that ARG is an operation that
2948 returns a truth value (0 or 1).
2950 FIXME: one would think we would fold the result, but it causes
2951 problems with the dominator optimizer. */
2953 invert_truthvalue (tree arg)
2955 tree type = TREE_TYPE (arg);
2956 enum tree_code code = TREE_CODE (arg);
2958 if (code == ERROR_MARK)
2961 /* If this is a comparison, we can simply invert it, except for
2962 floating-point non-equality comparisons, in which case we just
2963 enclose a TRUTH_NOT_EXPR around what we have. */
2965 if (TREE_CODE_CLASS (code) == tcc_comparison)
2967 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2968 if (FLOAT_TYPE_P (op_type)
2969 && flag_trapping_math
2970 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2971 && code != NE_EXPR && code != EQ_EXPR)
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
2975 code = invert_tree_comparison (code,
2976 HONOR_NANS (TYPE_MODE (op_type)));
2977 if (code == ERROR_MARK)
2978 return build1 (TRUTH_NOT_EXPR, type, arg);
2980 return build2 (code, type,
2981 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2988 return constant_boolean_node (integer_zerop (arg), type);
2990 case TRUTH_AND_EXPR:
2991 return build2 (TRUTH_OR_EXPR, type,
2992 invert_truthvalue (TREE_OPERAND (arg, 0)),
2993 invert_truthvalue (TREE_OPERAND (arg, 1)));
2996 return build2 (TRUTH_AND_EXPR, type,
2997 invert_truthvalue (TREE_OPERAND (arg, 0)),
2998 invert_truthvalue (TREE_OPERAND (arg, 1)));
3000 case TRUTH_XOR_EXPR:
3001 /* Here we can invert either operand. We invert the first operand
3002 unless the second operand is a TRUTH_NOT_EXPR in which case our
3003 result is the XOR of the first operand with the inside of the
3004 negation of the second operand. */
3006 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3007 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3008 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3010 return build2 (TRUTH_XOR_EXPR, type,
3011 invert_truthvalue (TREE_OPERAND (arg, 0)),
3012 TREE_OPERAND (arg, 1));
3014 case TRUTH_ANDIF_EXPR:
3015 return build2 (TRUTH_ORIF_EXPR, type,
3016 invert_truthvalue (TREE_OPERAND (arg, 0)),
3017 invert_truthvalue (TREE_OPERAND (arg, 1)));
3019 case TRUTH_ORIF_EXPR:
3020 return build2 (TRUTH_ANDIF_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 invert_truthvalue (TREE_OPERAND (arg, 1)));
3024 case TRUTH_NOT_EXPR:
3025 return TREE_OPERAND (arg, 0);
3029 tree arg1 = TREE_OPERAND (arg, 1);
3030 tree arg2 = TREE_OPERAND (arg, 2);
3031 /* A COND_EXPR may have a throw as one operand, which
3032 then has void type. Just leave void operands
3034 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3035 VOID_TYPE_P (TREE_TYPE (arg1))
3036 ? arg1 : invert_truthvalue (arg1),
3037 VOID_TYPE_P (TREE_TYPE (arg2))
3038 ? arg2 : invert_truthvalue (arg2));
3042 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3043 invert_truthvalue (TREE_OPERAND (arg, 1)));
3045 case NON_LVALUE_EXPR:
3046 return invert_truthvalue (TREE_OPERAND (arg, 0));
3049 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3054 return build1 (TREE_CODE (arg), type,
3055 invert_truthvalue (TREE_OPERAND (arg, 0)));
3058 if (!integer_onep (TREE_OPERAND (arg, 1)))
3060 return build2 (EQ_EXPR, type, arg,
3061 fold_convert (type, integer_zero_node));
3064 return build1 (TRUTH_NOT_EXPR, type, arg);
3066 case CLEANUP_POINT_EXPR:
3067 return build1 (CLEANUP_POINT_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)));
3073 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3074 return build1 (TRUTH_NOT_EXPR, type, arg);
3077 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3078 operands are another bit-wise operation with a common input. If so,
3079 distribute the bit operations to save an operation and possibly two if
3080 constants are involved. For example, convert
3081 (A | B) & (A | C) into A | (B & C)
3082 Further simplification will occur if B and C are constants.
3084 If this optimization cannot be done, 0 will be returned. */
3087 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3092 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3093 || TREE_CODE (arg0) == code
3094 || (TREE_CODE (arg0) != BIT_AND_EXPR
3095 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3098 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3100 common = TREE_OPERAND (arg0, 0);
3101 left = TREE_OPERAND (arg0, 1);
3102 right = TREE_OPERAND (arg1, 1);
3104 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3106 common = TREE_OPERAND (arg0, 0);
3107 left = TREE_OPERAND (arg0, 1);
3108 right = TREE_OPERAND (arg1, 0);
3110 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3112 common = TREE_OPERAND (arg0, 1);
3113 left = TREE_OPERAND (arg0, 0);
3114 right = TREE_OPERAND (arg1, 1);
3116 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3118 common = TREE_OPERAND (arg0, 1);
3119 left = TREE_OPERAND (arg0, 0);
3120 right = TREE_OPERAND (arg1, 0);
3125 return fold_build2 (TREE_CODE (arg0), type, common,
3126 fold_build2 (code, type, left, right));
3129 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3130 with code CODE. This optimization is unsafe. */
3132 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3134 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3135 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3137 /* (A / C) +- (B / C) -> (A +- B) / C. */
3139 && operand_equal_p (TREE_OPERAND (arg0, 1),
3140 TREE_OPERAND (arg1, 1), 0))
3141 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3142 fold_build2 (code, type,
3143 TREE_OPERAND (arg0, 0),
3144 TREE_OPERAND (arg1, 0)),
3145 TREE_OPERAND (arg0, 1));
3147 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3148 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3149 TREE_OPERAND (arg1, 0), 0)
3150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3151 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3153 REAL_VALUE_TYPE r0, r1;
3154 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3155 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3157 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3159 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3160 real_arithmetic (&r0, code, &r0, &r1);
3161 return fold_build2 (MULT_EXPR, type,
3162 TREE_OPERAND (arg0, 0),
3163 build_real (type, r0));
3169 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3170 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3173 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3180 tree size = TYPE_SIZE (TREE_TYPE (inner));
3181 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3182 || POINTER_TYPE_P (TREE_TYPE (inner)))
3183 && host_integerp (size, 0)
3184 && tree_low_cst (size, 0) == bitsize)
3185 return fold_convert (type, inner);
3188 result = build3 (BIT_FIELD_REF, type, inner,
3189 size_int (bitsize), bitsize_int (bitpos));
3191 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3196 /* Optimize a bit-field compare.
3198 There are two cases: First is a compare against a constant and the
3199 second is a comparison of two items where the fields are at the same
3200 bit position relative to the start of a chunk (byte, halfword, word)
3201 large enough to contain it. In these cases we can avoid the shift
3202 implicit in bitfield extractions.
3204 For constants, we emit a compare of the shifted constant with the
3205 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3206 compared. For two fields at the same position, we do the ANDs with the
3207 similar mask and compare the result of the ANDs.
3209 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3210 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3211 are the left and right operands of the comparison, respectively.
3213 If the optimization described above can be done, we return the resulting
3214 tree. Otherwise we return zero. */
3217 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3220 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3221 tree type = TREE_TYPE (lhs);
3222 tree signed_type, unsigned_type;
3223 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3224 enum machine_mode lmode, rmode, nmode;
3225 int lunsignedp, runsignedp;
3226 int lvolatilep = 0, rvolatilep = 0;
3227 tree linner, rinner = NULL_TREE;
3231 /* Get all the information about the extractions being done. If the bit size
3232 if the same as the size of the underlying object, we aren't doing an
3233 extraction at all and so can do nothing. We also don't want to
3234 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3235 then will no longer be able to replace it. */
3236 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3237 &lunsignedp, &lvolatilep, false);
3238 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3239 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3244 /* If this is not a constant, we can only do something if bit positions,
3245 sizes, and signedness are the same. */
3246 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3247 &runsignedp, &rvolatilep, false);
3249 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3250 || lunsignedp != runsignedp || offset != 0
3251 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3255 /* See if we can find a mode to refer to this field. We should be able to,
3256 but fail if we can't. */
3257 nmode = get_best_mode (lbitsize, lbitpos,
3258 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3259 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3260 TYPE_ALIGN (TREE_TYPE (rinner))),
3261 word_mode, lvolatilep || rvolatilep);
3262 if (nmode == VOIDmode)
3265 /* Set signed and unsigned types of the precision of this mode for the
3267 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3268 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3270 /* Compute the bit position and size for the new reference and our offset
3271 within it. If the new reference is the same size as the original, we
3272 won't optimize anything, so return zero. */
3273 nbitsize = GET_MODE_BITSIZE (nmode);
3274 nbitpos = lbitpos & ~ (nbitsize - 1);
3276 if (nbitsize == lbitsize)
3279 if (BYTES_BIG_ENDIAN)
3280 lbitpos = nbitsize - lbitsize - lbitpos;
3282 /* Make the mask to be used against the extracted field. */
3283 mask = build_int_cst (unsigned_type, -1);
3284 mask = force_fit_type (mask, 0, false, false);
3285 mask = fold_convert (unsigned_type, mask);
3286 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3287 mask = const_binop (RSHIFT_EXPR, mask,
3288 size_int (nbitsize - lbitsize - lbitpos), 0);
3291 /* If not comparing with constant, just rework the comparison
3293 return build2 (code, compare_type,
3294 build2 (BIT_AND_EXPR, unsigned_type,
3295 make_bit_field_ref (linner, unsigned_type,
3296 nbitsize, nbitpos, 1),
3298 build2 (BIT_AND_EXPR, unsigned_type,
3299 make_bit_field_ref (rinner, unsigned_type,
3300 nbitsize, nbitpos, 1),
3303 /* Otherwise, we are handling the constant case. See if the constant is too
3304 big for the field. Warn and return a tree of for 0 (false) if so. We do
3305 this not only for its own sake, but to avoid having to test for this
3306 error case below. If we didn't, we might generate wrong code.
3308 For unsigned fields, the constant shifted right by the field length should
3309 be all zero. For signed fields, the high-order bits should agree with
3314 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3315 fold_convert (unsigned_type, rhs),
3316 size_int (lbitsize), 0)))
3318 warning (0, "comparison is always %d due to width of bit-field",
3320 return constant_boolean_node (code == NE_EXPR, compare_type);
3325 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3326 size_int (lbitsize - 1), 0);
3327 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3329 warning (0, "comparison is always %d due to width of bit-field",
3331 return constant_boolean_node (code == NE_EXPR, compare_type);
3335 /* Single-bit compares should always be against zero. */
3336 if (lbitsize == 1 && ! integer_zerop (rhs))
3338 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3339 rhs = fold_convert (type, integer_zero_node);
3342 /* Make a new bitfield reference, shift the constant over the
3343 appropriate number of bits and mask it with the computed mask
3344 (in case this was a signed field). If we changed it, make a new one. */
3345 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3348 TREE_SIDE_EFFECTS (lhs) = 1;
3349 TREE_THIS_VOLATILE (lhs) = 1;
3352 rhs = const_binop (BIT_AND_EXPR,
3353 const_binop (LSHIFT_EXPR,
3354 fold_convert (unsigned_type, rhs),
3355 size_int (lbitpos), 0),
3358 return build2 (code, compare_type,
3359 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3363 /* Subroutine for fold_truthop: decode a field reference.
3365 If EXP is a comparison reference, we return the innermost reference.
3367 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3368 set to the starting bit number.
3370 If the innermost field can be completely contained in a mode-sized
3371 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3373 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3374 otherwise it is not changed.
3376 *PUNSIGNEDP is set to the signedness of the field.
3378 *PMASK is set to the mask used. This is either contained in a
3379 BIT_AND_EXPR or derived from the width of the field.
3381 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3383 Return 0 if this is not a component reference or is one that we can't
3384 do anything with. */
3387 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3388 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3389 int *punsignedp, int *pvolatilep,
3390 tree *pmask, tree *pand_mask)
3392 tree outer_type = 0;
3394 tree mask, inner, offset;
3396 unsigned int precision;
3398 /* All the optimizations using this function assume integer fields.
3399 There are problems with FP fields since the type_for_size call
3400 below can fail for, e.g., XFmode. */
3401 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3404 /* We are interested in the bare arrangement of bits, so strip everything
3405 that doesn't affect the machine mode. However, record the type of the
3406 outermost expression if it may matter below. */
3407 if (TREE_CODE (exp) == NOP_EXPR
3408 || TREE_CODE (exp) == CONVERT_EXPR
3409 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3410 outer_type = TREE_TYPE (exp);
3413 if (TREE_CODE (exp) == BIT_AND_EXPR)
3415 and_mask = TREE_OPERAND (exp, 1);
3416 exp = TREE_OPERAND (exp, 0);
3417 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3418 if (TREE_CODE (and_mask) != INTEGER_CST)
3422 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3423 punsignedp, pvolatilep, false);
3424 if ((inner == exp && and_mask == 0)
3425 || *pbitsize < 0 || offset != 0
3426 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3429 /* If the number of bits in the reference is the same as the bitsize of
3430 the outer type, then the outer type gives the signedness. Otherwise
3431 (in case of a small bitfield) the signedness is unchanged. */
3432 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3433 *punsignedp = TYPE_UNSIGNED (outer_type);
3435 /* Compute the mask to access the bitfield. */
3436 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3437 precision = TYPE_PRECISION (unsigned_type);
3439 mask = build_int_cst (unsigned_type, -1);
3440 mask = force_fit_type (mask, 0, false, false);
3442 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3443 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3445 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3447 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3448 fold_convert (unsigned_type, and_mask), mask);
3451 *pand_mask = and_mask;
3455 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3459 all_ones_mask_p (tree mask, int size)
3461 tree type = TREE_TYPE (mask);
3462 unsigned int precision = TYPE_PRECISION (type);
3465 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3466 tmask = force_fit_type (tmask, 0, false, false);
3469 tree_int_cst_equal (mask,
3470 const_binop (RSHIFT_EXPR,
3471 const_binop (LSHIFT_EXPR, tmask,
3472 size_int (precision - size),
3474 size_int (precision - size), 0));
3477 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3478 represents the sign bit of EXP's type. If EXP represents a sign
3479 or zero extension, also test VAL against the unextended type.
3480 The return value is the (sub)expression whose sign bit is VAL,
3481 or NULL_TREE otherwise. */
3484 sign_bit_p (tree exp, tree val)
3486 unsigned HOST_WIDE_INT mask_lo, lo;
3487 HOST_WIDE_INT mask_hi, hi;
3491 /* Tree EXP must have an integral type. */
3492 t = TREE_TYPE (exp);
3493 if (! INTEGRAL_TYPE_P (t))
3496 /* Tree VAL must be an integer constant. */
3497 if (TREE_CODE (val) != INTEGER_CST
3498 || TREE_CONSTANT_OVERFLOW (val))
3501 width = TYPE_PRECISION (t);
3502 if (width > HOST_BITS_PER_WIDE_INT)
3504 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3507 mask_hi = ((unsigned HOST_WIDE_INT) -1
3508 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3514 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3517 mask_lo = ((unsigned HOST_WIDE_INT) -1
3518 >> (HOST_BITS_PER_WIDE_INT - width));
3521 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3522 treat VAL as if it were unsigned. */
3523 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3524 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3527 /* Handle extension from a narrower type. */
3528 if (TREE_CODE (exp) == NOP_EXPR
3529 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3530 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3535 /* Subroutine for fold_truthop: determine if an operand is simple enough
3536 to be evaluated unconditionally. */
3539 simple_operand_p (tree exp)
3541 /* Strip any conversions that don't change the machine mode. */
3544 return (CONSTANT_CLASS_P (exp)
3545 || TREE_CODE (exp) == SSA_NAME
3547 && ! TREE_ADDRESSABLE (exp)
3548 && ! TREE_THIS_VOLATILE (exp)
3549 && ! DECL_NONLOCAL (exp)
3550 /* Don't regard global variables as simple. They may be
3551 allocated in ways unknown to the compiler (shared memory,
3552 #pragma weak, etc). */
3553 && ! TREE_PUBLIC (exp)
3554 && ! DECL_EXTERNAL (exp)
3555 /* Loading a static variable is unduly expensive, but global
3556 registers aren't expensive. */
3557 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3560 /* The following functions are subroutines to fold_range_test and allow it to
3561 try to change a logical combination of comparisons into a range test.
3564 X == 2 || X == 3 || X == 4 || X == 5
3568 (unsigned) (X - 2) <= 3
3570 We describe each set of comparisons as being either inside or outside
3571 a range, using a variable named like IN_P, and then describe the
3572 range with a lower and upper bound. If one of the bounds is omitted,
3573 it represents either the highest or lowest value of the type.
3575 In the comments below, we represent a range by two numbers in brackets
3576 preceded by a "+" to designate being inside that range, or a "-" to
3577 designate being outside that range, so the condition can be inverted by
3578 flipping the prefix. An omitted bound is represented by a "-". For
3579 example, "- [-, 10]" means being outside the range starting at the lowest
3580 possible value and ending at 10, in other words, being greater than 10.
3581 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3584 We set up things so that the missing bounds are handled in a consistent
3585 manner so neither a missing bound nor "true" and "false" need to be
3586 handled using a special case. */
3588 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3589 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3590 and UPPER1_P are nonzero if the respective argument is an upper bound
3591 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3592 must be specified for a comparison. ARG1 will be converted to ARG0's
3593 type if both are specified. */
3596 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3597 tree arg1, int upper1_p)
3603 /* If neither arg represents infinity, do the normal operation.
3604 Else, if not a comparison, return infinity. Else handle the special
3605 comparison rules. Note that most of the cases below won't occur, but
3606 are handled for consistency. */
3608 if (arg0 != 0 && arg1 != 0)
3610 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3611 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3613 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3616 if (TREE_CODE_CLASS (code) != tcc_comparison)
3619 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3620 for neither. In real maths, we cannot assume open ended ranges are
3621 the same. But, this is computer arithmetic, where numbers are finite.
3622 We can therefore make the transformation of any unbounded range with
3623 the value Z, Z being greater than any representable number. This permits
3624 us to treat unbounded ranges as equal. */
3625 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3626 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3630 result = sgn0 == sgn1;
3633 result = sgn0 != sgn1;
3636 result = sgn0 < sgn1;
3639 result = sgn0 <= sgn1;
3642 result = sgn0 > sgn1;
3645 result = sgn0 >= sgn1;
3651 return constant_boolean_node (result, type);
3654 /* Given EXP, a logical expression, set the range it is testing into
3655 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3656 actually being tested. *PLOW and *PHIGH will be made of the same type
3657 as the returned expression. If EXP is not a comparison, we will most
3658 likely not be returning a useful value and range. */
/* NOTE(review): this chunk is an elided extract of GCC's fold-const.c;
   many original source lines between the ones shown are missing, so the
   fragments below do not form a complete, compilable function body.  */
/* Decompose EXP into a value-range test.  On return, *PIN_P is 1 if the
   value must lie inside [*PLOW, *PHIGH] and 0 if it must lie outside;
   a null bound apparently means "unbounded on that side" — the helper
   range_binop treats 0 bounds specially (see callers below).  */
3661 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3663 enum tree_code code;
3664 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3665 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3667 tree low, high, n_low, n_high;
3669 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3670 and see if we can refine the range. Some of the cases below may not
3671 happen, but it doesn't seem worth worrying about this. We "continue"
3672 the outer loop when we've changed something; otherwise we "break"
3673 the switch, which will "break" the while. */
3676 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3680 code = TREE_CODE (exp);
3681 exp_type = TREE_TYPE (exp);
/* Pick apart EXP's operands; which operands exist depends on the
   tree-code class (unary, binary, comparison, expression).  */
3683 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3685 if (TREE_CODE_LENGTH (code) > 0)
3686 arg0 = TREE_OPERAND (exp, 0);
3687 if (TREE_CODE_CLASS (code) == tcc_comparison
3688 || TREE_CODE_CLASS (code) == tcc_unary
3689 || TREE_CODE_CLASS (code) == tcc_binary)
3690 arg0_type = TREE_TYPE (arg0);
3691 if (TREE_CODE_CLASS (code) == tcc_binary
3692 || TREE_CODE_CLASS (code) == tcc_comparison
3693 || (TREE_CODE_CLASS (code) == tcc_expression
3694 && TREE_CODE_LENGTH (code) > 1))
3695 arg1 = TREE_OPERAND (exp, 1);
/* Cases of the (elided) switch on CODE follow.  */
3700 case TRUTH_NOT_EXPR:
3701 in_p = ! in_p, exp = arg0;
3704 case EQ_EXPR: case NE_EXPR:
3705 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3706 /* We can only do something if the range is testing for zero
3707 and if the second operand is an integer constant. Note that
3708 saying something is "in" the range we make is done by
3709 complementing IN_P since it will set in the initial case of
3710 being not equal to zero; "out" is leaving it alone. */
3711 if (low == 0 || high == 0
3712 || ! integer_zerop (low) || ! integer_zerop (high)
3713 || TREE_CODE (arg1) != INTEGER_CST)
/* Each comparison code maps to an interval against the constant ARG1;
   the "+"/"-" in the comments record whether IN_P is complemented.  */
3718 case NE_EXPR: /* - [c, c] */
3721 case EQ_EXPR: /* + [c, c] */
3722 in_p = ! in_p, low = high = arg1;
3724 case GT_EXPR: /* - [-, c] */
3725 low = 0, high = arg1;
3727 case GE_EXPR: /* + [c, -] */
3728 in_p = ! in_p, low = arg1, high = 0;
3730 case LT_EXPR: /* - [c, -] */
3731 low = arg1, high = 0;
3733 case LE_EXPR: /* + [-, c] */
3734 in_p = ! in_p, low = 0, high = arg1;
3740 /* If this is an unsigned comparison, we also know that EXP is
3741 greater than or equal to zero. We base the range tests we make
3742 on that fact, so we record it here so we can parse existing
3743 range tests. We test arg0_type since often the return type
3744 of, e.g. EQ_EXPR, is boolean. */
3745 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3747 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3749 fold_convert (arg0_type, integer_zero_node),
3753 in_p = n_in_p, low = n_low, high = n_high;
3755 /* If the high bound is missing, but we have a nonzero low
3756 bound, reverse the range so it goes from zero to the low bound
3758 if (high == 0 && low && ! integer_zerop (low))
3761 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3762 integer_one_node, 0);
3763 low = fold_convert (arg0_type, integer_zero_node);
3771 /* (-x) IN [a,b] -> x in [-b, -a] */
3772 n_low = range_binop (MINUS_EXPR, exp_type,
3773 fold_convert (exp_type, integer_zero_node),
3775 n_high = range_binop (MINUS_EXPR, exp_type,
3776 fold_convert (exp_type, integer_zero_node),
3778 low = n_low, high = n_high;
3784 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3785 fold_convert (exp_type, integer_one_node));
/* PLUS/MINUS of a constant: shift both bounds the other way so the
   range is expressed over ARG0 instead of EXP.  */
3788 case PLUS_EXPR: case MINUS_EXPR:
3789 if (TREE_CODE (arg1) != INTEGER_CST)
3792 /* If EXP is signed, any overflow in the computation is undefined,
3793 so we don't worry about it so long as our computations on
3794 the bounds don't overflow. For unsigned, overflow is defined
3795 and this is exactly the right thing. */
3796 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3797 arg0_type, low, 0, arg1, 0);
3798 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3799 arg0_type, high, 1, arg1, 0);
3800 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3801 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3804 /* Check for an unsigned range which has wrapped around the maximum
3805 value thus making n_high < n_low, and normalize it. */
3806 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3808 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3809 integer_one_node, 0);
3810 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3811 integer_one_node, 0);
3813 /* If the range is of the form +/- [ x+1, x ], we won't
3814 be able to normalize it. But then, it represents the
3815 whole range or the empty set, so make it
3817 if (tree_int_cst_equal (n_low, low)
3818 && tree_int_cst_equal (n_high, high))
3824 low = n_low, high = n_high;
/* Type conversions: narrowing conversions are rejected; otherwise the
   bounds are pushed through to ARG0's type, with extra care below when
   signedness changes.  */
3829 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3830 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3833 if (! INTEGRAL_TYPE_P (arg0_type)
3834 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3835 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3838 n_low = low, n_high = high;
3841 n_low = fold_convert (arg0_type, n_low);
3844 n_high = fold_convert (arg0_type, n_high);
3847 /* If we're converting arg0 from an unsigned type, to exp,
3848 a signed type, we will be doing the comparison as unsigned.
3849 The tests above have already verified that LOW and HIGH
3852 So we have to ensure that we will handle large unsigned
3853 values the same way that the current signed bounds treat
3856 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3859 tree equiv_type = lang_hooks.types.type_for_mode
3860 (TYPE_MODE (arg0_type), 1);
3862 /* A range without an upper bound is, naturally, unbounded.
3863 Since convert would have cropped a very large value, use
3864 the max value for the destination type. */
3866 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3867 : TYPE_MAX_VALUE (arg0_type);
3869 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3870 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3871 fold_convert (arg0_type,
3873 fold_convert (arg0_type,
3876 /* If the low bound is specified, "and" the range with the
3877 range for which the original unsigned value will be
3881 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3882 1, n_low, n_high, 1,
3883 fold_convert (arg0_type,
3888 in_p = (n_in_p == in_p);
3892 /* Otherwise, "or" the range with the range of the input
3893 that will be interpreted as negative. */
3894 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3895 0, n_low, n_high, 1,
3896 fold_convert (arg0_type,
3901 in_p = (in_p != n_in_p);
3906 low = n_low, high = n_high;
3916 /* If EXP is a constant, we can evaluate whether this is true or false. */
3917 if (TREE_CODE (exp) == INTEGER_CST)
3919 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3921 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3927 *pin_p = in_p, *plow = low, *phigh = high;
/* NOTE(review): the original function's trailing "return exp;" is
   among the elided lines — confirm against upstream fold-const.c.  */
3931 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3932 type, TYPE, return an expression to test if EXP is in (or out of, depending
3933 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): elided extract — original lines between the ones shown
   are missing, so this is not a complete function body.  */
3936 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3938 tree etype = TREE_TYPE (exp);
3941 #ifdef HAVE_canonicalize_funcptr_for_compare
3942 /* Disable this optimization for function pointer expressions
3943 on targets that require function pointer canonicalization. */
3944 if (HAVE_canonicalize_funcptr_for_compare
3945 && TREE_CODE (etype) == POINTER_TYPE
3946 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the inverted "in range" test.  */
3952 value = build_range_check (type, exp, 1, low, high);
3954 return invert_truthvalue (value);
/* No bounds at all: the test is trivially true.  */
3959 if (low == 0 && high == 0)
3960 return fold_convert (type, integer_one_node);
3963 return fold_build2 (LE_EXPR, type, exp,
3964 fold_convert (etype, high));
3967 return fold_build2 (GE_EXPR, type, exp,
3968 fold_convert (etype, low));
3970 if (operand_equal_p (low, high, 0))
3971 return fold_build2 (EQ_EXPR, type, exp,
3972 fold_convert (etype, low));
/* [0, HIGH] in a signed type: redo the check in the corresponding
   unsigned type so it becomes a single comparison.  */
3974 if (integer_zerop (low))
3976 if (! TYPE_UNSIGNED (etype))
3978 etype = lang_hooks.types.unsigned_type (etype);
3979 high = fold_convert (etype, high);
3980 exp = fold_convert (etype, exp);
3982 return build_range_check (type, exp, 1, 0, high);
3985 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3986 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3988 unsigned HOST_WIDE_INT lo;
3992 prec = TYPE_PRECISION (etype);
3993 if (prec <= HOST_BITS_PER_WIDE_INT)
3996 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4000 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4001 lo = (unsigned HOST_WIDE_INT) -1;
4004 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4006 if (TYPE_UNSIGNED (etype))
4008 etype = lang_hooks.types.signed_type (etype);
4009 exp = fold_convert (etype, exp);
4011 return fold_build2 (GT_EXPR, type, exp,
4012 fold_convert (etype, integer_zero_node));
/* General case: reduce LOW <= EXP <= HIGH to the single unsigned
   comparison (EXP - LOW) <= (HIGH - LOW), provided the subtraction
   HIGH - LOW does not itself overflow.  */
4016 value = const_binop (MINUS_EXPR, high, low, 0);
4017 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4018 && ! TYPE_UNSIGNED (etype))
4020 tree utype, minv, maxv;
4022 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4023 for the type in question, as we rely on this here. */
4024 switch (TREE_CODE (etype))
4029 /* There is no requirement that LOW be within the range of ETYPE
4030 if the latter is a subtype. It must, however, be within the base
4031 type of ETYPE. So be sure we do the subtraction in that type. */
4032 if (TREE_TYPE (etype))
4033 etype = TREE_TYPE (etype);
4034 utype = lang_hooks.types.unsigned_type (etype);
4035 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4036 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4037 integer_one_node, 1);
4038 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4039 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4043 high = fold_convert (etype, high);
4044 low = fold_convert (etype, low);
4045 exp = fold_convert (etype, exp);
4046 value = const_binop (MINUS_EXPR, high, low, 0);
4054 if (value != 0 && ! TREE_OVERFLOW (value))
4055 return build_range_check (type,
4056 fold_build2 (MINUS_EXPR, etype, exp, low),
4057 1, fold_convert (etype, integer_zero_node),
4063 /* Given two ranges, see if we can merge them into one. Return 1 if we
4064 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): elided extract — original lines between the ones shown
   are missing (including the function signature's return-type line and
   several closing braces), so this is not a complete body.  */
4067 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4068 tree high0, int in1_p, tree low1, tree high1)
4076 int lowequal = ((low0 == 0 && low1 == 0)
4077 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4078 low0, 0, low1, 0)));
4079 int highequal = ((high0 == 0 && high1 == 0)
4080 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4081 high0, 1, high1, 1)));
4083 /* Make range 0 be the range that starts first, or ends last if they
4084 start at the same value. Swap them if it isn't. */
4085 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4088 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4089 high1, 1, high0, 1))))
4091 temp = in0_p, in0_p = in1_p, in1_p = temp;
4092 tem = low0, low0 = low1, low1 = tem;
4093 tem = high0, high0 = high1, high1 = tem;
4096 /* Now flag two cases, whether the ranges are disjoint or whether the
4097 second range is totally subsumed in the first. Note that the tests
4098 below are simplified by the ones above. */
4099 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4100 high0, 1, low1, 0));
4101 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4102 high1, 1, high0, 1));
/* Four-way case analysis on (in0_p, in1_p): whether each operand range
   is an "in" or an "out" test.  The enclosing if/else chain is partly
   elided below.  */
4104 /* We now have four cases, depending on whether we are including or
4105 excluding the two ranges. */
4108 /* If they don't overlap, the result is false. If the second range
4109 is a subset it is the result. Otherwise, the range is from the start
4110 of the second to the end of the first. */
4112 in_p = 0, low = high = 0;
4114 in_p = 1, low = low1, high = high1;
4116 in_p = 1, low = low1, high = high0;
4119 else if (in0_p && ! in1_p)
4121 /* If they don't overlap, the result is the first range. If they are
4122 equal, the result is false. If the second range is a subset of the
4123 first, and the ranges begin at the same place, we go from just after
4124 the end of the first range to the end of the second. If the second
4125 range is not a subset of the first, or if it is a subset and both
4126 ranges end at the same place, the range starts at the start of the
4127 first range and ends just before the second range.
4128 Otherwise, we can't describe this as a single range. */
4130 in_p = 1, low = low0, high = high0;
4131 else if (lowequal && highequal)
4132 in_p = 0, low = high = 0;
4133 else if (subset && lowequal)
4135 in_p = 1, high = high0;
4136 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4137 integer_one_node, 0);
4139 else if (! subset || highequal)
4141 in_p = 1, low = low0;
4142 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4143 integer_one_node, 0);
4149 else if (! in0_p && in1_p)
4151 /* If they don't overlap, the result is the second range. If the second
4152 is a subset of the first, the result is false. Otherwise,
4153 the range starts just after the first range and ends at the
4154 end of the second. */
4156 in_p = 1, low = low1, high = high1;
4157 else if (subset || highequal)
4158 in_p = 0, low = high = 0;
4161 in_p = 1, high = high1;
4162 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4163 integer_one_node, 0);
4169 /* The case where we are excluding both ranges. Here the complex case
4170 is if they don't overlap. In that case, the only time we have a
4171 range is if they are adjacent. If the second is a subset of the
4172 first, the result is the first. Otherwise, the range to exclude
4173 starts at the beginning of the first range and ends at the end of the
4177 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4178 range_binop (PLUS_EXPR, NULL_TREE,
4180 integer_one_node, 1),
4182 in_p = 0, low = low0, high = high1;
4185 /* Canonicalize - [min, x] into - [-, x]. */
4186 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4187 switch (TREE_CODE (TREE_TYPE (low0)))
4190 if (TYPE_PRECISION (TREE_TYPE (low0))
4191 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4196 if (tree_int_cst_equal (low0,
4197 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4201 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4202 && integer_zerop (low0))
4209 /* Canonicalize - [x, max] into - [x, -]. */
4210 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4211 switch (TREE_CODE (TREE_TYPE (high1)))
4214 if (TYPE_PRECISION (TREE_TYPE (high1))
4215 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4220 if (tree_int_cst_equal (high1,
4221 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4225 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4226 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4228 integer_one_node, 1)))
4235 /* The ranges might be also adjacent between the maximum and
4236 minimum values of the given type. For
4237 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4238 return + [x + 1, y - 1]. */
4239 if (low0 == 0 && high1 == 0)
4241 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4242 integer_one_node, 1);
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4244 integer_one_node, 0);
4245 if (low == 0 || high == 0)
4255 in_p = 0, low = low0, high = high0;
4257 in_p = 0, low = low0, high = high1;
4260 *pin_p = in_p, *plow = low, *phigh = high;
/* NOTE(review): the original's trailing "return 1;" is elided here.  */
4265 /* Subroutine of fold, looking inside expressions of the form
4266 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4267 of the COND_EXPR. This function is being used also to optimize
4268 A op B ? C : A, by reversing the comparison first.
4270 Return a folded expression whose code is not a COND_EXPR
4271 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): elided extract — many original lines between the ones
   shown are missing (switch scaffolding, braces, some returns), so this
   is not a complete, compilable body.  */
4274 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4276 enum tree_code comp_code = TREE_CODE (arg0);
4277 tree arg00 = TREE_OPERAND (arg0, 0);
4278 tree arg01 = TREE_OPERAND (arg0, 1);
4279 tree arg1_type = TREE_TYPE (arg1);
4285 /* If we have A op 0 ? A : -A, consider applying the following
4288 A == 0? A : -A same as -A
4289 A != 0? A : -A same as A
4290 A >= 0? A : -A same as abs (A)
4291 A > 0? A : -A same as abs (A)
4292 A <= 0? A : -A same as -abs (A)
4293 A < 0? A : -A same as -abs (A)
4295 None of these transformations work for modes with signed
4296 zeros. If A is +/-0, the first two transformations will
4297 change the sign of the result (from +0 to -0, or vice
4298 versa). The last four will fix the sign of the result,
4299 even though the original expressions could be positive or
4300 negative, depending on the sign of A.
4302 Note that all these transformations are correct if A is
4303 NaN, since the two alternatives (A and -A) are also NaNs. */
/* Recognize "A op 0 ? A : -A", accepting either an explicit NEGATE_EXPR
   or a MINUS_EXPR whose operands are swapped relative to ARG1.  */
4304 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4305 ? real_zerop (arg01)
4306 : integer_zerop (arg01))
4307 && ((TREE_CODE (arg2) == NEGATE_EXPR
4308 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4309 /* In the case that A is of the form X-Y, '-A' (arg2) may
4310 have already been folded to Y-X, check for that. */
4311 || (TREE_CODE (arg1) == MINUS_EXPR
4312 && TREE_CODE (arg2) == MINUS_EXPR
4313 && operand_equal_p (TREE_OPERAND (arg1, 0),
4314 TREE_OPERAND (arg2, 1), 0)
4315 && operand_equal_p (TREE_OPERAND (arg1, 1),
4316 TREE_OPERAND (arg2, 0), 0))))
4321 tem = fold_convert (arg1_type, arg1);
4322 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4325 return pedantic_non_lvalue (fold_convert (type, arg1));
/* abs(A) cases: ABS_EXPR on unsigned types is avoided by converting to
   the signed equivalent first; flag_trapping_math suppresses the fold
   (the elided code apparently breaks out of the switch).  */
4328 if (flag_trapping_math)
4333 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4334 arg1 = fold_convert (lang_hooks.types.signed_type
4335 (TREE_TYPE (arg1)), arg1);
4336 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4337 return pedantic_non_lvalue (fold_convert (type, tem));
4340 if (flag_trapping_math)
4344 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4345 arg1 = fold_convert (lang_hooks.types.signed_type
4346 (TREE_TYPE (arg1)), arg1);
4347 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4348 return negate_expr (fold_convert (type, tem));
4350 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4354 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4355 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4356 both transformations are correct when A is NaN: A != 0
4357 is then true, and A == 0 is false. */
4359 if (integer_zerop (arg01) && integer_zerop (arg2))
4361 if (comp_code == NE_EXPR)
4362 return pedantic_non_lvalue (fold_convert (type, arg1));
4363 else if (comp_code == EQ_EXPR)
4364 return fold_convert (type, integer_zero_node);
4367 /* Try some transformations of A op B ? A : B.
4369 A == B? A : B same as B
4370 A != B? A : B same as A
4371 A >= B? A : B same as max (A, B)
4372 A > B? A : B same as max (B, A)
4373 A <= B? A : B same as min (A, B)
4374 A < B? A : B same as min (B, A)
4376 As above, these transformations don't work in the presence
4377 of signed zeros. For example, if A and B are zeros of
4378 opposite sign, the first two transformations will change
4379 the sign of the result. In the last four, the original
4380 expressions give different results for (A=+0, B=-0) and
4381 (A=-0, B=+0), but the transformed expressions do not.
4383 The first two transformations are correct if either A or B
4384 is a NaN. In the first transformation, the condition will
4385 be false, and B will indeed be chosen. In the case of the
4386 second transformation, the condition A != B will be true,
4387 and A will be chosen.
4389 The conversions to max() and min() are not correct if B is
4390 a number and A is not. The conditions in the original
4391 expressions will be false, so all four give B. The min()
4392 and max() versions would give a NaN instead. */
4393 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4394 /* Avoid these transformations if the COND_EXPR may be used
4395 as an lvalue in the C++ front-end. PR c++/19199. */
4397 || strcmp (lang_hooks.name, "GNU C++") != 0
4398 || ! maybe_lvalue_p (arg1)
4399 || ! maybe_lvalue_p (arg2)))
4401 tree comp_op0 = arg00;
4402 tree comp_op1 = arg01;
4403 tree comp_type = TREE_TYPE (comp_op0);
4405 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4406 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4416 return pedantic_non_lvalue (fold_convert (type, arg2));
4418 return pedantic_non_lvalue (fold_convert (type, arg1));
4423 /* In C++ a ?: expression can be an lvalue, so put the
4424 operand which will be used if they are equal first
4425 so that we can convert this back to the
4426 corresponding COND_EXPR. */
/* LE/LT (and unordered variants) become MIN_EXPR; operand order
   depends on which comparison code selected the equal case.  */
4427 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4429 comp_op0 = fold_convert (comp_type, comp_op0);
4430 comp_op1 = fold_convert (comp_type, comp_op1);
4431 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4432 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4433 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4434 return pedantic_non_lvalue (fold_convert (type, tem));
4441 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4443 comp_op0 = fold_convert (comp_type, comp_op0);
4444 comp_op1 = fold_convert (comp_type, comp_op1);
4445 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4446 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4447 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4448 return pedantic_non_lvalue (fold_convert (type, tem));
4452 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4453 return pedantic_non_lvalue (fold_convert (type, arg2));
4456 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4457 return pedantic_non_lvalue (fold_convert (type, arg1));
4460 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4465 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4466 we might still be able to simplify this. For example,
4467 if C1 is one less or one more than C2, this might have started
4468 out as a MIN or MAX and been transformed by this function.
4469 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4471 if (INTEGRAL_TYPE_P (type)
4472 && TREE_CODE (arg01) == INTEGER_CST
4473 && TREE_CODE (arg2) == INTEGER_CST)
4477 /* We can replace A with C1 in this case. */
4478 arg1 = fold_convert (type, arg01);
4479 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4482 /* If C1 is C2 + 1, this is min(A, C2). */
4483 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4485 && operand_equal_p (arg01,
4486 const_binop (PLUS_EXPR, arg2,
4487 integer_one_node, 0),
4489 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4494 /* If C1 is C2 - 1, this is min(A, C2). */
4495 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4497 && operand_equal_p (arg01,
4498 const_binop (MINUS_EXPR, arg2,
4499 integer_one_node, 0),
4501 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4506 /* If C1 is C2 - 1, this is max(A, C2). */
4507 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4509 && operand_equal_p (arg01,
4510 const_binop (MINUS_EXPR, arg2,
4511 integer_one_node, 0),
4513 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4518 /* If C1 is C2 + 1, this is max(A, C2). */
4519 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4521 && operand_equal_p (arg01,
4522 const_binop (PLUS_EXPR, arg2,
4523 integer_one_node, 0),
4525 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4539 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4540 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
/* NOTE(review): the matching #endif for the #ifndef above is among the
   elided lines.  The macro defaults to "prefer non-short-circuit forms
   when branches are expensive".  */
4543 /* EXP is some logical combination of boolean tests. See if we can
4544 merge it into some range test. Return the new tree if so. */
/* NOTE(review): elided extract — this is not a complete, compilable
   body; braces and some lines are missing.  */
4547 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4549 int or_op = (code == TRUTH_ORIF_EXPR
4550 || code == TRUTH_OR_EXPR);
4551 int in0_p, in1_p, in_p;
4552 tree low0, low1, low, high0, high1, high;
/* Decompose each side into a value-range test via make_range.  */
4553 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4554 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4557 /* If this is an OR operation, invert both sides; we will invert
4558 again at the end. */
4560 in0_p = ! in0_p, in1_p = ! in1_p;
4562 /* If both expressions are the same, if we can merge the ranges, and we
4563 can build the range test, return it or it inverted. If one of the
4564 ranges is always true or always false, consider it to be the same
4565 expression as the other. */
4566 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4567 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4569 && 0 != (tem = (build_range_check (type,
4571 : rhs != 0 ? rhs : integer_zero_node,
4573 return or_op ? invert_truthvalue (tem) : tem;
4575 /* On machines where the branch cost is expensive, if this is a
4576 short-circuited branch and the underlying object on both sides
4577 is the same, make a non-short-circuit operation. */
4578 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4579 && lhs != 0 && rhs != 0
4580 && (code == TRUTH_ANDIF_EXPR
4581 || code == TRUTH_ORIF_EXPR)
4582 && operand_equal_p (lhs, rhs, 0))
4584 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4585 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4586 which cases we can't do this. */
4587 if (simple_operand_p (lhs))
4588 return build2 (code == TRUTH_ANDIF_EXPR
4589 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4592 else if (lang_hooks.decls.global_bindings_p () == 0
4593 && ! CONTAINS_PLACEHOLDER_P (lhs))
4595 tree common = save_expr (lhs);
4597 if (0 != (lhs = build_range_check (type, common,
4598 or_op ? ! in0_p : in0_p,
4600 && (0 != (rhs = build_range_check (type, common,
4601 or_op ? ! in1_p : in1_p,
4603 return build2 (code == TRUTH_ANDIF_EXPR
4604 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4612 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4613 bit value. Arrange things so the extra bits will be set to zero if and
4614 only if C is signed-extended to its full width. If MASK is nonzero,
4615 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): elided extract — some original lines between the ones
   shown are missing, so this is not a complete body.  */
4618 unextend (tree c, int p, int unsignedp, tree mask)
4620 tree type = TREE_TYPE (c);
4621 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already occupies the full mode or is unsigned
   (the elided line presumably returns C unchanged — confirm).  */
4624 if (p == modesize || unsignedp)
4627 /* We work by getting just the sign bit into the low-order bit, then
4628 into the high-order bit, then sign-extend. We then XOR that value
4630 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4631 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4633 /* We must use a signed type in order to get an arithmetic right shift.
4634 However, we must also avoid introducing accidental overflows, so that
4635 a subsequent call to integer_zerop will work. Hence we must
4636 do the type conversion here. At this point, the constant is either
4637 zero or one, and the conversion to a signed type can never overflow.
4638 We could get an overflow if this conversion is done anywhere else. */
4639 if (TYPE_UNSIGNED (type))
4640 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Broadcast the sign bit across the high-order positions, then mask
   the extra bits if MASK was supplied.  */
4642 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4643 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4645 temp = const_binop (BIT_AND_EXPR, temp,
4646 fold_convert (TREE_TYPE (c), mask), 0);
4647 /* If necessary, convert the type back to match the type of C. */
4648 if (TYPE_UNSIGNED (type))
4649 temp = fold_convert (type, temp);
4651 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4654 /* Find ways of folding logical expressions of LHS and RHS:
4655 Try to merge two comparisons to the same innermost item.
4656 Look for range tests like "ch >= '0' && ch <= '9'".
4657 Look for combinations of simple terms on machines with expensive branches
4658 and evaluate the RHS unconditionally.
4660 For example, if we have p->a == 2 && p->b == 4 and we can make an
4661 object large enough to span both A and B, we can do this with a comparison
4662 against the object ANDed with the a mask.
4664 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4665 operations to do this with one comparison.
4667 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4668 function and the one above.
4670 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4671 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4673 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4676 We return the simplified tree or 0 if no optimization is possible. */
4679 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4681 /* If this is the "or" of two comparisons, we can do something if
4682 the comparisons are NE_EXPR. If this is the "and", we can do something
4683 if the comparisons are EQ_EXPR. I.e.,
4684 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4686 WANTED_CODE is this operation code. For single bit fields, we can
4687 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4688 comparison for one-bit fields. */
4690 enum tree_code wanted_code;
4691 enum tree_code lcode, rcode;
4692 tree ll_arg, lr_arg, rl_arg, rr_arg;
4693 tree ll_inner, lr_inner, rl_inner, rr_inner;
4694 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4695 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4696 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4697 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4698 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4699 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4700 enum machine_mode lnmode, rnmode;
4701 tree ll_mask, lr_mask, rl_mask, rr_mask;
4702 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4703 tree l_const, r_const;
4704 tree lntype, rntype, result;
4705 int first_bit, end_bit;
4708 /* Start by getting the comparison codes. Fail if anything is volatile.
4709 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4710 it were surrounded with a NE_EXPR. */
4712 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4715 lcode = TREE_CODE (lhs);
4716 rcode = TREE_CODE (rhs);
4718 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4720 lhs = build2 (NE_EXPR, truth_type, lhs,
4721 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4725 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4727 rhs = build2 (NE_EXPR, truth_type, rhs,
4728 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4732 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4733 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4736 ll_arg = TREE_OPERAND (lhs, 0);
4737 lr_arg = TREE_OPERAND (lhs, 1);
4738 rl_arg = TREE_OPERAND (rhs, 0);
4739 rr_arg = TREE_OPERAND (rhs, 1);
4741 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4742 if (simple_operand_p (ll_arg)
4743 && simple_operand_p (lr_arg))
4746 if (operand_equal_p (ll_arg, rl_arg, 0)
4747 && operand_equal_p (lr_arg, rr_arg, 0))
4749 result = combine_comparisons (code, lcode, rcode,
4750 truth_type, ll_arg, lr_arg);
4754 else if (operand_equal_p (ll_arg, rr_arg, 0)
4755 && operand_equal_p (lr_arg, rl_arg, 0))
4757 result = combine_comparisons (code, lcode,
4758 swap_tree_comparison (rcode),
4759 truth_type, ll_arg, lr_arg);
4765 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4766 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4768 /* If the RHS can be evaluated unconditionally and its operands are
4769 simple, it wins to evaluate the RHS unconditionally on machines
4770 with expensive branches. In this case, this isn't a comparison
4771 that can be merged. Avoid doing this if the RHS is a floating-point
4772 comparison since those can trap. */
4774 if (BRANCH_COST >= 2
4775 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4776 && simple_operand_p (rl_arg)
4777 && simple_operand_p (rr_arg))
4779 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4780 if (code == TRUTH_OR_EXPR
4781 && lcode == NE_EXPR && integer_zerop (lr_arg)
4782 && rcode == NE_EXPR && integer_zerop (rr_arg)
4783 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4784 return build2 (NE_EXPR, truth_type,
4785 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4787 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4789 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4790 if (code == TRUTH_AND_EXPR
4791 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4792 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4793 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4794 return build2 (EQ_EXPR, truth_type,
4795 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4797 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4799 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4800 return build2 (code, truth_type, lhs, rhs);
4803 /* See if the comparisons can be merged. Then get all the parameters for
4806 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4807 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4811 ll_inner = decode_field_reference (ll_arg,
4812 &ll_bitsize, &ll_bitpos, &ll_mode,
4813 &ll_unsignedp, &volatilep, &ll_mask,
4815 lr_inner = decode_field_reference (lr_arg,
4816 &lr_bitsize, &lr_bitpos, &lr_mode,
4817 &lr_unsignedp, &volatilep, &lr_mask,
4819 rl_inner = decode_field_reference (rl_arg,
4820 &rl_bitsize, &rl_bitpos, &rl_mode,
4821 &rl_unsignedp, &volatilep, &rl_mask,
4823 rr_inner = decode_field_reference (rr_arg,
4824 &rr_bitsize, &rr_bitpos, &rr_mode,
4825 &rr_unsignedp, &volatilep, &rr_mask,
4828 /* It must be true that the inner operation on the lhs of each
4829 comparison must be the same if we are to be able to do anything.
4830 Then see if we have constants. If not, the same must be true for
4832 if (volatilep || ll_inner == 0 || rl_inner == 0
4833 || ! operand_equal_p (ll_inner, rl_inner, 0))
4836 if (TREE_CODE (lr_arg) == INTEGER_CST
4837 && TREE_CODE (rr_arg) == INTEGER_CST)
4838 l_const = lr_arg, r_const = rr_arg;
4839 else if (lr_inner == 0 || rr_inner == 0
4840 || ! operand_equal_p (lr_inner, rr_inner, 0))
4843 l_const = r_const = 0;
4845 /* If either comparison code is not correct for our logical operation,
4846 fail. However, we can convert a one-bit comparison against zero into
4847 the opposite comparison against that bit being set in the field. */
4849 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4850 if (lcode != wanted_code)
4852 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4854 /* Make the left operand unsigned, since we are only interested
4855 in the value of one bit. Otherwise we are doing the wrong
4864 /* This is analogous to the code for l_const above. */
4865 if (rcode != wanted_code)
4867 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4876 /* After this point all optimizations will generate bit-field
4877 references, which we might not want. */
4878 if (! lang_hooks.can_use_bit_fields_p ())
4881 /* See if we can find a mode that contains both fields being compared on
4882 the left. If we can't, fail. Otherwise, update all constants and masks
4883 to be relative to a field of that size. */
4884 first_bit = MIN (ll_bitpos, rl_bitpos);
4885 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4886 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4887 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4889 if (lnmode == VOIDmode)
4892 lnbitsize = GET_MODE_BITSIZE (lnmode);
4893 lnbitpos = first_bit & ~ (lnbitsize - 1);
4894 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4895 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4897 if (BYTES_BIG_ENDIAN)
4899 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4900 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4903 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4904 size_int (xll_bitpos), 0);
4905 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4906 size_int (xrl_bitpos), 0);
4910 l_const = fold_convert (lntype, l_const);
4911 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4912 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4913 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4914 fold_build1 (BIT_NOT_EXPR,
4918 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4920 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4925 r_const = fold_convert (lntype, r_const);
4926 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4927 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4928 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4929 fold_build1 (BIT_NOT_EXPR,
4933 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4935 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4939 /* If the right sides are not constant, do the same for it. Also,
4940 disallow this optimization if a size or signedness mismatch occurs
4941 between the left and right sides. */
4944 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4945 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4946 /* Make sure the two fields on the right
4947 correspond to the left without being swapped. */
4948 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4951 first_bit = MIN (lr_bitpos, rr_bitpos);
4952 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4953 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4954 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4956 if (rnmode == VOIDmode)
4959 rnbitsize = GET_MODE_BITSIZE (rnmode);
4960 rnbitpos = first_bit & ~ (rnbitsize - 1);
4961 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4962 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4964 if (BYTES_BIG_ENDIAN)
4966 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4967 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4970 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4971 size_int (xlr_bitpos), 0);
4972 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4973 size_int (xrr_bitpos), 0);
4975 /* Make a mask that corresponds to both fields being compared.
4976 Do this for both items being compared. If the operands are the
4977 same size and the bits being compared are in the same position
4978 then we can do this by masking both and comparing the masked
4980 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4981 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4982 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4984 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4985 ll_unsignedp || rl_unsignedp);
4986 if (! all_ones_mask_p (ll_mask, lnbitsize))
4987 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4989 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4990 lr_unsignedp || rr_unsignedp);
4991 if (! all_ones_mask_p (lr_mask, rnbitsize))
4992 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4994 return build2 (wanted_code, truth_type, lhs, rhs);
4997 /* There is still another way we can do something: If both pairs of
4998 fields being compared are adjacent, we may be able to make a wider
4999 field containing them both.
5001 Note that we still must mask the lhs/rhs expressions. Furthermore,
5002 the mask must be shifted to account for the shift done by
5003 make_bit_field_ref. */
5004 if ((ll_bitsize + ll_bitpos == rl_bitpos
5005 && lr_bitsize + lr_bitpos == rr_bitpos)
5006 || (ll_bitpos == rl_bitpos + rl_bitsize
5007 && lr_bitpos == rr_bitpos + rr_bitsize))
5011 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5012 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5013 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5014 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5016 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5017 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5018 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5019 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5021 /* Convert to the smaller type before masking out unwanted bits. */
5023 if (lntype != rntype)
5025 if (lnbitsize > rnbitsize)
5027 lhs = fold_convert (rntype, lhs);
5028 ll_mask = fold_convert (rntype, ll_mask);
5031 else if (lnbitsize < rnbitsize)
5033 rhs = fold_convert (lntype, rhs);
5034 lr_mask = fold_convert (lntype, lr_mask);
5039 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5040 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5042 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5043 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5045 return build2 (wanted_code, truth_type, lhs, rhs);
5051 /* Handle the case of comparisons with constants. If there is something in
5052 common between the masks, those bits of the constants must be the same.
5053 If not, the condition is always false. Test for this to avoid generating
5054 incorrect code below. */
5055 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5056 if (! integer_zerop (result)
5057 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5058 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5060 if (wanted_code == NE_EXPR)
5062 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5063 return constant_boolean_node (true, truth_type);
5067 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5068 return constant_boolean_node (false, truth_type);
5072 /* Construct the expression we will return. First get the component
5073 reference we will make. Unless the mask is all ones the width of
5074 that field, perform the mask operation. Then compare with the
5076 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5077 ll_unsignedp || rl_unsignedp);
5079 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5080 if (! all_ones_mask_p (ll_mask, lnbitsize))
5081 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5083 return build2 (wanted_code, truth_type, result,
5084 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5087 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  CODE is the comparison code, TYPE the result type, OP0 the
   MIN_EXPR/MAX_EXPR operand and OP1 the constant it is compared against.
   Returns the folded tree, or the original comparison when no
   simplification applies.  */
5091 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5094 enum tree_code op_code;
5095 tree comp_const = op1;
5097 int consts_equal, consts_lt;
5100 STRIP_SIGN_NOPS (arg0);
5102 op_code = TREE_CODE (arg0);
5103 minmax_const = TREE_OPERAND (arg0, 1);
/* Precompute the orderings between the MIN/MAX constant and the
   comparison constant; the case analysis below is keyed on these.  */
5104 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5105 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5106 inner = TREE_OPERAND (arg0, 0);
5108 /* If something does not permit us to optimize, return the original tree. */
5109 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5110 || TREE_CODE (comp_const) != INTEGER_CST
5111 || TREE_CONSTANT_OVERFLOW (comp_const)
5112 || TREE_CODE (minmax_const) != INTEGER_CST
5113 || TREE_CONSTANT_OVERFLOW (minmax_const))
5116 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5117 and GT_EXPR, doing the rest with recursive calls using logical
   simplifications: NE/LT/LE are folded by inverting the comparison,
   and GE is folded as (EQ || GT).  */
5121 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5123 /* FIXME: We should be able to invert code without building a
5124 scratch tree node, but doing so would require us to
5125 duplicate a part of invert_truthvalue here. */
5126 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5127 tem = optimize_minmax_comparison (TREE_CODE (tem),
5129 TREE_OPERAND (tem, 0),
5130 TREE_OPERAND (tem, 1));
5131 return invert_truthvalue (tem);
/* GE is rewritten as the OR of the two directly-handled cases.  */
5136 fold_build2 (TRUTH_ORIF_EXPR, type,
5137 optimize_minmax_comparison
5138 (EQ_EXPR, type, arg0, comp_const),
5139 optimize_minmax_comparison
5140 (GT_EXPR, type, arg0, comp_const));
5143 if (op_code == MAX_EXPR && consts_equal)
5144 /* MAX (X, 0) == 0 -> X <= 0 */
5145 return fold_build2 (LE_EXPR, type, inner, comp_const);
5147 else if (op_code == MAX_EXPR && consts_lt)
5148 /* MAX (X, 0) == 5 -> X == 5 */
5149 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5151 else if (op_code == MAX_EXPR)
5152 /* MAX (X, 0) == -1 -> false */
5153 return omit_one_operand (type, integer_zero_node, inner);
5155 else if (consts_equal)
5156 /* MIN (X, 0) == 0 -> X >= 0 */
5157 return fold_build2 (GE_EXPR, type, inner, comp_const);
5160 /* MIN (X, 0) == 5 -> false */
5161 return omit_one_operand (type, integer_zero_node, inner);
5164 /* MIN (X, 0) == -1 -> X == -1 */
5165 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5168 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5169 /* MAX (X, 0) > 0 -> X > 0
5170 MAX (X, 0) > 5 -> X > 5 */
5171 return fold_build2 (GT_EXPR, type, inner, comp_const);
5173 else if (op_code == MAX_EXPR)
5174 /* MAX (X, 0) > -1 -> true */
5175 return omit_one_operand (type, integer_one_node, inner);
5177 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5178 /* MIN (X, 0) > 0 -> false
5179 MIN (X, 0) > 5 -> false */
5180 return omit_one_operand (type, integer_zero_node, inner);
5183 /* MIN (X, 0) > -1 -> X > -1 */
5184 return fold_build2 (GT_EXPR, type, inner, comp_const);
5191 /* T is an integer expression that is being multiplied, divided, or taken a
5192 modulus (CODE says which and what kind of divide or modulus) by a
5193 constant C. See if we can eliminate that operation by folding it with
5194 other operations already in T. WIDE_TYPE, if non-null, is a type that
5195 should be used for the computation if wider than our type.
5197 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5198 (X * 2) + (Y * 4). We must, however, be assured that either the original
5199 expression would not overflow or that overflow is undefined for the type
5200 in the language in question.
5202 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5203 the machine has a multiply-accumulate insn or that this is part of an
5204 addressing calculation.
5206 If we return a non-null expression, it is an equivalent form of the
5207 original computation, but need not be in the original type. */
5210 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5212 /* To avoid exponential search depth, refuse to allow recursion past
5213 three levels. Beyond that (1) it's highly unlikely that we'll find
5214 something interesting and (2) we've probably processed it before
5215 when we built the inner expression. */
/* NOTE(review): the static depth counter and its increment/decrement
   bracketing this call are elided from this excerpt; this wrapper only
   guards the real work, which is done by extract_muldiv_1.  */
5224 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: attempt to fold the multiply/divide/modulus
   of T by constant C (operation CODE) into the operations already inside
   T, computing in WIDE_TYPE when that is wider than T's type.  Returns
   the rewritten tree or NULL when no simplification is possible.  */
5231 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5233 tree type = TREE_TYPE (t);
5234 enum tree_code tcode = TREE_CODE (t);
5235 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5236 > GET_MODE_SIZE (TYPE_MODE (type)))
5237 ? wide_type : type);
5239 int same_p = tcode == code;
5240 tree op0 = NULL_TREE, op1 = NULL_TREE;
5242 /* Don't deal with constants of zero here; they confuse the code below. */
5243 if (integer_zerop (c))
5246 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5247 op0 = TREE_OPERAND (t, 0);
5249 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5250 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5252 /* Note that we need not handle conditional operations here since fold
5253 already handles those cases. So just do arithmetic here. */
5257 /* For a constant, we can always simplify if we are a multiply
5258 or (for divide and modulus) if it is a multiple of our constant. */
5259 if (code == MULT_EXPR
5260 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5261 return const_binop (code, fold_convert (ctype, t),
5262 fold_convert (ctype, c), 0);
5265 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5266 /* If op0 is an expression ... */
5267 if ((COMPARISON_CLASS_P (op0)
5268 || UNARY_CLASS_P (op0)
5269 || BINARY_CLASS_P (op0)
5270 || EXPRESSION_CLASS_P (op0))
5271 /* ... and is unsigned, and its type is smaller than ctype,
5272 then we cannot pass through as widening. */
5273 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5274 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5275 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5276 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5277 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5278 /* ... or this is a truncation (t is narrower than op0),
5279 then we cannot pass through this narrowing. */
5280 || (GET_MODE_SIZE (TYPE_MODE (type))
5281 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5282 /* ... or signedness changes for division or modulus,
5283 then we cannot pass through this conversion. */
5284 || (code != MULT_EXPR
5285 && (TYPE_UNSIGNED (ctype)
5286 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5289 /* Pass the constant down and see if we can make a simplification. If
5290 we can, replace this expression with the inner simplification for
5291 possible later conversion to our or some other type. */
5292 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5293 && TREE_CODE (t2) == INTEGER_CST
5294 && ! TREE_CONSTANT_OVERFLOW (t2)
5295 && (0 != (t1 = extract_muldiv (op0, t2, code,
5297 ? ctype : NULL_TREE))))
5302 /* If widening the type changes it from signed to unsigned, then we
5303 must avoid building ABS_EXPR itself as unsigned. */
5304 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5306 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5307 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5309 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5310 return fold_convert (ctype, t1);
5316 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5317 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5320 case MIN_EXPR: case MAX_EXPR:
5321 /* If widening the type changes the signedness, then we can't perform
5322 this optimization as that changes the result. */
5323 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5326 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5327 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5328 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing/multiplying by a negative constant flips the ordering,
   so MIN becomes MAX and vice versa.  */
5330 if (tree_int_cst_sgn (c) < 0)
5331 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5333 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5334 fold_convert (ctype, t2));
5338 case LSHIFT_EXPR: case RSHIFT_EXPR:
5339 /* If the second operand is constant, this is a multiplication
5340 or floor division, by a power of two, so we can treat it that
5341 way unless the multiplier or divisor overflows. Signed
5342 left-shift overflow is implementation-defined rather than
5343 undefined in C90, so do not convert signed left shift into
   multiplication.  */
5345 if (TREE_CODE (op1) == INTEGER_CST
5346 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5347 /* const_binop may not detect overflow correctly,
5348 so check for it explicitly here. */
5349 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5350 && TREE_INT_CST_HIGH (op1) == 0
5351 && 0 != (t1 = fold_convert (ctype,
5352 const_binop (LSHIFT_EXPR,
5355 && ! TREE_OVERFLOW (t1))
5356 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5357 ? MULT_EXPR : FLOOR_DIV_EXPR,
5358 ctype, fold_convert (ctype, op0), t1),
5359 c, code, wide_type);
5362 case PLUS_EXPR: case MINUS_EXPR:
5363 /* See if we can eliminate the operation on both sides. If we can, we
5364 can return a new PLUS or MINUS. If we can't, the only remaining
5365 cases where we can do anything are if the second operand is a
   constant.  */
5367 t1 = extract_muldiv (op0, c, code, wide_type);
5368 t2 = extract_muldiv (op1, c, code, wide_type);
5369 if (t1 != 0 && t2 != 0
5370 && (code == MULT_EXPR
5371 /* If not multiplication, we can only do this if both operands
5372 are divisible by c. */
5373 || (multiple_of_p (ctype, op0, c)
5374 && multiple_of_p (ctype, op1, c))))
5375 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5376 fold_convert (ctype, t2));
5378 /* If this was a subtraction, negate OP1 and set it to be an addition.
5379 This simplifies the logic below. */
5380 if (tcode == MINUS_EXPR)
5381 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5383 if (TREE_CODE (op1) != INTEGER_CST)
5386 /* If either OP1 or C are negative, this optimization is not safe for
5387 some of the division and remainder types while for others we need
5388 to change the code. */
5389 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5391 if (code == CEIL_DIV_EXPR)
5392 code = FLOOR_DIV_EXPR;
5393 else if (code == FLOOR_DIV_EXPR)
5394 code = CEIL_DIV_EXPR;
5395 else if (code != MULT_EXPR
5396 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5400 /* If it's a multiply or a division/modulus operation of a multiple
5401 of our constant, do the operation and verify it doesn't overflow. */
5402 if (code == MULT_EXPR
5403 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5405 op1 = const_binop (code, fold_convert (ctype, op1),
5406 fold_convert (ctype, c), 0);
5407 /* We allow the constant to overflow with wrapping semantics. */
5409 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5415 /* If we have an unsigned type that is not a sizetype, we cannot widen
5416 the operation since it will change the result if the original
5417 computation overflowed. */
5418 if (TYPE_UNSIGNED (ctype)
5419 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5423 /* If we were able to eliminate our operation from the first side,
5424 apply our operation to the second side and reform the PLUS. */
5425 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5426 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5428 /* The last case is if we are a multiply. In that case, we can
5429 apply the distributive law to commute the multiply and addition
5430 if the multiplication of the constants doesn't overflow. */
5431 if (code == MULT_EXPR)
5432 return fold_build2 (tcode, ctype,
5433 fold_build2 (code, ctype,
5434 fold_convert (ctype, op0),
5435 fold_convert (ctype, c)),
5441 /* We have a special case here if we are doing something like
5442 (C * 8) % 4 since we know that's zero. */
5443 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5444 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5445 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5446 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5447 return omit_one_operand (type, integer_zero_node, op0);
5449 /* ... fall through ... */
5451 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5452 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5453 /* If we can extract our operation from the LHS, do so and return a
5454 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5455 do something only if the second operand is a constant. */
5457 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5458 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5459 fold_convert (ctype, op1));
5460 else if (tcode == MULT_EXPR && code == MULT_EXPR
5461 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5462 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5463 fold_convert (ctype, t1));
5464 else if (TREE_CODE (op1) != INTEGER_CST)
5467 /* If these are the same operation types, we can associate them
5468 assuming no overflow. */
5470 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5471 fold_convert (ctype, c), 0))
5472 && ! TREE_OVERFLOW (t1))
5473 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5475 /* If these operations "cancel" each other, we have the main
5476 optimizations of this pass, which occur when either constant is a
5477 multiple of the other, in which case we replace this with either an
5478 operation of CODE or TCODE.
5480 If we have an unsigned type that is not a sizetype, we cannot do
5481 this since it will change the result if the original computation
   overflowed.  */
5483 if ((! TYPE_UNSIGNED (ctype)
5484 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5486 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5487 || (tcode == MULT_EXPR
5488 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5489 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5491 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5492 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5493 fold_convert (ctype,
5494 const_binop (TRUNC_DIV_EXPR,
5496 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5497 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5498 fold_convert (ctype,
5499 const_binop (TRUNC_DIV_EXPR,
5511 /* Return a node which has the indicated constant VALUE (either 0 or
5512 1), and is of the indicated TYPE. */
5515 constant_boolean_node (int value, tree type)
/* Reuse the shared global nodes for the two most common types so the
   result compares pointer-equal with other uses of those constants.  */
5517 if (type == integer_type_node)
5518 return value ? integer_one_node : integer_zero_node;
5519 else if (type == boolean_type_node)
5520 return value ? boolean_true_node : boolean_false_node;
/* Any other TYPE gets a freshly built integer constant.  */
5522 return build_int_cst (type, value);
5526 /* Return true if expr looks like an ARRAY_REF and set base and
5527 offset to the appropriate trees. If there is no offset,
5528 offset is set to NULL_TREE. Base will be canonicalized to
5529 something you can get the element type from using
5530 TREE_TYPE (TREE_TYPE (base)). */
5533 extract_array_ref (tree expr, tree *base, tree *offset)
5535 /* One canonical form is a PLUS_EXPR with the first
5536 argument being an ADDR_EXPR with a possible NOP_EXPR
   attached.  */
5538 if (TREE_CODE (expr) == PLUS_EXPR)
5540 tree op0 = TREE_OPERAND (expr, 0);
5541 tree inner_base, dummy1;
5542 /* Strip NOP_EXPRs here because the C frontends and/or
5543 folders present us (int *)&x.a + 4B possibly. */
5545 if (extract_array_ref (op0, &inner_base, &dummy1))
/* Combine an inner offset (if any) with the outer PLUS operand.  */
5548 if (dummy1 == NULL_TREE)
5549 *offset = TREE_OPERAND (expr, 1);
5551 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5552 dummy1, TREE_OPERAND (expr, 1));
5556 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5557 which we transform into an ADDR_EXPR with appropriate
5558 offset. For other arguments to the ADDR_EXPR we assume
5559 zero offset and as such do not care about the ADDR_EXPR
5560 type and strip possible nops from it. */
5561 else if (TREE_CODE (expr) == ADDR_EXPR)
5563 tree op0 = TREE_OPERAND (expr, 0);
5564 if (TREE_CODE (op0) == ARRAY_REF)
5566 *base = TREE_OPERAND (op0, 0);
5567 *offset = TREE_OPERAND (op0, 1);
5571 /* Handle array-to-pointer decay as &a. */
5572 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5573 *base = TREE_OPERAND (expr, 0);
5576 *offset = NULL_TREE;
5580 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5581 else if (SSA_VAR_P (expr)
5582 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5585 *offset = NULL_TREE;
5593 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5594 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5595 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5596 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5597 COND is the first argument to CODE; otherwise (as in the example
5598 given here), it is the second argument. TYPE is the type of the
5599 original expression. Return NULL_TREE if no simplification is
   possible.  */
5603 fold_binary_op_with_conditional_arg (enum tree_code code,
5604 tree type, tree op0, tree op1,
5605 tree cond, tree arg, int cond_first_p)
5607 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5608 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5609 tree test, true_value, false_value;
5610 tree lhs = NULL_TREE;
5611 tree rhs = NULL_TREE;
5613 /* This transformation is only worthwhile if we don't have to wrap
5614 arg in a SAVE_EXPR, and the operation can be simplified on at least
5615 one of the branches once it's pushed inside the COND_EXPR. */
5616 if (!TREE_CONSTANT (arg))
5619 if (TREE_CODE (cond) == COND_EXPR)
5621 test = TREE_OPERAND (cond, 0);
5622 true_value = TREE_OPERAND (cond, 1);
5623 false_value = TREE_OPERAND (cond, 2);
5624 /* If this operand throws an expression, then it does not make
5625 sense to try to perform a logical or arithmetic operation
   involving it.  */
5627 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5629 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a comparison rather than a COND_EXPR: treat it as the
   test, with boolean constants for the two arms.  */
5634 tree testtype = TREE_TYPE (cond);
5636 true_value = constant_boolean_node (true, testtype);
5637 false_value = constant_boolean_node (false, testtype);
5640 arg = fold_convert (arg_type, arg);
5643 true_value = fold_convert (cond_type, true_value);
/* Apply CODE to each arm, respecting the original operand order.  */
5645 lhs = fold_build2 (code, type, true_value, arg);
5647 lhs = fold_build2 (code, type, arg, true_value);
5651 false_value = fold_convert (cond_type, false_value);
5653 rhs = fold_build2 (code, type, false_value, arg);
5655 rhs = fold_build2 (code, type, arg, false_value);
5658 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5659 return fold_convert (type, test);
5663 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5665 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5666 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5667 ADDEND is the same as X.
5669 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5670 and finite. The problematic cases are when X is zero, and its mode
5671 has signed zeros. In the case of rounding towards -infinity,
5672 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5673 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5676 fold_real_zero_addition_p (tree type, tree addend, int negate)
5678 if (!real_zerop (addend))
5681 /* Don't allow the fold with -fsignaling-nans. */
5682 if (HONOR_SNANS (TYPE_MODE (type)))
5685 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5686 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5689 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5690 if (TREE_CODE (addend) == REAL_CST
5691 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5694 /* The mode has signed zeros, and we have to honor their sign.
5695 In this situation, there is only one case we can return true for.
5696 X - 0 is the same as X unless rounding towards -infinity is
   in effect.  */
5698 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5701 /* Subroutine of fold() that checks comparisons of built-in math
5702 functions against real constants.
5704 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5705 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5706 is the type of the result and ARG0 and ARG1 are the operands of the
5707 comparison. ARG1 must be a TREE_REAL_CST.
5709 The function returns the constant folded tree if a simplification
5710 can be made, and NULL_TREE otherwise. */
5713 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5714 tree type, tree arg0, tree arg1)
5718 if (BUILTIN_SQRT_P (fcode))
/* ARG0 is a call to sqrt; ARG is the call's single argument.  */
5720 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5721 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5723 c = TREE_REAL_CST (arg1);
5724 if (REAL_VALUE_NEGATIVE (c))
5726 /* sqrt(x) < y is always false, if y is negative. */
5727 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5728 return omit_one_operand (type, integer_zero_node, arg);
5730 /* sqrt(x) > y is always true, if y is negative and we
5731 don't care about NaNs, i.e. negative values of x. */
5732 if (code == NE_EXPR || !HONOR_NANS (mode))
5733 return omit_one_operand (type, integer_one_node, arg);
5735 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5736 return fold_build2 (GE_EXPR, type, arg,
5737 build_real (TREE_TYPE (arg), dconst0));
5739 else if (code == GT_EXPR || code == GE_EXPR)
/* Compare against c*c instead, computed in the argument's mode.  */
5743 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5744 real_convert (&c2, mode, &c2);
5746 if (REAL_VALUE_ISINF (c2))
5748 /* sqrt(x) > y is x == +Inf, when y is very large. */
5749 if (HONOR_INFINITIES (mode))
5750 return fold_build2 (EQ_EXPR, type, arg,
5751 build_real (TREE_TYPE (arg), c2));
5753 /* sqrt(x) > y is always false, when y is very large
5754 and we don't care about infinities. */
5755 return omit_one_operand (type, integer_zero_node, arg);
5758 /* sqrt(x) > c is the same as x > c*c. */
5759 return fold_build2 (code, type, arg,
5760 build_real (TREE_TYPE (arg), c2));
5762 else if (code == LT_EXPR || code == LE_EXPR)
5766 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5767 real_convert (&c2, mode, &c2);
5769 if (REAL_VALUE_ISINF (c2))
5771 /* sqrt(x) < y is always true, when y is a very large
5772 value and we don't care about NaNs or Infinities. */
5773 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5774 return omit_one_operand (type, integer_one_node, arg);
5776 /* sqrt(x) < y is x != +Inf when y is very large and we
5777 don't care about NaNs. */
5778 if (! HONOR_NANS (mode))
5779 return fold_build2 (NE_EXPR, type, arg,
5780 build_real (TREE_TYPE (arg), c2));
5782 /* sqrt(x) < y is x >= 0 when y is very large and we
5783 don't care about Infinities. */
5784 if (! HONOR_INFINITIES (mode))
5785 return fold_build2 (GE_EXPR, type, arg,
5786 build_real (TREE_TYPE (arg), dconst0));
5788 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5789 if (lang_hooks.decls.global_bindings_p () != 0
5790 || CONTAINS_PLACEHOLDER_P (arg))
5793 arg = save_expr (arg);
5794 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5795 fold_build2 (GE_EXPR, type, arg,
5796 build_real (TREE_TYPE (arg),
5798 fold_build2 (NE_EXPR, type, arg,
5799 build_real (TREE_TYPE (arg),
5803 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5804 if (! HONOR_NANS (mode))
5805 return fold_build2 (code, type, arg,
5806 build_real (TREE_TYPE (arg), c2));
5808 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5809 if (lang_hooks.decls.global_bindings_p () == 0
5810 && ! CONTAINS_PLACEHOLDER_P (arg))
5812 arg = save_expr (arg);
5813 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5814 fold_build2 (GE_EXPR, type, arg,
5815 build_real (TREE_TYPE (arg),
5817 fold_build2 (code, type, arg,
5818 build_real (TREE_TYPE (arg),
5827 /* Subroutine of fold() that optimizes comparisons against Infinities,
5828 either +Inf or -Inf.
5830 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5831 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5832 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5834 The function returns the constant folded tree if a simplification
5835 can be made, and NULL_TREE otherwise. */
5838 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5840 enum machine_mode mode;
5841 REAL_VALUE_TYPE max;
5845 mode = TYPE_MODE (TREE_TYPE (arg0));
5847 /* For negative infinity swap the sense of the comparison. */
5848 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5850 code = swap_tree_comparison (code);
5855 /* x > +Inf is always false, if we ignore sNaNs. */
5856 if (HONOR_SNANS (mode))
5858 return omit_one_operand (type, integer_zero_node, arg0);
5861 /* x <= +Inf is always true, if we don't care about NaNs. */
5862 if (! HONOR_NANS (mode))
5863 return omit_one_operand (type, integer_one_node, arg0);
5865 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5866 if (lang_hooks.decls.global_bindings_p () == 0
5867 && ! CONTAINS_PLACEHOLDER_P (arg0))
5869 arg0 = save_expr (arg0);
5870 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5876 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5877 real_maxval (&max, neg, mode);
5878 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5879 arg0, build_real (TREE_TYPE (arg0), max));
5882 /* x < +Inf is always equal to x <= DBL_MAX. */
5883 real_maxval (&max, neg, mode);
5884 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5885 arg0, build_real (TREE_TYPE (arg0), max));
5888 /* x != +Inf is always equal to !(x > DBL_MAX). */
5889 real_maxval (&max, neg, mode);
5890 if (! HONOR_NANS (mode))
5891 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5892 arg0, build_real (TREE_TYPE (arg0), max));
5894 /* The transformation below creates non-gimple code and thus is
5895 not appropriate if we are in gimple form. */
/* Otherwise express x != +Inf as !(x > DBL_MAX) so NaNs compare
   correctly through the TRUTH_NOT_EXPR.  */
5899 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5900 arg0, build_real (TREE_TYPE (arg0), max));
5901 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5910 /* Subroutine of fold() that optimizes comparisons of a division by
5911 a nonzero integer constant against an integer constant, i.e.
5914 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5915 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5916 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5918 The function returns the constant folded tree if a simplification
5919 can be made, and NULL_TREE otherwise. */
5922 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5924 tree prod, tmp, hi, lo;
5925 tree arg00 = TREE_OPERAND (arg0, 0);
5926 tree arg01 = TREE_OPERAND (arg0, 1);
5927 unsigned HOST_WIDE_INT lpart;
5928 HOST_WIDE_INT hpart;
5931 /* We have to do this the hard way to detect unsigned overflow.
5932 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5933 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5934 TREE_INT_CST_HIGH (arg01),
5935 TREE_INT_CST_LOW (arg1),
5936 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5937 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5938 prod = force_fit_type (prod, -1, overflow, false);
/* Compute [LO, HI], the range of dividend values whose quotient by
   ARG01 equals ARG1.  Overflow of either bound is recorded through
   TREE_OVERFLOW and turns the corresponding half-test into a constant
   below. */
5940 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5942 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5945 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5946 overflow = add_double (TREE_INT_CST_LOW (prod),
5947 TREE_INT_CST_HIGH (prod),
5948 TREE_INT_CST_LOW (tmp),
5949 TREE_INT_CST_HIGH (tmp),
5951 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5952 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5953 TREE_CONSTANT_OVERFLOW (prod));
5955 else if (tree_int_cst_sgn (arg01) >= 0)
5957 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
/* The bound adjustment direction depends on the sign of ARG1. */
5958 switch (tree_int_cst_sgn (arg1))
5961 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5966 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5971 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5981 /* A negative divisor reverses the relational operators. */
5982 code = swap_tree_comparison (code);
5984 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5985 switch (tree_int_cst_sgn (arg1))
5988 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5993 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5998 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Rewrite the original comparison as a range check of the dividend
   ARG00 against [LO, HI].  An overflowed bound makes that side of the
   range trivially satisfied (or unsatisfiable), so only the other
   bound needs testing. */
6010 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6011 return omit_one_operand (type, integer_zero_node, arg00);
6012 if (TREE_OVERFLOW (hi))
6013 return fold_build2 (GE_EXPR, type, arg00, lo);
6014 if (TREE_OVERFLOW (lo))
6015 return fold_build2 (LE_EXPR, type, arg00, hi);
6016 return build_range_check (type, arg00, 1, lo, hi);
6019 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6020 return omit_one_operand (type, integer_one_node, arg00);
6021 if (TREE_OVERFLOW (hi))
6022 return fold_build2 (LT_EXPR, type, arg00, lo);
6023 if (TREE_OVERFLOW (lo))
6024 return fold_build2 (GT_EXPR, type, arg00, hi);
6025 return build_range_check (type, arg00, 0, lo, hi);
6028 if (TREE_OVERFLOW (lo))
6029 return omit_one_operand (type, integer_zero_node, arg00);
6030 return fold_build2 (LT_EXPR, type, arg00, lo);
6033 if (TREE_OVERFLOW (hi))
6034 return omit_one_operand (type, integer_one_node, arg00);
6035 return fold_build2 (LE_EXPR, type, arg00, hi);
6038 if (TREE_OVERFLOW (hi))
6039 return omit_one_operand (type, integer_zero_node, arg00);
6040 return fold_build2 (GT_EXPR, type, arg00, hi);
6043 if (TREE_OVERFLOW (lo))
6044 return omit_one_operand (type, integer_one_node, arg00);
6045 return fold_build2 (GE_EXPR, type, arg00, lo);
6055 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6056 equality/inequality test, then return a simplified form of the test
6057 using a sign testing. Otherwise return NULL. TYPE is the desired
6061 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6064 /* If this is testing a single bit, we can optimize the test. */
6065 if ((code == NE_EXPR || code == EQ_EXPR)
6066 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6067 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6069 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6070 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6071 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6073 if (arg00 != NULL_TREE
6074 /* This is only a win if casting to a signed type is cheap,
6075 i.e. when arg00's type is not a partial mode. */
6076 && TYPE_PRECISION (TREE_TYPE (arg00))
6077 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
/* Convert to the equivalent signed type so that the sign-bit mask
   becomes a plain comparison against zero. */
6079 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6080 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6081 result_type, fold_convert (stype, arg00),
6082 fold_convert (stype, integer_zero_node));
6089 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6090 equality/inequality test, then return a simplified form of
6091 the test using shifts and logical operations. Otherwise return
6092 NULL. TYPE is the desired result type. */
6095 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6098 /* If this is testing a single bit, we can optimize the test. */
6099 if ((code == NE_EXPR || code == EQ_EXPR)
6100 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6101 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6103 tree inner = TREE_OPERAND (arg0, 0);
6104 tree type = TREE_TYPE (arg0);
/* BITNUM is the index of the tested bit, i.e. log2 of the mask C. */
6105 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6106 enum machine_mode operand_mode = TYPE_MODE (type);
6108 tree signed_type, unsigned_type, intermediate_type;
6111 /* First, see if we can fold the single bit test into a sign-bit
6113 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6118 /* Otherwise we have (A & C) != 0 where C is a single bit,
6119 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6120 Similarly for (A & C) == 0. */
6122 /* If INNER is a right shift of a constant and it plus BITNUM does
6123 not overflow, adjust BITNUM and INNER. */
6124 if (TREE_CODE (inner) == RSHIFT_EXPR
6125 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6126 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6127 && bitnum < TYPE_PRECISION (type)
6128 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6129 bitnum - TYPE_PRECISION (type)))
6131 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6132 inner = TREE_OPERAND (inner, 0);
6135 /* If we are going to be able to omit the AND below, we must do our
6136 operations as unsigned. If we must use the AND, we have a choice.
6137 Normally unsigned is faster, but for some machines signed is. */
6138 #ifdef LOAD_EXTEND_OP
6139 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6140 && !flag_syntax_only) ? 0 : 1;
6145 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6146 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6147 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6148 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0. */
6151 inner = build2 (RSHIFT_EXPR, intermediate_type,
6152 inner, size_int (bitnum));
/* For == 0 the result must be inverted, which XOR with 1 achieves. */
6154 if (code == EQ_EXPR)
6155 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6156 inner, integer_one_node);
6158 /* Put the AND last so it can combine with more things. */
6159 inner = build2 (BIT_AND_EXPR, intermediate_type,
6160 inner, integer_one_node);
6162 /* Make sure to return the proper type. */
6163 inner = fold_convert (result_type, inner);
6170 /* Check whether we are allowed to reorder operands arg0 and arg1,
6171 such that the evaluation of arg1 occurs before arg0. */
6174 reorder_operands_p (tree arg0, tree arg1)
6176 if (! flag_evaluation_order)
6178 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Under -ffloat-store style ordered evaluation (flag_evaluation_order),
   reordering is only safe when neither operand has side effects. */
6180 return ! TREE_SIDE_EFFECTS (arg0)
6181 && ! TREE_SIDE_EFFECTS (arg1);
6184 /* Test whether it is preferable to swap two operands, ARG0 and
6185 ARG1, for example because ARG0 is an integer constant and ARG1
6186 isn't. If REORDER is true, only recommend swapping if we can
6187 evaluate the operands in reverse order. */
6190 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6192 STRIP_SIGN_NOPS (arg0);
6193 STRIP_SIGN_NOPS (arg1);
/* Canonical order puts constants second: for each constant kind the
   pair of checks ranks ARG1-constant against ARG0-constant. */
6195 if (TREE_CODE (arg1) == INTEGER_CST)
6197 if (TREE_CODE (arg0) == INTEGER_CST)
6200 if (TREE_CODE (arg1) == REAL_CST)
6202 if (TREE_CODE (arg0) == REAL_CST)
6205 if (TREE_CODE (arg1) == COMPLEX_CST)
6207 if (TREE_CODE (arg0) == COMPLEX_CST)
6210 if (TREE_CONSTANT (arg1))
6212 if (TREE_CONSTANT (arg0))
6218 if (reorder && flag_evaluation_order
6219 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6227 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6228 for commutative and comparison operators. Ensuring a canonical
6229 form allows the optimizers to find additional redundancies without
6230 having to explicitly check for both orderings. */
6231 if (TREE_CODE (arg0) == SSA_NAME
6232 && TREE_CODE (arg1) == SSA_NAME
6233 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6239 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6240 ARG0 is extended to a wider type. */
6243 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6245 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6247 tree shorter_type, outer_type;
/* No widening to undo: nothing to fold. */
6251 if (arg0_unw == arg0)
6253 shorter_type = TREE_TYPE (arg0_unw);
6255 #ifdef HAVE_canonicalize_funcptr_for_compare
6256 /* Disable this optimization if we're casting a function pointer
6257 type on targets that require function pointer canonicalization. */
6258 if (HAVE_canonicalize_funcptr_for_compare
6259 && TREE_CODE (shorter_type) == POINTER_TYPE
6260 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6264 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6267 arg1_unw = get_unwidened (arg1, shorter_type);
6269 /* If possible, express the comparison in the shorter mode. */
6270 if ((code == EQ_EXPR || code == NE_EXPR
6271 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6272 && (TREE_TYPE (arg1_unw) == shorter_type
6273 || (TREE_CODE (arg1_unw) == INTEGER_CST
6274 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6275 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6276 && int_fits_type_p (arg1_unw, shorter_type))))
6277 return fold_build2 (code, type, arg0_unw,
6278 fold_convert (shorter_type, arg1_unw));
6280 if (TREE_CODE (arg1_unw) != INTEGER_CST
6281 || TREE_CODE (shorter_type) != INTEGER_TYPE
6282 || !int_fits_type_p (arg1_unw, shorter_type))
6285 /* If we are comparing with the integer that does not fit into the range
6286 of the shorter type, the result is known. */
6287 outer_type = TREE_TYPE (arg1_unw);
6288 min = lower_bound_in_type (outer_type, shorter_type);
6289 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE/BELOW record whether ARG1 lies above the maximum or below the
   minimum representable value of the shorter type. */
6291 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6293 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6300 return omit_one_operand (type, integer_zero_node, arg0);
6305 return omit_one_operand (type, integer_one_node, arg0);
6311 return omit_one_operand (type, integer_one_node, arg0);
6313 return omit_one_operand (type, integer_zero_node, arg0);
6318 return omit_one_operand (type, integer_zero_node, arg0);
6320 return omit_one_operand (type, integer_one_node, arg0);
6329 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6330 ARG0 just the signedness is changed. */
6333 fold_sign_changed_comparison (enum tree_code code, tree type,
6334 tree arg0, tree arg1)
6336 tree arg0_inner, tmp;
6337 tree inner_type, outer_type;
6339 if (TREE_CODE (arg0) != NOP_EXPR
6340 && TREE_CODE (arg0) != CONVERT_EXPR)
6343 outer_type = TREE_TYPE (arg0);
6344 arg0_inner = TREE_OPERAND (arg0, 0);
6345 inner_type = TREE_TYPE (arg0_inner);
6347 #ifdef HAVE_canonicalize_funcptr_for_compare
6348 /* Disable this optimization if we're casting a function pointer
6349 type on targets that require function pointer canonicalization. */
6350 if (HAVE_canonicalize_funcptr_for_compare
6351 && TREE_CODE (inner_type) == POINTER_TYPE
6352 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Only a pure signedness change is handled: the precisions must match. */
6356 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6359 if (TREE_CODE (arg1) != INTEGER_CST
6360 && !((TREE_CODE (arg1) == NOP_EXPR
6361 || TREE_CODE (arg1) == CONVERT_EXPR)
6362 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6365 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant in the inner type, carrying over any
   overflow flags from the original constant. */
6370 if (TREE_CODE (arg1) == INTEGER_CST)
6372 tmp = build_int_cst_wide (inner_type,
6373 TREE_INT_CST_LOW (arg1),
6374 TREE_INT_CST_HIGH (arg1));
6375 arg1 = force_fit_type (tmp, 0,
6376 TREE_OVERFLOW (arg1),
6377 TREE_CONSTANT_OVERFLOW (arg1));
6380 arg1 = fold_convert (inner_type, arg1);
6382 return fold_build2 (code, type, arg0_inner, arg1);
6385 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6386 step of the array. Reconstructs s and delta in the case of s * delta
6387 being an integer constant (and thus already folded).
6388 ADDR is the address. OP1 is the multiplicative expression.
6389 If the function succeeds, the new address expression is returned. Otherwise
6390 NULL_TREE is returned. */
6393 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6395 tree s, delta, step;
6396 tree ref = TREE_OPERAND (addr, 0), pref;
6400 /* Canonicalize op1 into a possibly non-constant delta
6401 and an INTEGER_CST s. */
6402 if (TREE_CODE (op1) == MULT_EXPR)
6404 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6409 if (TREE_CODE (arg0) == INTEGER_CST)
6414 else if (TREE_CODE (arg1) == INTEGER_CST)
6422 else if (TREE_CODE (op1) == INTEGER_CST)
6429 /* Simulate we are delta * 1. */
6431 s = integer_one_node;
/* Walk down the component references looking for an ARRAY_REF whose
   element size matches S (or divides DELTA evenly). */
6434 for (;; ref = TREE_OPERAND (ref, 0))
6436 if (TREE_CODE (ref) == ARRAY_REF)
6438 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6442 step = array_ref_element_size (ref);
6443 if (TREE_CODE (step) != INTEGER_CST)
6448 if (! tree_int_cst_equal (step, s))
6453 /* Try if delta is a multiple of step. */
6454 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6463 if (!handled_component_p (ref))
6467 /* We found the suitable array reference. So copy everything up to it,
6468 and replace the index. */
6470 pref = TREE_OPERAND (addr, 0);
6471 ret = copy_node (pref);
6476 pref = TREE_OPERAND (pref, 0);
6477 TREE_OPERAND (pos, 0) = copy_node (pref);
6478 pos = TREE_OPERAND (pos, 0);
/* Fold DELTA into the array index (in the index type ITYPE). */
6481 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6482 fold_convert (itype,
6483 TREE_OPERAND (pos, 1)),
6484 fold_convert (itype, delta));
6486 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6490 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6491 means A >= Y && A != MAX, but in this case we know that
6492 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6495 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6497 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, accepting either A < X or X > A. */
6499 if (TREE_CODE (bound) == LT_EXPR)
6500 a = TREE_OPERAND (bound, 0);
6501 else if (TREE_CODE (bound) == GT_EXPR)
6502 a = TREE_OPERAND (bound, 1);
6506 typea = TREE_TYPE (a);
6507 if (!INTEGRAL_TYPE_P (typea)
6508 && !POINTER_TYPE_P (typea))
/* Extract A1 (the candidate A + 1) and Y from the inequality. */
6511 if (TREE_CODE (ineq) == LT_EXPR)
6513 a1 = TREE_OPERAND (ineq, 1);
6514 y = TREE_OPERAND (ineq, 0);
6516 else if (TREE_CODE (ineq) == GT_EXPR)
6518 a1 = TREE_OPERAND (ineq, 0);
6519 y = TREE_OPERAND (ineq, 1);
6524 if (TREE_TYPE (a1) != typea)
/* The transformation is only valid when A1 is exactly A + 1. */
6527 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6528 if (!integer_onep (diff))
6531 return fold_build2 (GE_EXPR, type, a, y);
6534 /* Fold a unary expression of code CODE and type TYPE with operand
6535 OP0. Return the folded expression if folding is successful.
6536 Otherwise, return NULL_TREE. */
6539 fold_unary (enum tree_code code, tree type, tree op0)
6543 enum tree_code_class kind = TREE_CODE_CLASS (code);
6545 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6546 && TREE_CODE_LENGTH (code) == 1);
6551 if (code == NOP_EXPR || code == CONVERT_EXPR
6552 || code == FLOAT_EXPR || code == ABS_EXPR)
6554 /* Don't use STRIP_NOPS, because signedness of argument type
6556 STRIP_SIGN_NOPS (arg0);
6560 /* Strip any conversions that don't change the mode. This
6561 is safe for every expression, except for a comparison
6562 expression because its signedness is derived from its
6565 Note that this is done as an internal manipulation within
6566 the constant folder, in order to find the simplest
6567 representation of the arguments so that their form can be
6568 studied. In any case, the appropriate type conversions
6569 should be put back in the tree that will get out of the
/* Distribute the unary operation over COMPOUND_EXPR and COND_EXPR
   so that it applies to the value-producing operand(s). */
6575 if (TREE_CODE_CLASS (code) == tcc_unary)
6577 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6578 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6579 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6580 else if (TREE_CODE (arg0) == COND_EXPR)
6582 tree arg01 = TREE_OPERAND (arg0, 1);
6583 tree arg02 = TREE_OPERAND (arg0, 2);
6584 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6585 arg01 = fold_build1 (code, type, arg01);
6586 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6587 arg02 = fold_build1 (code, type, arg02);
6588 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6591 /* If this was a conversion, and all we did was to move into
6592 inside the COND_EXPR, bring it back out. But leave it if
6593 it is a conversion from integer to integer and the
6594 result precision is no wider than a word since such a
6595 conversion is cheap and may be optimized away by combine,
6596 while it couldn't if it were outside the COND_EXPR. Then return
6597 so we don't get into an infinite recursion loop taking the
6598 conversion out and then back in. */
6600 if ((code == NOP_EXPR || code == CONVERT_EXPR
6601 || code == NON_LVALUE_EXPR)
6602 && TREE_CODE (tem) == COND_EXPR
6603 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6604 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6605 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6606 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6607 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6608 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6609 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6611 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6612 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6613 || flag_syntax_only))
6614 tem = build1 (code, type,
6616 TREE_TYPE (TREE_OPERAND
6617 (TREE_OPERAND (tem, 1), 0)),
6618 TREE_OPERAND (tem, 0),
6619 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6620 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6623 else if (COMPARISON_CLASS_P (arg0))
6625 if (TREE_CODE (type) == BOOLEAN_TYPE)
6627 arg0 = copy_node (arg0);
6628 TREE_TYPE (arg0) = type;
6631 else if (TREE_CODE (type) != INTEGER_TYPE)
6632 return fold_build3 (COND_EXPR, type, arg0,
6633 fold_build1 (code, type,
6635 fold_build1 (code, type,
6636 integer_zero_node));
6645 case FIX_TRUNC_EXPR:
6647 case FIX_FLOOR_EXPR:
6648 case FIX_ROUND_EXPR:
/* Conversion to the same type is a no-op. */
6649 if (TREE_TYPE (op0) == type)
6652 /* Handle cases of two conversions in a row. */
6653 if (TREE_CODE (op0) == NOP_EXPR
6654 || TREE_CODE (op0) == CONVERT_EXPR)
6656 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6657 tree inter_type = TREE_TYPE (op0);
6658 int inside_int = INTEGRAL_TYPE_P (inside_type);
6659 int inside_ptr = POINTER_TYPE_P (inside_type);
6660 int inside_float = FLOAT_TYPE_P (inside_type);
6661 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6662 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6663 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6664 int inter_int = INTEGRAL_TYPE_P (inter_type);
6665 int inter_ptr = POINTER_TYPE_P (inter_type);
6666 int inter_float = FLOAT_TYPE_P (inter_type);
6667 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6668 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6669 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6670 int final_int = INTEGRAL_TYPE_P (type);
6671 int final_ptr = POINTER_TYPE_P (type);
6672 int final_float = FLOAT_TYPE_P (type);
6673 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6674 unsigned int final_prec = TYPE_PRECISION (type);
6675 int final_unsignedp = TYPE_UNSIGNED (type);
6677 /* In addition to the cases of two conversions in a row
6678 handled below, if we are converting something to its own
6679 type via an object of identical or wider precision, neither
6680 conversion is needed. */
6681 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6682 && ((inter_int && final_int) || (inter_float && final_float))
6683 && inter_prec >= final_prec)
6684 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6686 /* Likewise, if the intermediate and final types are either both
6687 float or both integer, we don't need the middle conversion if
6688 it is wider than the final type and doesn't change the signedness
6689 (for integers). Avoid this if the final type is a pointer
6690 since then we sometimes need the inner conversion. Likewise if
6691 the outer has a precision not equal to the size of its mode. */
6692 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6693 || (inter_float && inside_float)
6694 || (inter_vec && inside_vec))
6695 && inter_prec >= inside_prec
6696 && (inter_float || inter_vec
6697 || inter_unsignedp == inside_unsignedp)
6698 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6699 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6701 && (! final_vec || inter_prec == inside_prec))
6702 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6704 /* If we have a sign-extension of a zero-extended value, we can
6705 replace that by a single zero-extension. */
6706 if (inside_int && inter_int && final_int
6707 && inside_prec < inter_prec && inter_prec < final_prec
6708 && inside_unsignedp && !inter_unsignedp)
6709 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6711 /* Two conversions in a row are not needed unless:
6712 - some conversion is floating-point (overstrict for now), or
6713 - some conversion is a vector (overstrict for now), or
6714 - the intermediate type is narrower than both initial and
6716 - the intermediate type and innermost type differ in signedness,
6717 and the outermost type is wider than the intermediate, or
6718 - the initial type is a pointer type and the precisions of the
6719 intermediate and final types differ, or
6720 - the final type is a pointer type and the precisions of the
6721 initial and intermediate types differ. */
6722 if (! inside_float && ! inter_float && ! final_float
6723 && ! inside_vec && ! inter_vec && ! final_vec
6724 && (inter_prec > inside_prec || inter_prec > final_prec)
6725 && ! (inside_int && inter_int
6726 && inter_unsignedp != inside_unsignedp
6727 && inter_prec < final_prec)
6728 && ((inter_unsignedp && inter_prec > inside_prec)
6729 == (final_unsignedp && final_prec > inter_prec))
6730 && ! (inside_ptr && inter_prec != final_prec)
6731 && ! (final_ptr && inside_prec != inter_prec)
6732 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6733 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6735 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6738 /* Handle (T *)&A.B.C for A being of type T and B and C
6739 living at offset zero. This occurs frequently in
6740 C++ upcasting and then accessing the base. */
6741 if (TREE_CODE (op0) == ADDR_EXPR
6742 && POINTER_TYPE_P (type)
6743 && handled_component_p (TREE_OPERAND (op0, 0)))
6745 HOST_WIDE_INT bitsize, bitpos;
6747 enum machine_mode mode;
6748 int unsignedp, volatilep;
6749 tree base = TREE_OPERAND (op0, 0);
6750 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6751 &mode, &unsignedp, &volatilep, false);
6752 /* If the reference was to a (constant) zero offset, we can use
6753 the address of the base if it has the same base type
6754 as the result type. */
6755 if (! offset && bitpos == 0
6756 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6757 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6758 return fold_convert (type, build_fold_addr_expr (base));
6761 if (TREE_CODE (op0) == MODIFY_EXPR
6762 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6763 /* Detect assigning a bitfield. */
6764 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6765 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6767 /* Don't leave an assignment inside a conversion
6768 unless assigning a bitfield. */
6769 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6770 /* First do the assignment, then return converted constant. */
6771 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6772 TREE_NO_WARNING (tem) = 1;
6773 TREE_USED (tem) = 1;
6777 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6778 constants (if x has signed type, the sign bit cannot be set
6779 in c). This folds extension into the BIT_AND_EXPR. */
6780 if (INTEGRAL_TYPE_P (type)
6781 && TREE_CODE (type) != BOOLEAN_TYPE
6782 && TREE_CODE (op0) == BIT_AND_EXPR
6783 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6786 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6789 if (TYPE_UNSIGNED (TREE_TYPE (and))
6790 || (TYPE_PRECISION (type)
6791 <= TYPE_PRECISION (TREE_TYPE (and))))
6793 else if (TYPE_PRECISION (TREE_TYPE (and1))
6794 <= HOST_BITS_PER_WIDE_INT
6795 && host_integerp (and1, 1))
6797 unsigned HOST_WIDE_INT cst;
6799 cst = tree_low_cst (and1, 1);
/* The transformation is safe only if the sign bit (and everything
   above it) of the mask is clear. */
6800 cst &= (HOST_WIDE_INT) -1
6801 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6802 change = (cst == 0);
6803 #ifdef LOAD_EXTEND_OP
6805 && !flag_syntax_only
6806 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6809 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6810 and0 = fold_convert (uns, and0);
6811 and1 = fold_convert (uns, and1);
6817 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6818 TREE_INT_CST_HIGH (and1));
6819 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6820 TREE_CONSTANT_OVERFLOW (and1));
6821 return fold_build2 (BIT_AND_EXPR, type,
6822 fold_convert (type, and0), tem);
6826 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6827 T2 being pointers to types of the same size. */
6828 if (POINTER_TYPE_P (type)
6829 && BINARY_CLASS_P (arg0)
6830 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6831 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6833 tree arg00 = TREE_OPERAND (arg0, 0);
6835 tree t1 = TREE_TYPE (arg00);
6836 tree tt0 = TREE_TYPE (t0);
6837 tree tt1 = TREE_TYPE (t1);
6838 tree s0 = TYPE_SIZE (tt0);
6839 tree s1 = TYPE_SIZE (tt1);
/* Sizes must be provably equal constants for the cast to commute. */
6841 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6842 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6843 TREE_OPERAND (arg0, 1));
6846 tem = fold_convert_const (code, type, arg0);
6847 return tem ? tem : NULL_TREE;
6849 case VIEW_CONVERT_EXPR:
/* Collapse nested VIEW_CONVERT_EXPRs into a single one. */
6850 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6851 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6855 if (negate_expr_p (arg0))
6856 return fold_convert (type, negate_expr (arg0));
6857 /* Convert - (~A) to A + 1. */
6858 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6859 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6860 build_int_cst (type, 1));
6864 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6865 return fold_abs_const (arg0, type);
6866 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6867 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6868 /* Convert fabs((double)float) into (double)fabsf(float). */
6869 else if (TREE_CODE (arg0) == NOP_EXPR
6870 && TREE_CODE (type) == REAL_TYPE)
6872 tree targ0 = strip_float_extensions (arg0);
6874 return fold_convert (type, fold_build1 (ABS_EXPR,
6878 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6879 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6882 /* Strip sign ops from argument. */
6883 if (TREE_CODE (type) == REAL_TYPE)
6885 tem = fold_strip_sign_ops (arg0);
6887 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* Conjugation of a non-complex value is the value itself. */
6892 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6893 return fold_convert (type, arg0);
6894 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6895 return build2 (COMPLEX_EXPR, type,
6896 TREE_OPERAND (arg0, 0),
6897 negate_expr (TREE_OPERAND (arg0, 1)));
6898 else if (TREE_CODE (arg0) == COMPLEX_CST)
6899 return build_complex (type, TREE_REALPART (arg0),
6900 negate_expr (TREE_IMAGPART (arg0)));
6901 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6902 return fold_build2 (TREE_CODE (arg0), type,
6903 fold_build1 (CONJ_EXPR, type,
6904 TREE_OPERAND (arg0, 0)),
6905 fold_build1 (CONJ_EXPR, type,
6906 TREE_OPERAND (arg0, 1)));
6907 else if (TREE_CODE (arg0) == CONJ_EXPR)
6908 return TREE_OPERAND (arg0, 0);
6912 if (TREE_CODE (arg0) == INTEGER_CST)
6913 return fold_not_const (arg0, type);
6914 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6915 return TREE_OPERAND (arg0, 0);
6916 /* Convert ~ (-A) to A - 1. */
6917 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6918 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6919 build_int_cst (type, 1));
6920 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6921 else if (INTEGRAL_TYPE_P (type)
6922 && ((TREE_CODE (arg0) == MINUS_EXPR
6923 && integer_onep (TREE_OPERAND (arg0, 1)))
6924 || (TREE_CODE (arg0) == PLUS_EXPR
6925 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6926 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6927 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6928 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6929 && (tem = fold_unary (BIT_NOT_EXPR, type,
6931 TREE_OPERAND (arg0, 0)))))
6932 return fold_build2 (BIT_XOR_EXPR, type, tem,
6933 fold_convert (type, TREE_OPERAND (arg0, 1)));
6934 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6935 && (tem = fold_unary (BIT_NOT_EXPR, type,
6937 TREE_OPERAND (arg0, 1)))))
6938 return fold_build2 (BIT_XOR_EXPR, type,
6939 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6943 case TRUTH_NOT_EXPR:
6944 /* The argument to invert_truthvalue must have Boolean type. */
6945 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6946 arg0 = fold_convert (boolean_type_node, arg0);
6948 /* Note that the operand of this must be an int
6949 and its values must be 0 or 1.
6950 ("true" is a fixed value perhaps depending on the language,
6951 but we don't handle values other than 1 correctly yet.) */
6952 tem = invert_truthvalue (arg0);
6953 /* Avoid infinite recursion. */
6954 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6956 return fold_convert (type, tem);
/* REALPART of a non-complex value is the value itself. */
6959 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6961 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6962 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6963 TREE_OPERAND (arg0, 1));
6964 else if (TREE_CODE (arg0) == COMPLEX_CST)
6965 return TREE_REALPART (arg0);
6966 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6967 return fold_build2 (TREE_CODE (arg0), type,
6968 fold_build1 (REALPART_EXPR, type,
6969 TREE_OPERAND (arg0, 0)),
6970 fold_build1 (REALPART_EXPR, type,
6971 TREE_OPERAND (arg0, 1)));
/* IMAGPART of a non-complex value is zero. */
6975 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6976 return fold_convert (type, integer_zero_node);
6977 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6978 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6979 TREE_OPERAND (arg0, 0));
6980 else if (TREE_CODE (arg0) == COMPLEX_CST)
6981 return TREE_IMAGPART (arg0);
6982 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6983 return fold_build2 (TREE_CODE (arg0), type,
6984 fold_build1 (IMAGPART_EXPR, type,
6985 TREE_OPERAND (arg0, 0)),
6986 fold_build1 (IMAGPART_EXPR, type,
6987 TREE_OPERAND (arg0, 1)));
6992 } /* switch (code) */
6995 /* Fold a binary expression of code CODE and type TYPE with operands
6996 OP0 and OP1. Return the folded expression if folding is
6997 successful. Otherwise, return NULL_TREE. */
7000 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7002 tree t1 = NULL_TREE;
7004 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7005 enum tree_code_class kind = TREE_CODE_CLASS (code);
7007 /* WINS will be nonzero when the switch is done
7008 if all operands are constant. */
7011 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7012 && TREE_CODE_LENGTH (code) == 2);
7021 /* Strip any conversions that don't change the mode. This is
7022 safe for every expression, except for a comparison expression
7023 because its signedness is derived from its operands. So, in
7024 the latter case, only strip conversions that don't change the
7027 Note that this is done as an internal manipulation within the
7028 constant folder, in order to find the simplest representation
7029 of the arguments so that their form can be studied. In any
7030 cases, the appropriate type conversions should be put back in
7031 the tree that will get out of the constant folder. */
7032 if (kind == tcc_comparison)
7033 STRIP_SIGN_NOPS (arg0);
7037 if (TREE_CODE (arg0) == COMPLEX_CST)
7038 subop = TREE_REALPART (arg0);
7042 if (TREE_CODE (subop) != INTEGER_CST
7043 && TREE_CODE (subop) != REAL_CST)
7044 /* Note that TREE_CONSTANT isn't enough:
7045 static var addresses are constant but we can't
7046 do arithmetic on them. */
7054 /* Strip any conversions that don't change the mode. This is
7055 safe for every expression, except for a comparison expression
7056 because its signedness is derived from its operands. So, in
7057 the latter case, only strip conversions that don't change the
7060 Note that this is done as an internal manipulation within the
7061 constant folder, in order to find the simplest representation
7062 of the arguments so that their form can be studied. In any
7063 cases, the appropriate type conversions should be put back in
7064 the tree that will get out of the constant folder. */
7065 if (kind == tcc_comparison)
7066 STRIP_SIGN_NOPS (arg1);
7070 if (TREE_CODE (arg1) == COMPLEX_CST)
7071 subop = TREE_REALPART (arg1);
7075 if (TREE_CODE (subop) != INTEGER_CST
7076 && TREE_CODE (subop) != REAL_CST)
7077 /* Note that TREE_CONSTANT isn't enough:
7078 static var addresses are constant but we can't
7079 do arithmetic on them. */
7083 /* If this is a commutative operation, and ARG0 is a constant, move it
7084 to ARG1 to reduce the number of tests below. */
7085 if (commutative_tree_code (code)
7086 && tree_swap_operands_p (arg0, arg1, true))
7087 return fold_build2 (code, type, op1, op0);
7089 /* Now WINS is set as described above,
7090 ARG0 is the first operand of EXPR,
7091 and ARG1 is the second operand (if it has more than one operand).
7093 First check for cases where an arithmetic operation is applied to a
7094 compound, conditional, or comparison operation. Push the arithmetic
7095 operation inside the compound or conditional to see if any folding
7096 can then be done. Convert comparison to conditional for this purpose.
7097 The also optimizes non-constant cases that used to be done in
7100 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7101 one of the operands is a comparison and the other is a comparison, a
7102 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7103 code below would make the expression more complex. Change it to a
7104 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7105 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7107 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7108 || code == EQ_EXPR || code == NE_EXPR)
7109 && ((truth_value_p (TREE_CODE (arg0))
7110 && (truth_value_p (TREE_CODE (arg1))
7111 || (TREE_CODE (arg1) == BIT_AND_EXPR
7112 && integer_onep (TREE_OPERAND (arg1, 1)))))
7113 || (truth_value_p (TREE_CODE (arg1))
7114 && (truth_value_p (TREE_CODE (arg0))
7115 || (TREE_CODE (arg0) == BIT_AND_EXPR
7116 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7118 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7119 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7122 fold_convert (boolean_type_node, arg0),
7123 fold_convert (boolean_type_node, arg1));
7125 if (code == EQ_EXPR)
7126 tem = invert_truthvalue (tem);
7128 return fold_convert (type, tem);
7131 if (TREE_CODE_CLASS (code) == tcc_binary
7132 || TREE_CODE_CLASS (code) == tcc_comparison)
7134 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7135 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7136 fold_build2 (code, type,
7137 TREE_OPERAND (arg0, 1), op1));
7138 if (TREE_CODE (arg1) == COMPOUND_EXPR
7139 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7140 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7141 fold_build2 (code, type,
7142 op0, TREE_OPERAND (arg1, 1)));
7144 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7146 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7148 /*cond_first_p=*/1);
7149 if (tem != NULL_TREE)
7153 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7155 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7157 /*cond_first_p=*/0);
7158 if (tem != NULL_TREE)
7166 /* A + (-B) -> A - B */
7167 if (TREE_CODE (arg1) == NEGATE_EXPR)
7168 return fold_build2 (MINUS_EXPR, type,
7169 fold_convert (type, arg0),
7170 fold_convert (type, TREE_OPERAND (arg1, 0)));
7171 /* (-A) + B -> B - A */
7172 if (TREE_CODE (arg0) == NEGATE_EXPR
7173 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7174 return fold_build2 (MINUS_EXPR, type,
7175 fold_convert (type, arg1),
7176 fold_convert (type, TREE_OPERAND (arg0, 0)));
7177 /* Convert ~A + 1 to -A. */
7178 if (INTEGRAL_TYPE_P (type)
7179 && TREE_CODE (arg0) == BIT_NOT_EXPR
7180 && integer_onep (arg1))
7181 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7183 if (! FLOAT_TYPE_P (type))
7185 if (integer_zerop (arg1))
7186 return non_lvalue (fold_convert (type, arg0));
7188 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7189 with a constant, and the two constants have no bits in common,
7190 we should treat this as a BIT_IOR_EXPR since this may produce more
7192 if (TREE_CODE (arg0) == BIT_AND_EXPR
7193 && TREE_CODE (arg1) == BIT_AND_EXPR
7194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7195 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7196 && integer_zerop (const_binop (BIT_AND_EXPR,
7197 TREE_OPERAND (arg0, 1),
7198 TREE_OPERAND (arg1, 1), 0)))
7200 code = BIT_IOR_EXPR;
7204 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7205 (plus (plus (mult) (mult)) (foo)) so that we can
7206 take advantage of the factoring cases below. */
7207 if (((TREE_CODE (arg0) == PLUS_EXPR
7208 || TREE_CODE (arg0) == MINUS_EXPR)
7209 && TREE_CODE (arg1) == MULT_EXPR)
7210 || ((TREE_CODE (arg1) == PLUS_EXPR
7211 || TREE_CODE (arg1) == MINUS_EXPR)
7212 && TREE_CODE (arg0) == MULT_EXPR))
7214 tree parg0, parg1, parg, marg;
7215 enum tree_code pcode;
7217 if (TREE_CODE (arg1) == MULT_EXPR)
7218 parg = arg0, marg = arg1;
7220 parg = arg1, marg = arg0;
7221 pcode = TREE_CODE (parg);
7222 parg0 = TREE_OPERAND (parg, 0);
7223 parg1 = TREE_OPERAND (parg, 1);
7227 if (TREE_CODE (parg0) == MULT_EXPR
7228 && TREE_CODE (parg1) != MULT_EXPR)
7229 return fold_build2 (pcode, type,
7230 fold_build2 (PLUS_EXPR, type,
7231 fold_convert (type, parg0),
7232 fold_convert (type, marg)),
7233 fold_convert (type, parg1));
7234 if (TREE_CODE (parg0) != MULT_EXPR
7235 && TREE_CODE (parg1) == MULT_EXPR)
7236 return fold_build2 (PLUS_EXPR, type,
7237 fold_convert (type, parg0),
7238 fold_build2 (pcode, type,
7239 fold_convert (type, marg),
7244 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7246 tree arg00, arg01, arg10, arg11;
7247 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7249 /* (A * C) + (B * C) -> (A+B) * C.
7250 We are most concerned about the case where C is a constant,
7251 but other combinations show up during loop reduction. Since
7252 it is not difficult, try all four possibilities. */
7254 arg00 = TREE_OPERAND (arg0, 0);
7255 arg01 = TREE_OPERAND (arg0, 1);
7256 arg10 = TREE_OPERAND (arg1, 0);
7257 arg11 = TREE_OPERAND (arg1, 1);
7260 if (operand_equal_p (arg01, arg11, 0))
7261 same = arg01, alt0 = arg00, alt1 = arg10;
7262 else if (operand_equal_p (arg00, arg10, 0))
7263 same = arg00, alt0 = arg01, alt1 = arg11;
7264 else if (operand_equal_p (arg00, arg11, 0))
7265 same = arg00, alt0 = arg01, alt1 = arg10;
7266 else if (operand_equal_p (arg01, arg10, 0))
7267 same = arg01, alt0 = arg00, alt1 = arg11;
7269 /* No identical multiplicands; see if we can find a common
7270 power-of-two factor in non-power-of-two multiplies. This
7271 can help in multi-dimensional array access. */
7272 else if (TREE_CODE (arg01) == INTEGER_CST
7273 && TREE_CODE (arg11) == INTEGER_CST
7274 && TREE_INT_CST_HIGH (arg01) == 0
7275 && TREE_INT_CST_HIGH (arg11) == 0)
7277 HOST_WIDE_INT int01, int11, tmp;
7278 int01 = TREE_INT_CST_LOW (arg01);
7279 int11 = TREE_INT_CST_LOW (arg11);
7281 /* Move min of absolute values to int11. */
7282 if ((int01 >= 0 ? int01 : -int01)
7283 < (int11 >= 0 ? int11 : -int11))
7285 tmp = int01, int01 = int11, int11 = tmp;
7286 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7287 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7290 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7292 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7293 build_int_cst (NULL_TREE,
7301 return fold_build2 (MULT_EXPR, type,
7302 fold_build2 (PLUS_EXPR, type,
7303 fold_convert (type, alt0),
7304 fold_convert (type, alt1)),
7305 fold_convert (type, same));
7308 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7309 of the array. Loop optimizer sometimes produce this type of
7311 if (TREE_CODE (arg0) == ADDR_EXPR)
7313 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7315 return fold_convert (type, tem);
7317 else if (TREE_CODE (arg1) == ADDR_EXPR)
7319 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7321 return fold_convert (type, tem);
7326 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7327 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7328 return non_lvalue (fold_convert (type, arg0));
7330 /* Likewise if the operands are reversed. */
7331 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7332 return non_lvalue (fold_convert (type, arg1));
7334 /* Convert X + -C into X - C. */
7335 if (TREE_CODE (arg1) == REAL_CST
7336 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7338 tem = fold_negate_const (arg1, type);
7339 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7340 return fold_build2 (MINUS_EXPR, type,
7341 fold_convert (type, arg0),
7342 fold_convert (type, tem));
7345 if (flag_unsafe_math_optimizations
7346 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7347 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7348 && (tem = distribute_real_division (code, type, arg0, arg1)))
7351 /* Convert x+x into x*2.0. */
7352 if (operand_equal_p (arg0, arg1, 0)
7353 && SCALAR_FLOAT_TYPE_P (type))
7354 return fold_build2 (MULT_EXPR, type, arg0,
7355 build_real (type, dconst2));
7357 /* Convert x*c+x into x*(c+1). */
7358 if (flag_unsafe_math_optimizations
7359 && TREE_CODE (arg0) == MULT_EXPR
7360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7361 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7362 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7366 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7367 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7368 return fold_build2 (MULT_EXPR, type, arg1,
7369 build_real (type, c));
7372 /* Convert x+x*c into x*(c+1). */
7373 if (flag_unsafe_math_optimizations
7374 && TREE_CODE (arg1) == MULT_EXPR
7375 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7376 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7377 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7381 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7382 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7383 return fold_build2 (MULT_EXPR, type, arg0,
7384 build_real (type, c));
7387 /* Convert x*c1+x*c2 into x*(c1+c2). */
7388 if (flag_unsafe_math_optimizations
7389 && TREE_CODE (arg0) == MULT_EXPR
7390 && TREE_CODE (arg1) == MULT_EXPR
7391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7392 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7393 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7394 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7395 && operand_equal_p (TREE_OPERAND (arg0, 0),
7396 TREE_OPERAND (arg1, 0), 0))
7398 REAL_VALUE_TYPE c1, c2;
7400 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7401 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7402 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7403 return fold_build2 (MULT_EXPR, type,
7404 TREE_OPERAND (arg0, 0),
7405 build_real (type, c1));
7407 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7408 if (flag_unsafe_math_optimizations
7409 && TREE_CODE (arg1) == PLUS_EXPR
7410 && TREE_CODE (arg0) != MULT_EXPR)
7412 tree tree10 = TREE_OPERAND (arg1, 0);
7413 tree tree11 = TREE_OPERAND (arg1, 1);
7414 if (TREE_CODE (tree11) == MULT_EXPR
7415 && TREE_CODE (tree10) == MULT_EXPR)
7418 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7419 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7422 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7423 if (flag_unsafe_math_optimizations
7424 && TREE_CODE (arg0) == PLUS_EXPR
7425 && TREE_CODE (arg1) != MULT_EXPR)
7427 tree tree00 = TREE_OPERAND (arg0, 0);
7428 tree tree01 = TREE_OPERAND (arg0, 1);
7429 if (TREE_CODE (tree01) == MULT_EXPR
7430 && TREE_CODE (tree00) == MULT_EXPR)
7433 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7434 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7440 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7441 is a rotate of A by C1 bits. */
7442 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7443 is a rotate of A by B bits. */
7445 enum tree_code code0, code1;
7446 code0 = TREE_CODE (arg0);
7447 code1 = TREE_CODE (arg1);
7448 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7449 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7450 && operand_equal_p (TREE_OPERAND (arg0, 0),
7451 TREE_OPERAND (arg1, 0), 0)
7452 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7454 tree tree01, tree11;
7455 enum tree_code code01, code11;
7457 tree01 = TREE_OPERAND (arg0, 1);
7458 tree11 = TREE_OPERAND (arg1, 1);
7459 STRIP_NOPS (tree01);
7460 STRIP_NOPS (tree11);
7461 code01 = TREE_CODE (tree01);
7462 code11 = TREE_CODE (tree11);
7463 if (code01 == INTEGER_CST
7464 && code11 == INTEGER_CST
7465 && TREE_INT_CST_HIGH (tree01) == 0
7466 && TREE_INT_CST_HIGH (tree11) == 0
7467 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7468 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7469 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7470 code0 == LSHIFT_EXPR ? tree01 : tree11);
7471 else if (code11 == MINUS_EXPR)
7473 tree tree110, tree111;
7474 tree110 = TREE_OPERAND (tree11, 0);
7475 tree111 = TREE_OPERAND (tree11, 1);
7476 STRIP_NOPS (tree110);
7477 STRIP_NOPS (tree111);
7478 if (TREE_CODE (tree110) == INTEGER_CST
7479 && 0 == compare_tree_int (tree110,
7481 (TREE_TYPE (TREE_OPERAND
7483 && operand_equal_p (tree01, tree111, 0))
7484 return build2 ((code0 == LSHIFT_EXPR
7487 type, TREE_OPERAND (arg0, 0), tree01);
7489 else if (code01 == MINUS_EXPR)
7491 tree tree010, tree011;
7492 tree010 = TREE_OPERAND (tree01, 0);
7493 tree011 = TREE_OPERAND (tree01, 1);
7494 STRIP_NOPS (tree010);
7495 STRIP_NOPS (tree011);
7496 if (TREE_CODE (tree010) == INTEGER_CST
7497 && 0 == compare_tree_int (tree010,
7499 (TREE_TYPE (TREE_OPERAND
7501 && operand_equal_p (tree11, tree011, 0))
7502 return build2 ((code0 != LSHIFT_EXPR
7505 type, TREE_OPERAND (arg0, 0), tree11);
7511 /* In most languages, can't associate operations on floats through
7512 parentheses. Rather than remember where the parentheses were, we
7513 don't associate floats at all, unless the user has specified
7514 -funsafe-math-optimizations. */
7517 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7519 tree var0, con0, lit0, minus_lit0;
7520 tree var1, con1, lit1, minus_lit1;
7522 /* Split both trees into variables, constants, and literals. Then
7523 associate each group together, the constants with literals,
7524 then the result with variables. This increases the chances of
7525 literals being recombined later and of generating relocatable
7526 expressions for the sum of a constant and literal. */
7527 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7528 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7529 code == MINUS_EXPR);
7531 /* Only do something if we found more than two objects. Otherwise,
7532 nothing has changed and we risk infinite recursion. */
7533 if (2 < ((var0 != 0) + (var1 != 0)
7534 + (con0 != 0) + (con1 != 0)
7535 + (lit0 != 0) + (lit1 != 0)
7536 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7538 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7539 if (code == MINUS_EXPR)
7542 var0 = associate_trees (var0, var1, code, type);
7543 con0 = associate_trees (con0, con1, code, type);
7544 lit0 = associate_trees (lit0, lit1, code, type);
7545 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7547 /* Preserve the MINUS_EXPR if the negative part of the literal is
7548 greater than the positive part. Otherwise, the multiplicative
7549 folding code (i.e extract_muldiv) may be fooled in case
7550 unsigned constants are subtracted, like in the following
7551 example: ((X*2 + 4) - 8U)/2. */
7552 if (minus_lit0 && lit0)
7554 if (TREE_CODE (lit0) == INTEGER_CST
7555 && TREE_CODE (minus_lit0) == INTEGER_CST
7556 && tree_int_cst_lt (lit0, minus_lit0))
7558 minus_lit0 = associate_trees (minus_lit0, lit0,
7564 lit0 = associate_trees (lit0, minus_lit0,
7572 return fold_convert (type,
7573 associate_trees (var0, minus_lit0,
7577 con0 = associate_trees (con0, minus_lit0,
7579 return fold_convert (type,
7580 associate_trees (var0, con0,
7585 con0 = associate_trees (con0, lit0, code, type);
7586 return fold_convert (type, associate_trees (var0, con0,
7593 t1 = const_binop (code, arg0, arg1, 0);
7594 if (t1 != NULL_TREE)
7596 /* The return value should always have
7597 the same type as the original expression. */
7598 if (TREE_TYPE (t1) != type)
7599 t1 = fold_convert (type, t1);
7606 /* A - (-B) -> A + B */
7607 if (TREE_CODE (arg1) == NEGATE_EXPR)
7608 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7609 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7610 if (TREE_CODE (arg0) == NEGATE_EXPR
7611 && (FLOAT_TYPE_P (type)
7612 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7613 && negate_expr_p (arg1)
7614 && reorder_operands_p (arg0, arg1))
7615 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7616 TREE_OPERAND (arg0, 0));
7617 /* Convert -A - 1 to ~A. */
7618 if (INTEGRAL_TYPE_P (type)
7619 && TREE_CODE (arg0) == NEGATE_EXPR
7620 && integer_onep (arg1))
7621 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7623 /* Convert -1 - A to ~A. */
7624 if (INTEGRAL_TYPE_P (type)
7625 && integer_all_onesp (arg0))
7626 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7628 if (! FLOAT_TYPE_P (type))
7630 if (! wins && integer_zerop (arg0))
7631 return negate_expr (fold_convert (type, arg1));
7632 if (integer_zerop (arg1))
7633 return non_lvalue (fold_convert (type, arg0));
7635 /* Fold A - (A & B) into ~B & A. */
7636 if (!TREE_SIDE_EFFECTS (arg0)
7637 && TREE_CODE (arg1) == BIT_AND_EXPR)
7639 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7640 return fold_build2 (BIT_AND_EXPR, type,
7641 fold_build1 (BIT_NOT_EXPR, type,
7642 TREE_OPERAND (arg1, 0)),
7644 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7645 return fold_build2 (BIT_AND_EXPR, type,
7646 fold_build1 (BIT_NOT_EXPR, type,
7647 TREE_OPERAND (arg1, 1)),
7651 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7652 any power of 2 minus 1. */
7653 if (TREE_CODE (arg0) == BIT_AND_EXPR
7654 && TREE_CODE (arg1) == BIT_AND_EXPR
7655 && operand_equal_p (TREE_OPERAND (arg0, 0),
7656 TREE_OPERAND (arg1, 0), 0))
7658 tree mask0 = TREE_OPERAND (arg0, 1);
7659 tree mask1 = TREE_OPERAND (arg1, 1);
7660 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7662 if (operand_equal_p (tem, mask1, 0))
7664 tem = fold_build2 (BIT_XOR_EXPR, type,
7665 TREE_OPERAND (arg0, 0), mask1);
7666 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7671 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7672 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7673 return non_lvalue (fold_convert (type, arg0));
7675 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7676 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7677 (-ARG1 + ARG0) reduces to -ARG1. */
7678 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7679 return negate_expr (fold_convert (type, arg1));
7681 /* Fold &x - &x. This can happen from &x.foo - &x.
7682 This is unsafe for certain floats even in non-IEEE formats.
7683 In IEEE, it is unsafe because it does wrong for NaNs.
7684 Also note that operand_equal_p is always false if an operand
7687 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7688 && operand_equal_p (arg0, arg1, 0))
7689 return fold_convert (type, integer_zero_node);
7691 /* A - B -> A + (-B) if B is easily negatable. */
7692 if (!wins && negate_expr_p (arg1)
7693 && ((FLOAT_TYPE_P (type)
7694 /* Avoid this transformation if B is a positive REAL_CST. */
7695 && (TREE_CODE (arg1) != REAL_CST
7696 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7697 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7698 return fold_build2 (PLUS_EXPR, type,
7699 fold_convert (type, arg0),
7700 fold_convert (type, negate_expr (arg1)));
7702 /* Try folding difference of addresses. */
7706 if ((TREE_CODE (arg0) == ADDR_EXPR
7707 || TREE_CODE (arg1) == ADDR_EXPR)
7708 && ptr_difference_const (arg0, arg1, &diff))
7709 return build_int_cst_type (type, diff);
7712 /* Fold &a[i] - &a[j] to i-j. */
7713 if (TREE_CODE (arg0) == ADDR_EXPR
7714 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7715 && TREE_CODE (arg1) == ADDR_EXPR
7716 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7718 tree aref0 = TREE_OPERAND (arg0, 0);
7719 tree aref1 = TREE_OPERAND (arg1, 0);
7720 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7721 TREE_OPERAND (aref1, 0), 0))
7723 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7724 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7725 tree esz = array_ref_element_size (aref0);
7726 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7727 return fold_build2 (MULT_EXPR, type, diff,
7728 fold_convert (type, esz));
7733 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7734 of the array. Loop optimizer sometimes produce this type of
7736 if (TREE_CODE (arg0) == ADDR_EXPR)
7738 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7740 return fold_convert (type, tem);
7743 if (flag_unsafe_math_optimizations
7744 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7745 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7746 && (tem = distribute_real_division (code, type, arg0, arg1)))
7749 if (TREE_CODE (arg0) == MULT_EXPR
7750 && TREE_CODE (arg1) == MULT_EXPR
7751 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7753 /* (A * C) - (B * C) -> (A-B) * C. */
7754 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7755 TREE_OPERAND (arg1, 1), 0))
7756 return fold_build2 (MULT_EXPR, type,
7757 fold_build2 (MINUS_EXPR, type,
7758 TREE_OPERAND (arg0, 0),
7759 TREE_OPERAND (arg1, 0)),
7760 TREE_OPERAND (arg0, 1));
7761 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7762 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7763 TREE_OPERAND (arg1, 0), 0))
7764 return fold_build2 (MULT_EXPR, type,
7765 TREE_OPERAND (arg0, 0),
7766 fold_build2 (MINUS_EXPR, type,
7767 TREE_OPERAND (arg0, 1),
7768 TREE_OPERAND (arg1, 1)));
7774 /* (-A) * (-B) -> A * B */
7775 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7776 return fold_build2 (MULT_EXPR, type,
7777 TREE_OPERAND (arg0, 0),
7778 negate_expr (arg1));
7779 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7780 return fold_build2 (MULT_EXPR, type,
7782 TREE_OPERAND (arg1, 0));
7784 if (! FLOAT_TYPE_P (type))
7786 if (integer_zerop (arg1))
7787 return omit_one_operand (type, arg1, arg0);
7788 if (integer_onep (arg1))
7789 return non_lvalue (fold_convert (type, arg0));
7790 /* Transform x * -1 into -x. */
7791 if (integer_all_onesp (arg1))
7792 return fold_convert (type, negate_expr (arg0));
7794 /* (a * (1 << b)) is (a << b) */
7795 if (TREE_CODE (arg1) == LSHIFT_EXPR
7796 && integer_onep (TREE_OPERAND (arg1, 0)))
7797 return fold_build2 (LSHIFT_EXPR, type, arg0,
7798 TREE_OPERAND (arg1, 1));
7799 if (TREE_CODE (arg0) == LSHIFT_EXPR
7800 && integer_onep (TREE_OPERAND (arg0, 0)))
7801 return fold_build2 (LSHIFT_EXPR, type, arg1,
7802 TREE_OPERAND (arg0, 1));
7804 if (TREE_CODE (arg1) == INTEGER_CST
7805 && 0 != (tem = extract_muldiv (op0,
7806 fold_convert (type, arg1),
7808 return fold_convert (type, tem);
7813 /* Maybe fold x * 0 to 0. The expressions aren't the same
7814 when x is NaN, since x * 0 is also NaN. Nor are they the
7815 same in modes with signed zeros, since multiplying a
7816 negative value by 0 gives -0, not +0. */
7817 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7819 && real_zerop (arg1))
7820 return omit_one_operand (type, arg1, arg0);
7821 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7822 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7823 && real_onep (arg1))
7824 return non_lvalue (fold_convert (type, arg0));
7826 /* Transform x * -1.0 into -x. */
7827 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7828 && real_minus_onep (arg1))
7829 return fold_convert (type, negate_expr (arg0));
7831 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7832 if (flag_unsafe_math_optimizations
7833 && TREE_CODE (arg0) == RDIV_EXPR
7834 && TREE_CODE (arg1) == REAL_CST
7835 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7837 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7840 return fold_build2 (RDIV_EXPR, type, tem,
7841 TREE_OPERAND (arg0, 1));
7844 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7845 if (operand_equal_p (arg0, arg1, 0))
7847 tree tem = fold_strip_sign_ops (arg0);
7848 if (tem != NULL_TREE)
7850 tem = fold_convert (type, tem);
7851 return fold_build2 (MULT_EXPR, type, tem, tem);
7855 if (flag_unsafe_math_optimizations)
7857 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7858 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7860 /* Optimizations of root(...)*root(...). */
7861 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7863 tree rootfn, arg, arglist;
7864 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7865 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7867 /* Optimize sqrt(x)*sqrt(x) as x. */
7868 if (BUILTIN_SQRT_P (fcode0)
7869 && operand_equal_p (arg00, arg10, 0)
7870 && ! HONOR_SNANS (TYPE_MODE (type)))
7873 /* Optimize root(x)*root(y) as root(x*y). */
7874 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7875 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7876 arglist = build_tree_list (NULL_TREE, arg);
7877 return build_function_call_expr (rootfn, arglist);
7880 /* Optimize expN(x)*expN(y) as expN(x+y). */
7881 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7883 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7884 tree arg = fold_build2 (PLUS_EXPR, type,
7885 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7886 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7887 tree arglist = build_tree_list (NULL_TREE, arg);
7888 return build_function_call_expr (expfn, arglist);
7891 /* Optimizations of pow(...)*pow(...). */
7892 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7893 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7894 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7896 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7897 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7899 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7900 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7903 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7904 if (operand_equal_p (arg01, arg11, 0))
7906 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7907 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7908 tree arglist = tree_cons (NULL_TREE, arg,
7909 build_tree_list (NULL_TREE,
7911 return build_function_call_expr (powfn, arglist);
7914 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7915 if (operand_equal_p (arg00, arg10, 0))
7917 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7918 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7919 tree arglist = tree_cons (NULL_TREE, arg00,
7920 build_tree_list (NULL_TREE,
7922 return build_function_call_expr (powfn, arglist);
7926 /* Optimize tan(x)*cos(x) as sin(x). */
7927 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7928 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7929 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7930 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7931 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7932 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7933 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7934 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7936 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7938 if (sinfn != NULL_TREE)
7939 return build_function_call_expr (sinfn,
7940 TREE_OPERAND (arg0, 1));
7943 /* Optimize x*pow(x,c) as pow(x,c+1). */
7944 if (fcode1 == BUILT_IN_POW
7945 || fcode1 == BUILT_IN_POWF
7946 || fcode1 == BUILT_IN_POWL)
7948 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7949 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7951 if (TREE_CODE (arg11) == REAL_CST
7952 && ! TREE_CONSTANT_OVERFLOW (arg11)
7953 && operand_equal_p (arg0, arg10, 0))
7955 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7959 c = TREE_REAL_CST (arg11);
7960 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7961 arg = build_real (type, c);
7962 arglist = build_tree_list (NULL_TREE, arg);
7963 arglist = tree_cons (NULL_TREE, arg0, arglist);
7964 return build_function_call_expr (powfn, arglist);
7968 /* Optimize pow(x,c)*x as pow(x,c+1). */
7969 if (fcode0 == BUILT_IN_POW
7970 || fcode0 == BUILT_IN_POWF
7971 || fcode0 == BUILT_IN_POWL)
7973 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7974 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7976 if (TREE_CODE (arg01) == REAL_CST
7977 && ! TREE_CONSTANT_OVERFLOW (arg01)
7978 && operand_equal_p (arg1, arg00, 0))
7980 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7984 c = TREE_REAL_CST (arg01);
7985 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7986 arg = build_real (type, c);
7987 arglist = build_tree_list (NULL_TREE, arg);
7988 arglist = tree_cons (NULL_TREE, arg1, arglist);
7989 return build_function_call_expr (powfn, arglist);
7993 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7995 && operand_equal_p (arg0, arg1, 0))
7997 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8001 tree arg = build_real (type, dconst2);
8002 tree arglist = build_tree_list (NULL_TREE, arg);
8003 arglist = tree_cons (NULL_TREE, arg0, arglist);
8004 return build_function_call_expr (powfn, arglist);
8013 if (integer_all_onesp (arg1))
8014 return omit_one_operand (type, arg1, arg0);
8015 if (integer_zerop (arg1))
8016 return non_lvalue (fold_convert (type, arg0));
8017 if (operand_equal_p (arg0, arg1, 0))
8018 return non_lvalue (fold_convert (type, arg0));
8021 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8022 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8024 t1 = build_int_cst (type, -1);
8025 t1 = force_fit_type (t1, 0, false, false);
8026 return omit_one_operand (type, t1, arg1);
8030 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8031 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8033 t1 = build_int_cst (type, -1);
8034 t1 = force_fit_type (t1, 0, false, false);
8035 return omit_one_operand (type, t1, arg0);
8038 t1 = distribute_bit_expr (code, type, arg0, arg1);
8039 if (t1 != NULL_TREE)
8042 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8044 This results in more efficient code for machines without a NAND
8045 instruction. Combine will canonicalize to the first form
8046 which will allow use of NAND instructions provided by the
8047 backend if they exist. */
8048 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8049 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8051 return fold_build1 (BIT_NOT_EXPR, type,
8052 build2 (BIT_AND_EXPR, type,
8053 TREE_OPERAND (arg0, 0),
8054 TREE_OPERAND (arg1, 0)));
8057 /* See if this can be simplified into a rotate first. If that
8058 is unsuccessful continue in the association code. */
8062 if (integer_zerop (arg1))
8063 return non_lvalue (fold_convert (type, arg0));
8064 if (integer_all_onesp (arg1))
8065 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8066 if (operand_equal_p (arg0, arg1, 0))
8067 return omit_one_operand (type, integer_zero_node, arg0);
8070 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8071 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8073 t1 = build_int_cst (type, -1);
8074 t1 = force_fit_type (t1, 0, false, false);
8075 return omit_one_operand (type, t1, arg1);
8079 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8080 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8082 t1 = build_int_cst (type, -1);
8083 t1 = force_fit_type (t1, 0, false, false);
8084 return omit_one_operand (type, t1, arg0);
8087 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8088 with a constant, and the two constants have no bits in common,
8089 we should treat this as a BIT_IOR_EXPR since this may produce more
8091 if (TREE_CODE (arg0) == BIT_AND_EXPR
8092 && TREE_CODE (arg1) == BIT_AND_EXPR
8093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8094 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8095 && integer_zerop (const_binop (BIT_AND_EXPR,
8096 TREE_OPERAND (arg0, 1),
8097 TREE_OPERAND (arg1, 1), 0)))
8099 code = BIT_IOR_EXPR;
8103 /* (X | Y) ^ X -> Y & ~ X*/
8104 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8105 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8107 tree t2 = TREE_OPERAND (arg0, 1);
8108 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8110 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8111 fold_convert (type, t1));
8115 /* (Y | X) ^ X -> Y & ~ X*/
8116 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8117 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8119 tree t2 = TREE_OPERAND (arg0, 0);
8120 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8122 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8123 fold_convert (type, t1));
8127 /* X ^ (X | Y) -> Y & ~ X*/
8128 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8129 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8131 tree t2 = TREE_OPERAND (arg1, 1);
8132 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8134 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8135 fold_convert (type, t1));
8139 /* X ^ (Y | X) -> Y & ~ X*/
8140 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8141 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8143 tree t2 = TREE_OPERAND (arg1, 0);
8144 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8146 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8147 fold_convert (type, t1));
8151 /* Convert ~X ^ ~Y to X ^ Y. */
8152 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8153 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8154 return fold_build2 (code, type,
8155 fold_convert (type, TREE_OPERAND (arg0, 0)),
8156 fold_convert (type, TREE_OPERAND (arg1, 0)));
8158 /* See if this can be simplified into a rotate first. If that
8159 is unsuccessful continue in the association code. */
8163 if (integer_all_onesp (arg1))
8164 return non_lvalue (fold_convert (type, arg0));
8165 if (integer_zerop (arg1))
8166 return omit_one_operand (type, arg1, arg0);
8167 if (operand_equal_p (arg0, arg1, 0))
8168 return non_lvalue (fold_convert (type, arg0));
8170 /* ~X & X is always zero. */
8171 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8173 return omit_one_operand (type, integer_zero_node, arg1);
8175 /* X & ~X is always zero. */
8176 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8177 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8178 return omit_one_operand (type, integer_zero_node, arg0);
8180 t1 = distribute_bit_expr (code, type, arg0, arg1);
8181 if (t1 != NULL_TREE)
8183 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8184 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8185 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8188 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8190 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8191 && (~TREE_INT_CST_LOW (arg1)
8192 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8193 return fold_convert (type, TREE_OPERAND (arg0, 0));
8196 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8198 This results in more efficient code for machines without a NOR
8199 instruction. Combine will canonicalize to the first form
8200 which will allow use of NOR instructions provided by the
8201 backend if they exist. */
8202 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8203 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8205 return fold_build1 (BIT_NOT_EXPR, type,
8206 build2 (BIT_IOR_EXPR, type,
8207 TREE_OPERAND (arg0, 0),
8208 TREE_OPERAND (arg1, 0)));
8214 /* Don't touch a floating-point divide by zero unless the mode
8215 of the constant can represent infinity. */
8216 if (TREE_CODE (arg1) == REAL_CST
8217 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8218 && real_zerop (arg1))
8221 /* (-A) / (-B) -> A / B */
8222 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8223 return fold_build2 (RDIV_EXPR, type,
8224 TREE_OPERAND (arg0, 0),
8225 negate_expr (arg1));
8226 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8227 return fold_build2 (RDIV_EXPR, type,
8229 TREE_OPERAND (arg1, 0));
8231 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8232 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8233 && real_onep (arg1))
8234 return non_lvalue (fold_convert (type, arg0));
8236 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8237 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8238 && real_minus_onep (arg1))
8239 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8241 /* If ARG1 is a constant, we can convert this to a multiply by the
8242 reciprocal. This does not have the same rounding properties,
8243 so only do this if -funsafe-math-optimizations. We can actually
8244 always safely do it if ARG1 is a power of two, but it's hard to
8245 tell if it is or not in a portable manner. */
8246 if (TREE_CODE (arg1) == REAL_CST)
8248 if (flag_unsafe_math_optimizations
8249 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8251 return fold_build2 (MULT_EXPR, type, arg0, tem);
8252 /* Find the reciprocal if optimizing and the result is exact. */
8256 r = TREE_REAL_CST (arg1);
8257 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8259 tem = build_real (type, r);
8260 return fold_build2 (MULT_EXPR, type,
8261 fold_convert (type, arg0), tem);
8265 /* Convert A/B/C to A/(B*C). */
8266 if (flag_unsafe_math_optimizations
8267 && TREE_CODE (arg0) == RDIV_EXPR)
8268 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8269 fold_build2 (MULT_EXPR, type,
8270 TREE_OPERAND (arg0, 1), arg1));
8272 /* Convert A/(B/C) to (A/B)*C. */
8273 if (flag_unsafe_math_optimizations
8274 && TREE_CODE (arg1) == RDIV_EXPR)
8275 return fold_build2 (MULT_EXPR, type,
8276 fold_build2 (RDIV_EXPR, type, arg0,
8277 TREE_OPERAND (arg1, 0)),
8278 TREE_OPERAND (arg1, 1));
8280 /* Convert C1/(X*C2) into (C1/C2)/X. */
8281 if (flag_unsafe_math_optimizations
8282 && TREE_CODE (arg1) == MULT_EXPR
8283 && TREE_CODE (arg0) == REAL_CST
8284 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8286 tree tem = const_binop (RDIV_EXPR, arg0,
8287 TREE_OPERAND (arg1, 1), 0);
8289 return fold_build2 (RDIV_EXPR, type, tem,
8290 TREE_OPERAND (arg1, 0));
8293 if (flag_unsafe_math_optimizations)
8295 enum built_in_function fcode = builtin_mathfn_code (arg1);
8296 /* Optimize x/expN(y) into x*expN(-y). */
8297 if (BUILTIN_EXPONENT_P (fcode))
8299 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8300 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8301 tree arglist = build_tree_list (NULL_TREE,
8302 fold_convert (type, arg));
8303 arg1 = build_function_call_expr (expfn, arglist);
8304 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8307 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8308 if (fcode == BUILT_IN_POW
8309 || fcode == BUILT_IN_POWF
8310 || fcode == BUILT_IN_POWL)
8312 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8313 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8314 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8315 tree neg11 = fold_convert (type, negate_expr (arg11));
8316 tree arglist = tree_cons(NULL_TREE, arg10,
8317 build_tree_list (NULL_TREE, neg11));
8318 arg1 = build_function_call_expr (powfn, arglist);
8319 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8323 if (flag_unsafe_math_optimizations)
8325 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8326 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8328 /* Optimize sin(x)/cos(x) as tan(x). */
8329 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8330 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8331 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8332 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8333 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8335 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8337 if (tanfn != NULL_TREE)
8338 return build_function_call_expr (tanfn,
8339 TREE_OPERAND (arg0, 1));
8342 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8343 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8344 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8345 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8346 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8347 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8349 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8351 if (tanfn != NULL_TREE)
8353 tree tmp = TREE_OPERAND (arg0, 1);
8354 tmp = build_function_call_expr (tanfn, tmp);
8355 return fold_build2 (RDIV_EXPR, type,
8356 build_real (type, dconst1), tmp);
8360 /* Optimize pow(x,c)/x as pow(x,c-1). */
8361 if (fcode0 == BUILT_IN_POW
8362 || fcode0 == BUILT_IN_POWF
8363 || fcode0 == BUILT_IN_POWL)
8365 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8366 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8367 if (TREE_CODE (arg01) == REAL_CST
8368 && ! TREE_CONSTANT_OVERFLOW (arg01)
8369 && operand_equal_p (arg1, arg00, 0))
8371 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8375 c = TREE_REAL_CST (arg01);
8376 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8377 arg = build_real (type, c);
8378 arglist = build_tree_list (NULL_TREE, arg);
8379 arglist = tree_cons (NULL_TREE, arg1, arglist);
8380 return build_function_call_expr (powfn, arglist);
8386 case TRUNC_DIV_EXPR:
8387 case ROUND_DIV_EXPR:
8388 case FLOOR_DIV_EXPR:
8390 case EXACT_DIV_EXPR:
8391 if (integer_onep (arg1))
8392 return non_lvalue (fold_convert (type, arg0));
8393 if (integer_zerop (arg1))
8396 if (!TYPE_UNSIGNED (type)
8397 && TREE_CODE (arg1) == INTEGER_CST
8398 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8399 && TREE_INT_CST_HIGH (arg1) == -1)
8400 return fold_convert (type, negate_expr (arg0));
8402 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8403 operation, EXACT_DIV_EXPR.
8405 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8406 At one time others generated faster code, it's not clear if they do
8407 after the last round to changes to the DIV code in expmed.c. */
8408 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8409 && multiple_of_p (type, arg0, arg1))
8410 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8412 if (TREE_CODE (arg1) == INTEGER_CST
8413 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8414 return fold_convert (type, tem);
8419 case FLOOR_MOD_EXPR:
8420 case ROUND_MOD_EXPR:
8421 case TRUNC_MOD_EXPR:
8422 /* X % 1 is always zero, but be sure to preserve any side
8424 if (integer_onep (arg1))
8425 return omit_one_operand (type, integer_zero_node, arg0);
8427 /* X % 0, return X % 0 unchanged so that we can get the
8428 proper warnings and errors. */
8429 if (integer_zerop (arg1))
8432 /* 0 % X is always zero, but be sure to preserve any side
8433 effects in X. Place this after checking for X == 0. */
8434 if (integer_zerop (arg0))
8435 return omit_one_operand (type, integer_zero_node, arg1);
8437 /* X % -1 is zero. */
8438 if (!TYPE_UNSIGNED (type)
8439 && TREE_CODE (arg1) == INTEGER_CST
8440 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8441 && TREE_INT_CST_HIGH (arg1) == -1)
8442 return omit_one_operand (type, integer_zero_node, arg0);
8444 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8445 i.e. "X % C" into "X & C2", if X and C are positive. */
8446 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8447 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8448 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8450 unsigned HOST_WIDE_INT high, low;
8454 l = tree_log2 (arg1);
8455 if (l >= HOST_BITS_PER_WIDE_INT)
8457 high = ((unsigned HOST_WIDE_INT) 1
8458 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8464 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8467 mask = build_int_cst_wide (type, low, high);
8468 return fold_build2 (BIT_AND_EXPR, type,
8469 fold_convert (type, arg0), mask);
8472 /* X % -C is the same as X % C. */
8473 if (code == TRUNC_MOD_EXPR
8474 && !TYPE_UNSIGNED (type)
8475 && TREE_CODE (arg1) == INTEGER_CST
8476 && !TREE_CONSTANT_OVERFLOW (arg1)
8477 && TREE_INT_CST_HIGH (arg1) < 0
8479 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8480 && !sign_bit_p (arg1, arg1))
8481 return fold_build2 (code, type, fold_convert (type, arg0),
8482 fold_convert (type, negate_expr (arg1)));
8484 /* X % -Y is the same as X % Y. */
8485 if (code == TRUNC_MOD_EXPR
8486 && !TYPE_UNSIGNED (type)
8487 && TREE_CODE (arg1) == NEGATE_EXPR
8489 return fold_build2 (code, type, fold_convert (type, arg0),
8490 fold_convert (type, TREE_OPERAND (arg1, 0)));
8492 if (TREE_CODE (arg1) == INTEGER_CST
8493 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8494 return fold_convert (type, tem);
8500 if (integer_all_onesp (arg0))
8501 return omit_one_operand (type, arg0, arg1);
8505 /* Optimize -1 >> x for arithmetic right shifts. */
8506 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8507 return omit_one_operand (type, arg0, arg1);
8508 /* ... fall through ... */
8512 if (integer_zerop (arg1))
8513 return non_lvalue (fold_convert (type, arg0));
8514 if (integer_zerop (arg0))
8515 return omit_one_operand (type, arg0, arg1);
8517 /* Since negative shift count is not well-defined,
8518 don't try to compute it in the compiler. */
8519 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8522 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8523 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8524 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8525 && host_integerp (TREE_OPERAND (arg0, 1), false)
8526 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8528 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8529 + TREE_INT_CST_LOW (arg1));
8531 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8532 being well defined. */
8533 if (low >= TYPE_PRECISION (type))
8535 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8536 low = low % TYPE_PRECISION (type);
8537 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8538 return build_int_cst (type, 0);
8540 low = TYPE_PRECISION (type) - 1;
8543 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8544 build_int_cst (type, low));
8547 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8548 into x & ((unsigned)-1 >> c) for unsigned types. */
8549 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8550 || (TYPE_UNSIGNED (type)
8551 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8552 && host_integerp (arg1, false)
8553 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8554 && host_integerp (TREE_OPERAND (arg0, 1), false)
8555 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8557 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8558 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8564 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8566 lshift = build_int_cst (type, -1);
8567 lshift = int_const_binop (code, lshift, arg1, 0);
8569 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8573 /* Rewrite an LROTATE_EXPR by a constant into an
8574 RROTATE_EXPR by a new constant. */
8575 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8577 tree tem = build_int_cst (NULL_TREE,
8578 GET_MODE_BITSIZE (TYPE_MODE (type)));
8579 tem = fold_convert (TREE_TYPE (arg1), tem);
8580 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8581 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8584 /* If we have a rotate of a bit operation with the rotate count and
8585 the second operand of the bit operation both constant,
8586 permute the two operations. */
8587 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8588 && (TREE_CODE (arg0) == BIT_AND_EXPR
8589 || TREE_CODE (arg0) == BIT_IOR_EXPR
8590 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8591 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8592 return fold_build2 (TREE_CODE (arg0), type,
8593 fold_build2 (code, type,
8594 TREE_OPERAND (arg0, 0), arg1),
8595 fold_build2 (code, type,
8596 TREE_OPERAND (arg0, 1), arg1));
8598 /* Two consecutive rotates adding up to the width of the mode can
8600 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8601 && TREE_CODE (arg0) == RROTATE_EXPR
8602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8603 && TREE_INT_CST_HIGH (arg1) == 0
8604 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8605 && ((TREE_INT_CST_LOW (arg1)
8606 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8607 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8608 return TREE_OPERAND (arg0, 0);
8613 if (operand_equal_p (arg0, arg1, 0))
8614 return omit_one_operand (type, arg0, arg1);
8615 if (INTEGRAL_TYPE_P (type)
8616 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8617 return omit_one_operand (type, arg1, arg0);
8621 if (operand_equal_p (arg0, arg1, 0))
8622 return omit_one_operand (type, arg0, arg1);
8623 if (INTEGRAL_TYPE_P (type)
8624 && TYPE_MAX_VALUE (type)
8625 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8626 return omit_one_operand (type, arg1, arg0);
8629 case TRUTH_ANDIF_EXPR:
8630 /* Note that the operands of this must be ints
8631 and their values must be 0 or 1.
8632 ("true" is a fixed value perhaps depending on the language.) */
8633 /* If first arg is constant zero, return it. */
8634 if (integer_zerop (arg0))
8635 return fold_convert (type, arg0);
8636 case TRUTH_AND_EXPR:
8637 /* If either arg is constant true, drop it. */
8638 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8639 return non_lvalue (fold_convert (type, arg1));
8640 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8641 /* Preserve sequence points. */
8642 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8643 return non_lvalue (fold_convert (type, arg0));
8644 /* If second arg is constant zero, result is zero, but first arg
8645 must be evaluated. */
8646 if (integer_zerop (arg1))
8647 return omit_one_operand (type, arg1, arg0);
8648 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8649 case will be handled here. */
8650 if (integer_zerop (arg0))
8651 return omit_one_operand (type, arg0, arg1);
8653 /* !X && X is always false. */
8654 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8655 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8656 return omit_one_operand (type, integer_zero_node, arg1);
8657 /* X && !X is always false. */
8658 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8660 return omit_one_operand (type, integer_zero_node, arg0);
8662 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8663 means A >= Y && A != MAX, but in this case we know that
8666 if (!TREE_SIDE_EFFECTS (arg0)
8667 && !TREE_SIDE_EFFECTS (arg1))
8669 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8670 if (tem && !operand_equal_p (tem, arg0, 0))
8671 return fold_build2 (code, type, tem, arg1);
8673 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8674 if (tem && !operand_equal_p (tem, arg1, 0))
8675 return fold_build2 (code, type, arg0, tem);
8679 /* We only do these simplifications if we are optimizing. */
8683 /* Check for things like (A || B) && (A || C). We can convert this
8684 to A || (B && C). Note that either operator can be any of the four
8685 truth and/or operations and the transformation will still be
8686 valid. Also note that we only care about order for the
8687 ANDIF and ORIF operators. If B contains side effects, this
8688 might change the truth-value of A. */
8689 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8690 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8691 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8692 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8693 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8694 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8696 tree a00 = TREE_OPERAND (arg0, 0);
8697 tree a01 = TREE_OPERAND (arg0, 1);
8698 tree a10 = TREE_OPERAND (arg1, 0);
8699 tree a11 = TREE_OPERAND (arg1, 1);
8700 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8701 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8702 && (code == TRUTH_AND_EXPR
8703 || code == TRUTH_OR_EXPR));
8705 if (operand_equal_p (a00, a10, 0))
8706 return fold_build2 (TREE_CODE (arg0), type, a00,
8707 fold_build2 (code, type, a01, a11));
8708 else if (commutative && operand_equal_p (a00, a11, 0))
8709 return fold_build2 (TREE_CODE (arg0), type, a00,
8710 fold_build2 (code, type, a01, a10));
8711 else if (commutative && operand_equal_p (a01, a10, 0))
8712 return fold_build2 (TREE_CODE (arg0), type, a01,
8713 fold_build2 (code, type, a00, a11));
8715 /* This case if tricky because we must either have commutative
8716 operators or else A10 must not have side-effects. */
8718 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8719 && operand_equal_p (a01, a11, 0))
8720 return fold_build2 (TREE_CODE (arg0), type,
8721 fold_build2 (code, type, a00, a10),
8725 /* See if we can build a range comparison. */
8726 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8729 /* Check for the possibility of merging component references. If our
8730 lhs is another similar operation, try to merge its rhs with our
8731 rhs. Then try to merge our lhs and rhs. */
8732 if (TREE_CODE (arg0) == code
8733 && 0 != (tem = fold_truthop (code, type,
8734 TREE_OPERAND (arg0, 1), arg1)))
8735 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8737 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8742 case TRUTH_ORIF_EXPR:
8743 /* Note that the operands of this must be ints
8744 and their values must be 0 or true.
8745 ("true" is a fixed value perhaps depending on the language.) */
8746 /* If first arg is constant true, return it. */
8747 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8748 return fold_convert (type, arg0);
8750 /* If either arg is constant zero, drop it. */
8751 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8752 return non_lvalue (fold_convert (type, arg1));
8753 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8754 /* Preserve sequence points. */
8755 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8756 return non_lvalue (fold_convert (type, arg0));
8757 /* If second arg is constant true, result is true, but we must
8758 evaluate first arg. */
8759 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8760 return omit_one_operand (type, arg1, arg0);
8761 /* Likewise for first arg, but note this only occurs here for
8763 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8764 return omit_one_operand (type, arg0, arg1);
8766 /* !X || X is always true. */
8767 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8768 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8769 return omit_one_operand (type, integer_one_node, arg1);
8770 /* X || !X is always true. */
8771 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8772 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8773 return omit_one_operand (type, integer_one_node, arg0);
8777 case TRUTH_XOR_EXPR:
8778 /* If the second arg is constant zero, drop it. */
8779 if (integer_zerop (arg1))
8780 return non_lvalue (fold_convert (type, arg0));
8781 /* If the second arg is constant true, this is a logical inversion. */
8782 if (integer_onep (arg1))
8784 /* Only call invert_truthvalue if operand is a truth value. */
8785 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8786 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8788 tem = invert_truthvalue (arg0);
8789 return non_lvalue (fold_convert (type, tem));
8791 /* Identical arguments cancel to zero. */
8792 if (operand_equal_p (arg0, arg1, 0))
8793 return omit_one_operand (type, integer_zero_node, arg0);
8795 /* !X ^ X is always true. */
8796 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8797 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8798 return omit_one_operand (type, integer_one_node, arg1);
8800 /* X ^ !X is always true. */
8801 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8802 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8803 return omit_one_operand (type, integer_one_node, arg0);
8813 /* If one arg is a real or integer constant, put it last. */
8814 if (tree_swap_operands_p (arg0, arg1, true))
8815 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8817 /* bool_var != 0 becomes bool_var. */
8818 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8820 return non_lvalue (fold_convert (type, arg0));
8822 /* bool_var == 1 becomes bool_var. */
8823 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8825 return non_lvalue (fold_convert (type, arg0));
8827 /* If this is an equality comparison of the address of a non-weak
8828 object against zero, then we know the result. */
8829 if ((code == EQ_EXPR || code == NE_EXPR)
8830 && TREE_CODE (arg0) == ADDR_EXPR
8831 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8832 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8833 && integer_zerop (arg1))
8834 return constant_boolean_node (code != EQ_EXPR, type);
8836 /* If this is an equality comparison of the address of two non-weak,
8837 unaliased symbols neither of which are extern (since we do not
8838 have access to attributes for externs), then we know the result. */
8839 if ((code == EQ_EXPR || code == NE_EXPR)
8840 && TREE_CODE (arg0) == ADDR_EXPR
8841 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8842 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8843 && ! lookup_attribute ("alias",
8844 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8845 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8846 && TREE_CODE (arg1) == ADDR_EXPR
8847 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8848 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8849 && ! lookup_attribute ("alias",
8850 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8851 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8853 /* We know that we're looking at the address of two
8854 non-weak, unaliased, static _DECL nodes.
8856 It is both wasteful and incorrect to call operand_equal_p
8857 to compare the two ADDR_EXPR nodes. It is wasteful in that
8858 all we need to do is test pointer equality for the arguments
8859 to the two ADDR_EXPR nodes. It is incorrect to use
8860 operand_equal_p as that function is NOT equivalent to a
8861 C equality test. It can in fact return false for two
8862 objects which would test as equal using the C equality
8864 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8865 return constant_boolean_node (equal
8866 ? code == EQ_EXPR : code != EQ_EXPR,
8870 /* If this is a comparison of two exprs that look like an
8871 ARRAY_REF of the same object, then we can fold this to a
8872 comparison of the two offsets. */
8873 if (TREE_CODE_CLASS (code) == tcc_comparison)
8875 tree base0, offset0, base1, offset1;
8877 if (extract_array_ref (arg0, &base0, &offset0)
8878 && extract_array_ref (arg1, &base1, &offset1)
8879 && operand_equal_p (base0, base1, 0))
8881 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8882 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8883 offset0 = NULL_TREE;
8884 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8885 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8886 offset1 = NULL_TREE;
8887 if (offset0 == NULL_TREE
8888 && offset1 == NULL_TREE)
8890 offset0 = integer_zero_node;
8891 offset1 = integer_zero_node;
8893 else if (offset0 == NULL_TREE)
8894 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8895 else if (offset1 == NULL_TREE)
8896 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8898 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8899 return fold_build2 (code, type, offset0, offset1);
8903 /* Transform comparisons of the form X +- C CMP X. */
8904 if ((code != EQ_EXPR && code != NE_EXPR)
8905 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8906 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8907 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8908 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8909 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8910 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8911 && !(flag_wrapv || flag_trapv))))
8913 tree arg01 = TREE_OPERAND (arg0, 1);
8914 enum tree_code code0 = TREE_CODE (arg0);
8917 if (TREE_CODE (arg01) == REAL_CST)
8918 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8920 is_positive = tree_int_cst_sgn (arg01);
8922 /* (X - c) > X becomes false. */
8924 && ((code0 == MINUS_EXPR && is_positive >= 0)
8925 || (code0 == PLUS_EXPR && is_positive <= 0)))
8926 return constant_boolean_node (0, type);
8928 /* Likewise (X + c) < X becomes false. */
8930 && ((code0 == PLUS_EXPR && is_positive >= 0)
8931 || (code0 == MINUS_EXPR && is_positive <= 0)))
8932 return constant_boolean_node (0, type);
8934 /* Convert (X - c) <= X to true. */
8935 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8937 && ((code0 == MINUS_EXPR && is_positive >= 0)
8938 || (code0 == PLUS_EXPR && is_positive <= 0)))
8939 return constant_boolean_node (1, type);
8941 /* Convert (X + c) >= X to true. */
8942 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8944 && ((code0 == PLUS_EXPR && is_positive >= 0)
8945 || (code0 == MINUS_EXPR && is_positive <= 0)))
8946 return constant_boolean_node (1, type);
8948 if (TREE_CODE (arg01) == INTEGER_CST)
8950 /* Convert X + c > X and X - c < X to true for integers. */
8952 && ((code0 == PLUS_EXPR && is_positive > 0)
8953 || (code0 == MINUS_EXPR && is_positive < 0)))
8954 return constant_boolean_node (1, type);
8957 && ((code0 == MINUS_EXPR && is_positive > 0)
8958 || (code0 == PLUS_EXPR && is_positive < 0)))
8959 return constant_boolean_node (1, type);
8961 /* Convert X + c <= X and X - c >= X to false for integers. */
8963 && ((code0 == PLUS_EXPR && is_positive > 0)
8964 || (code0 == MINUS_EXPR && is_positive < 0)))
8965 return constant_boolean_node (0, type);
8968 && ((code0 == MINUS_EXPR && is_positive > 0)
8969 || (code0 == PLUS_EXPR && is_positive < 0)))
8970 return constant_boolean_node (0, type);
8974 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8975 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8976 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8977 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8978 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8979 && !(flag_wrapv || flag_trapv))
8980 && (TREE_CODE (arg1) == INTEGER_CST
8981 && !TREE_OVERFLOW (arg1)))
8983 tree const1 = TREE_OPERAND (arg0, 1);
8985 tree variable = TREE_OPERAND (arg0, 0);
8988 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8990 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8991 TREE_TYPE (arg1), const2, const1);
8992 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8993 && (TREE_CODE (lhs) != INTEGER_CST
8994 || !TREE_OVERFLOW (lhs)))
8995 return fold_build2 (code, type, variable, lhs);
8998 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9000 tree targ0 = strip_float_extensions (arg0);
9001 tree targ1 = strip_float_extensions (arg1);
9002 tree newtype = TREE_TYPE (targ0);
9004 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9005 newtype = TREE_TYPE (targ1);
9007 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9008 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9009 return fold_build2 (code, type, fold_convert (newtype, targ0),
9010 fold_convert (newtype, targ1));
9012 /* (-a) CMP (-b) -> b CMP a */
9013 if (TREE_CODE (arg0) == NEGATE_EXPR
9014 && TREE_CODE (arg1) == NEGATE_EXPR)
9015 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9016 TREE_OPERAND (arg0, 0));
9018 if (TREE_CODE (arg1) == REAL_CST)
9020 REAL_VALUE_TYPE cst;
9021 cst = TREE_REAL_CST (arg1);
9023 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9024 if (TREE_CODE (arg0) == NEGATE_EXPR)
9026 fold_build2 (swap_tree_comparison (code), type,
9027 TREE_OPERAND (arg0, 0),
9028 build_real (TREE_TYPE (arg1),
9029 REAL_VALUE_NEGATE (cst)));
9031 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9032 /* a CMP (-0) -> a CMP 0 */
9033 if (REAL_VALUE_MINUS_ZERO (cst))
9034 return fold_build2 (code, type, arg0,
9035 build_real (TREE_TYPE (arg1), dconst0));
9037 /* x != NaN is always true, other ops are always false. */
9038 if (REAL_VALUE_ISNAN (cst)
9039 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9041 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9042 return omit_one_operand (type, tem, arg0);
9045 /* Fold comparisons against infinity. */
9046 if (REAL_VALUE_ISINF (cst))
9048 tem = fold_inf_compare (code, type, arg0, arg1);
9049 if (tem != NULL_TREE)
9054 /* If this is a comparison of a real constant with a PLUS_EXPR
9055 or a MINUS_EXPR of a real constant, we can convert it into a
9056 comparison with a revised real constant as long as no overflow
9057 occurs when unsafe_math_optimizations are enabled. */
9058 if (flag_unsafe_math_optimizations
9059 && TREE_CODE (arg1) == REAL_CST
9060 && (TREE_CODE (arg0) == PLUS_EXPR
9061 || TREE_CODE (arg0) == MINUS_EXPR)
9062 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9063 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9064 ? MINUS_EXPR : PLUS_EXPR,
9065 arg1, TREE_OPERAND (arg0, 1), 0))
9066 && ! TREE_CONSTANT_OVERFLOW (tem))
9067 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9069 /* Likewise, we can simplify a comparison of a real constant with
9070 a MINUS_EXPR whose first operand is also a real constant, i.e.
9071 (c1 - x) < c2 becomes x > c1-c2. */
9072 if (flag_unsafe_math_optimizations
9073 && TREE_CODE (arg1) == REAL_CST
9074 && TREE_CODE (arg0) == MINUS_EXPR
9075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9076 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9078 && ! TREE_CONSTANT_OVERFLOW (tem))
9079 return fold_build2 (swap_tree_comparison (code), type,
9080 TREE_OPERAND (arg0, 1), tem);
9082 /* Fold comparisons against built-in math functions. */
9083 if (TREE_CODE (arg1) == REAL_CST
9084 && flag_unsafe_math_optimizations
9085 && ! flag_errno_math)
9087 enum built_in_function fcode = builtin_mathfn_code (arg0);
9089 if (fcode != END_BUILTINS)
9091 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9092 if (tem != NULL_TREE)
9098 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9099 if (TREE_CONSTANT (arg1)
9100 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9101 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9102 /* This optimization is invalid for ordered comparisons
9103 if CONST+INCR overflows or if foo+incr might overflow.
9104 This optimization is invalid for floating point due to rounding.
9105 For pointer types we assume overflow doesn't happen. */
9106 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9107 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9108 && (code == EQ_EXPR || code == NE_EXPR))))
9110 tree varop, newconst;
9112 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9114 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9115 arg1, TREE_OPERAND (arg0, 1));
9116 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9117 TREE_OPERAND (arg0, 0),
9118 TREE_OPERAND (arg0, 1));
9122 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9123 arg1, TREE_OPERAND (arg0, 1));
9124 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9125 TREE_OPERAND (arg0, 0),
9126 TREE_OPERAND (arg0, 1));
9130 /* If VAROP is a reference to a bitfield, we must mask
9131 the constant by the width of the field. */
9132 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9133 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9134 && host_integerp (DECL_SIZE (TREE_OPERAND
9135 (TREE_OPERAND (varop, 0), 1)), 1))
9137 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9138 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9139 tree folded_compare, shift;
9141 /* First check whether the comparison would come out
9142 always the same. If we don't do that we would
9143 change the meaning with the masking. */
9144 folded_compare = fold_build2 (code, type,
9145 TREE_OPERAND (varop, 0), arg1);
9146 if (integer_zerop (folded_compare)
9147 || integer_onep (folded_compare))
9148 return omit_one_operand (type, folded_compare, varop);
9150 shift = build_int_cst (NULL_TREE,
9151 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9152 shift = fold_convert (TREE_TYPE (varop), shift);
9153 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9155 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9159 return fold_build2 (code, type, varop, newconst);
9162 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9163 This transformation affects the cases which are handled in later
9164 optimizations involving comparisons with non-negative constants. */
9165 if (TREE_CODE (arg1) == INTEGER_CST
9166 && TREE_CODE (arg0) != INTEGER_CST
9167 && tree_int_cst_sgn (arg1) > 0)
9172 arg1 = const_binop (MINUS_EXPR, arg1,
9173 build_int_cst (TREE_TYPE (arg1), 1), 0);
9174 return fold_build2 (GT_EXPR, type, arg0,
9175 fold_convert (TREE_TYPE (arg0), arg1));
9178 arg1 = const_binop (MINUS_EXPR, arg1,
9179 build_int_cst (TREE_TYPE (arg1), 1), 0);
9180 return fold_build2 (LE_EXPR, type, arg0,
9181 fold_convert (TREE_TYPE (arg0), arg1));
9188 /* Comparisons with the highest or lowest possible integer of
9189 the specified size will have known values. */
9191 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9193 if (TREE_CODE (arg1) == INTEGER_CST
9194 && ! TREE_CONSTANT_OVERFLOW (arg1)
9195 && width <= 2 * HOST_BITS_PER_WIDE_INT
9196 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9197 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9199 HOST_WIDE_INT signed_max_hi;
9200 unsigned HOST_WIDE_INT signed_max_lo;
9201 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9203 if (width <= HOST_BITS_PER_WIDE_INT)
9205 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9210 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9212 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9218 max_lo = signed_max_lo;
9219 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9225 width -= HOST_BITS_PER_WIDE_INT;
9227 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9232 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9234 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9239 max_hi = signed_max_hi;
9240 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9244 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9245 && TREE_INT_CST_LOW (arg1) == max_lo)
9249 return omit_one_operand (type, integer_zero_node, arg0);
9252 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9255 return omit_one_operand (type, integer_one_node, arg0);
9258 return fold_build2 (NE_EXPR, type, arg0, arg1);
9260 /* The GE_EXPR and LT_EXPR cases above are not normally
9261 reached because of previous transformations. */
9266 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9268 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9272 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9273 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9275 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9276 return fold_build2 (NE_EXPR, type, arg0, arg1);
9280 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9282 && TREE_INT_CST_LOW (arg1) == min_lo)
9286 return omit_one_operand (type, integer_zero_node, arg0);
9289 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9292 return omit_one_operand (type, integer_one_node, arg0);
9295 return fold_build2 (NE_EXPR, type, op0, op1);
9300 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9302 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9306 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9307 return fold_build2 (NE_EXPR, type, arg0, arg1);
9309 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9310 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9315 else if (!in_gimple_form
9316 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9317 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9318 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9319 /* signed_type does not work on pointer types. */
9320 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9322 /* The following case also applies to X < signed_max+1
9323 and X >= signed_max+1 because previous transformations. */
9324 if (code == LE_EXPR || code == GT_EXPR)
9327 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9328 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9329 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9330 type, fold_convert (st0, arg0),
9331 build_int_cst (st1, 0));
9337 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9338 a MINUS_EXPR of a constant, we can convert it into a comparison with
9339 a revised constant as long as no overflow occurs. */
9340 if ((code == EQ_EXPR || code == NE_EXPR)
9341 && TREE_CODE (arg1) == INTEGER_CST
9342 && (TREE_CODE (arg0) == PLUS_EXPR
9343 || TREE_CODE (arg0) == MINUS_EXPR)
9344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9345 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9346 ? MINUS_EXPR : PLUS_EXPR,
9347 arg1, TREE_OPERAND (arg0, 1), 0))
9348 && ! TREE_CONSTANT_OVERFLOW (tem))
9349 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9351 /* Similarly for a NEGATE_EXPR. */
9352 else if ((code == EQ_EXPR || code == NE_EXPR)
9353 && TREE_CODE (arg0) == NEGATE_EXPR
9354 && TREE_CODE (arg1) == INTEGER_CST
9355 && 0 != (tem = negate_expr (arg1))
9356 && TREE_CODE (tem) == INTEGER_CST
9357 && ! TREE_CONSTANT_OVERFLOW (tem))
9358 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9360 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9361 for !=. Don't do this for ordered comparisons due to overflow. */
9362 else if ((code == NE_EXPR || code == EQ_EXPR)
9363 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9364 return fold_build2 (code, type,
9365 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9367 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9368 && (TREE_CODE (arg0) == NOP_EXPR
9369 || TREE_CODE (arg0) == CONVERT_EXPR))
9371 /* If we are widening one operand of an integer comparison,
9372 see if the other operand is similarly being widened. Perhaps we
9373 can do the comparison in the narrower type. */
9374 tem = fold_widened_comparison (code, type, arg0, arg1);
9378 /* Or if we are changing signedness. */
9379 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9384 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9385 constant, we can simplify it. */
9386 else if (TREE_CODE (arg1) == INTEGER_CST
9387 && (TREE_CODE (arg0) == MIN_EXPR
9388 || TREE_CODE (arg0) == MAX_EXPR)
9389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9391 tem = optimize_minmax_comparison (code, type, op0, op1);
9398 /* If we are comparing an ABS_EXPR with a constant, we can
9399 convert all the cases into explicit comparisons, but they may
9400 well not be faster than doing the ABS and one comparison.
9401 But ABS (X) <= C is a range comparison, which becomes a subtraction
9402 and a comparison, and is probably faster. */
9403 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9404 && TREE_CODE (arg0) == ABS_EXPR
9405 && ! TREE_SIDE_EFFECTS (arg0)
9406 && (0 != (tem = negate_expr (arg1)))
9407 && TREE_CODE (tem) == INTEGER_CST
9408 && ! TREE_CONSTANT_OVERFLOW (tem))
9409 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9410 build2 (GE_EXPR, type,
9411 TREE_OPERAND (arg0, 0), tem),
9412 build2 (LE_EXPR, type,
9413 TREE_OPERAND (arg0, 0), arg1));
9415 /* Convert ABS_EXPR<x> >= 0 to true. */
9416 else if (code == GE_EXPR
9417 && tree_expr_nonnegative_p (arg0)
9418 && (integer_zerop (arg1)
9419 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9420 && real_zerop (arg1))))
9421 return omit_one_operand (type, integer_one_node, arg0);
9423 /* Convert ABS_EXPR<x> < 0 to false. */
9424 else if (code == LT_EXPR
9425 && tree_expr_nonnegative_p (arg0)
9426 && (integer_zerop (arg1) || real_zerop (arg1)))
9427 return omit_one_operand (type, integer_zero_node, arg0);
9429 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9430 else if ((code == EQ_EXPR || code == NE_EXPR)
9431 && TREE_CODE (arg0) == ABS_EXPR
9432 && (integer_zerop (arg1) || real_zerop (arg1)))
9433 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9435 /* If this is an EQ or NE comparison with zero and ARG0 is
9436 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9437 two operations, but the latter can be done in one less insn
9438 on machines that have only two-operand insns or on which a
9439 constant cannot be the first operand. */
9440 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9441 && TREE_CODE (arg0) == BIT_AND_EXPR)
9443 tree arg00 = TREE_OPERAND (arg0, 0);
9444 tree arg01 = TREE_OPERAND (arg0, 1);
9445 if (TREE_CODE (arg00) == LSHIFT_EXPR
9446 && integer_onep (TREE_OPERAND (arg00, 0)))
9448 fold_build2 (code, type,
9449 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9450 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9451 arg01, TREE_OPERAND (arg00, 1)),
9452 fold_convert (TREE_TYPE (arg0),
9455 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9456 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9458 fold_build2 (code, type,
9459 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9460 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9461 arg00, TREE_OPERAND (arg01, 1)),
9462 fold_convert (TREE_TYPE (arg0),
9467 /* If this is an NE or EQ comparison of zero against the result of a
9468 signed MOD operation whose second operand is a power of 2, make
9469 the MOD operation unsigned since it is simpler and equivalent. */
9470 if ((code == NE_EXPR || code == EQ_EXPR)
9471 && integer_zerop (arg1)
9472 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9473 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9474 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9475 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9476 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9477 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9479 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9480 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9481 fold_convert (newtype,
9482 TREE_OPERAND (arg0, 0)),
9483 fold_convert (newtype,
9484 TREE_OPERAND (arg0, 1)));
9486 return fold_build2 (code, type, newmod,
9487 fold_convert (newtype, arg1));
9490 /* If this is an NE comparison of zero with an AND of one, remove the
9491 comparison since the AND will give the correct value. */
9492 if (code == NE_EXPR && integer_zerop (arg1)
9493 && TREE_CODE (arg0) == BIT_AND_EXPR
9494 && integer_onep (TREE_OPERAND (arg0, 1)))
9495 return fold_convert (type, arg0);
9497 /* If we have (A & C) == C where C is a power of 2, convert this into
9498 (A & C) != 0. Similarly for NE_EXPR. */
9499 if ((code == EQ_EXPR || code == NE_EXPR)
9500 && TREE_CODE (arg0) == BIT_AND_EXPR
9501 && integer_pow2p (TREE_OPERAND (arg0, 1))
9502 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9503 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9504 arg0, fold_convert (TREE_TYPE (arg0),
9505 integer_zero_node));
9507 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9508 bit, then fold the expression into A < 0 or A >= 0. */
9509 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9513 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9514 Similarly for NE_EXPR. */
9515 if ((code == EQ_EXPR || code == NE_EXPR)
9516 && TREE_CODE (arg0) == BIT_AND_EXPR
9517 && TREE_CODE (arg1) == INTEGER_CST
9518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9520 tree notc = fold_build1 (BIT_NOT_EXPR,
9521 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9522 TREE_OPERAND (arg0, 1));
9523 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9525 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9526 if (integer_nonzerop (dandnotc))
9527 return omit_one_operand (type, rslt, arg0);
9530 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9531 Similarly for NE_EXPR. */
9532 if ((code == EQ_EXPR || code == NE_EXPR)
9533 && TREE_CODE (arg0) == BIT_IOR_EXPR
9534 && TREE_CODE (arg1) == INTEGER_CST
9535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9537 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9538 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9539 TREE_OPERAND (arg0, 1), notd);
9540 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9541 if (integer_nonzerop (candnotd))
9542 return omit_one_operand (type, rslt, arg0);
9545 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9546 and similarly for >= into !=. */
9547 if ((code == LT_EXPR || code == GE_EXPR)
9548 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9549 && TREE_CODE (arg1) == LSHIFT_EXPR
9550 && integer_onep (TREE_OPERAND (arg1, 0)))
9551 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9552 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9553 TREE_OPERAND (arg1, 1)),
9554 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9556 else if ((code == LT_EXPR || code == GE_EXPR)
9557 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9558 && (TREE_CODE (arg1) == NOP_EXPR
9559 || TREE_CODE (arg1) == CONVERT_EXPR)
9560 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9561 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9563 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9564 fold_convert (TREE_TYPE (arg0),
9565 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9566 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9568 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9570 /* Simplify comparison of something with itself. (For IEEE
9571 floating-point, we can only do some of these simplifications.) */
9572 if (operand_equal_p (arg0, arg1, 0))
9577 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9578 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9579 return constant_boolean_node (1, type);
9584 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9585 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9586 return constant_boolean_node (1, type);
9587 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9590 /* For NE, we can only do this simplification if integer
9591 or we don't honor IEEE floating point NaNs. */
9592 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9593 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9595 /* ... fall through ... */
9598 return constant_boolean_node (0, type);
9604 /* If we are comparing an expression that just has comparisons
9605 of two integer values, arithmetic expressions of those comparisons,
9606 and constants, we can simplify it. There are only three cases
9607 to check: the two values can either be equal, the first can be
9608 greater, or the second can be greater. Fold the expression for
9609 those three values. Since each value must be 0 or 1, we have
9610 eight possibilities, each of which corresponds to the constant 0
9611 or 1 or one of the six possible comparisons.
9613 This handles common cases like (a > b) == 0 but also handles
9614 expressions like ((x > y) - (y > x)) > 0, which supposedly
9615 occur in macroized code. */
9617 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9619 tree cval1 = 0, cval2 = 0;
9622 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9623 /* Don't handle degenerate cases here; they should already
9624 have been handled anyway. */
9625 && cval1 != 0 && cval2 != 0
9626 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9627 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9628 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9629 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9630 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9631 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9632 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9634 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9635 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9637 /* We can't just pass T to eval_subst in case cval1 or cval2
9638 was the same as ARG1. */
9641 = fold_build2 (code, type,
9642 eval_subst (arg0, cval1, maxval,
9646 = fold_build2 (code, type,
9647 eval_subst (arg0, cval1, maxval,
9651 = fold_build2 (code, type,
9652 eval_subst (arg0, cval1, minval,
9656 /* All three of these results should be 0 or 1. Confirm they
9657 are. Then use those values to select the proper code
9660 if ((integer_zerop (high_result)
9661 || integer_onep (high_result))
9662 && (integer_zerop (equal_result)
9663 || integer_onep (equal_result))
9664 && (integer_zerop (low_result)
9665 || integer_onep (low_result)))
9667 /* Make a 3-bit mask with the high-order bit being the
9668 value for `>', the next for '=', and the low for '<'. */
9669 switch ((integer_onep (high_result) * 4)
9670 + (integer_onep (equal_result) * 2)
9671 + integer_onep (low_result))
9675 return omit_one_operand (type, integer_zero_node, arg0);
9696 return omit_one_operand (type, integer_one_node, arg0);
9700 return save_expr (build2 (code, type, cval1, cval2));
9702 return fold_build2 (code, type, cval1, cval2);
9707 /* If this is a comparison of a field, we may be able to simplify it. */
9708 if (((TREE_CODE (arg0) == COMPONENT_REF
9709 && lang_hooks.can_use_bit_fields_p ())
9710 || TREE_CODE (arg0) == BIT_FIELD_REF)
9711 && (code == EQ_EXPR || code == NE_EXPR)
9712 /* Handle the constant case even without -O
9713 to make sure the warnings are given. */
9714 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9716 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9721 /* Fold a comparison of the address of COMPONENT_REFs with the same
9722 type and component to a comparison of the address of the base
9723 object. In short, &x->a OP &y->a to x OP y and
9724 &x->a OP &y.a to x OP &y */
9725 if (TREE_CODE (arg0) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9727 && TREE_CODE (arg1) == ADDR_EXPR
9728 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9730 tree cref0 = TREE_OPERAND (arg0, 0);
9731 tree cref1 = TREE_OPERAND (arg1, 0);
9732 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9734 tree op0 = TREE_OPERAND (cref0, 0);
9735 tree op1 = TREE_OPERAND (cref1, 0);
9736 return fold_build2 (code, type,
9737 build_fold_addr_expr (op0),
9738 build_fold_addr_expr (op1));
9742 /* Optimize comparisons of strlen vs zero to a compare of the
9743 first character of the string vs zero. To wit,
9744 strlen(ptr) == 0 => *ptr == 0
9745 strlen(ptr) != 0 => *ptr != 0
9746 Other cases should reduce to one of these two (or a constant)
9747 due to the return value of strlen being unsigned. */
9748 if ((code == EQ_EXPR || code == NE_EXPR)
9749 && integer_zerop (arg1)
9750 && TREE_CODE (arg0) == CALL_EXPR)
9752 tree fndecl = get_callee_fndecl (arg0);
9756 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9757 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9758 && (arglist = TREE_OPERAND (arg0, 1))
9759 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9760 && ! TREE_CHAIN (arglist))
9762 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9763 return fold_build2 (code, type, iref,
9764 build_int_cst (TREE_TYPE (iref), 0));
9768 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9769 into a single range test. */
9770 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9771 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9772 && TREE_CODE (arg1) == INTEGER_CST
9773 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9774 && !integer_zerop (TREE_OPERAND (arg0, 1))
9775 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9776 && !TREE_OVERFLOW (arg1))
9778 t1 = fold_div_compare (code, type, arg0, arg1);
9779 if (t1 != NULL_TREE)
9783 if ((code == EQ_EXPR || code == NE_EXPR)
9784 && integer_zerop (arg1)
9785 && tree_expr_nonzero_p (arg0))
9787 tree res = constant_boolean_node (code==NE_EXPR, type);
9788 return omit_one_operand (type, res, arg0);
9791 t1 = fold_relational_const (code, type, arg0, arg1);
9792 return t1 == NULL_TREE ? NULL_TREE : t1;
9794 case UNORDERED_EXPR:
9802 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9804 t1 = fold_relational_const (code, type, arg0, arg1);
9805 if (t1 != NULL_TREE)
9809 /* If the first operand is NaN, the result is constant. */
9810 if (TREE_CODE (arg0) == REAL_CST
9811 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9812 && (code != LTGT_EXPR || ! flag_trapping_math))
9814 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9817 return omit_one_operand (type, t1, arg1);
9820 /* If the second operand is NaN, the result is constant. */
9821 if (TREE_CODE (arg1) == REAL_CST
9822 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9823 && (code != LTGT_EXPR || ! flag_trapping_math))
9825 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9828 return omit_one_operand (type, t1, arg0);
9831 /* Simplify unordered comparison of something with itself. */
9832 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9833 && operand_equal_p (arg0, arg1, 0))
9834 return constant_boolean_node (1, type);
9836 if (code == LTGT_EXPR
9837 && !flag_trapping_math
9838 && operand_equal_p (arg0, arg1, 0))
9839 return constant_boolean_node (0, type);
9841 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9843 tree targ0 = strip_float_extensions (arg0);
9844 tree targ1 = strip_float_extensions (arg1);
9845 tree newtype = TREE_TYPE (targ0);
9847 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9848 newtype = TREE_TYPE (targ1);
9850 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9851 return fold_build2 (code, type, fold_convert (newtype, targ0),
9852 fold_convert (newtype, targ1));
9858 /* When pedantic, a compound expression can be neither an lvalue
9859 nor an integer constant expression. */
9860 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9862 /* Don't let (0, 0) be null pointer constant. */
9863 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9864 : fold_convert (type, arg1);
9865 return pedantic_non_lvalue (tem);
9869 return build_complex (type, arg0, arg1);
9873 /* An ASSERT_EXPR should never be passed to fold_binary. */
9878 } /* switch (code) */
9881 /* Callback for walk_tree, looking for LABEL_EXPR.
9882 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
9883 Do not check the sub-tree of GOTO_EXPR. */
/* NOTE(review): only the head of this callback is present in this
   extraction -- the switch cases (presumably LABEL_EXPR returning *tp,
   and GOTO_EXPR pruning the walk) and the closing return are elided.
   Confirm against the complete source before editing.  */
9886 contains_label_1 (tree *tp,
9888 void *data ATTRIBUTE_UNUSED)
9890 switch (TREE_CODE (*tp))
9902 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9903 accessible from outside the sub-tree.  Returns nonzero (true) if such
9904 a label is found, zero otherwise.  */
9907 contains_label_p (tree st)
/* Walk every node of ST with contains_label_1; a non-NULL result from
   walk_tree means the callback flagged an accessible label.  */
9909 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9912 /* Fold a ternary expression of code CODE and type TYPE with operands
9913 OP0, OP1, and OP2. Return the folded expression if folding is
9914 successful. Otherwise, return NULL_TREE. */
9917 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9920 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9921 enum tree_code_class kind = TREE_CODE_CLASS (code);
9923 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9924 && TREE_CODE_LENGTH (code) == 3);
9926 /* Strip any conversions that don't change the mode. This is safe
9927 for every expression, except for a comparison expression because
9928 its signedness is derived from its operands. So, in the latter
9929 case, only strip conversions that don't change the signedness.
9931 Note that this is done as an internal manipulation within the
9932 constant folder, in order to find the simplest representation of
9933 the arguments so that their form can be studied. In any cases,
9934 the appropriate type conversions should be put back in the tree
9935 that will get out of the constant folder. */
9951 if (TREE_CODE (arg0) == CONSTRUCTOR
9952 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9954 unsigned HOST_WIDE_INT idx;
9956 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9963 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9964 so all simple results must be passed through pedantic_non_lvalue. */
9965 if (TREE_CODE (arg0) == INTEGER_CST)
9967 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9968 tem = integer_zerop (arg0) ? op2 : op1;
9969 /* Only optimize constant conditions when the selected branch
9970 has the same type as the COND_EXPR. This avoids optimizing
9971 away "c ? x : throw", where the throw has a void type.
9972 Avoid throwing away that operand which contains label. */
9973 if ((!TREE_SIDE_EFFECTS (unused_op)
9974 || !contains_label_p (unused_op))
9975 && (! VOID_TYPE_P (TREE_TYPE (tem))
9976 || VOID_TYPE_P (type)))
9977 return pedantic_non_lvalue (tem);
9980 if (operand_equal_p (arg1, op2, 0))
9981 return pedantic_omit_one_operand (type, arg1, arg0);
9983 /* If we have A op B ? A : C, we may be able to convert this to a
9984 simpler expression, depending on the operation and the values
9985 of B and C. Signed zeros prevent all of these transformations,
9986 for reasons given above each one.
9988 Also try swapping the arguments and inverting the conditional. */
9989 if (COMPARISON_CLASS_P (arg0)
9990 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9991 arg1, TREE_OPERAND (arg0, 1))
9992 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9994 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9999 if (COMPARISON_CLASS_P (arg0)
10000 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10002 TREE_OPERAND (arg0, 1))
10003 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10005 tem = invert_truthvalue (arg0);
10006 if (COMPARISON_CLASS_P (tem))
10008 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10014 /* If the second operand is simpler than the third, swap them
10015 since that produces better jump optimization results. */
10016 if (truth_value_p (TREE_CODE (arg0))
10017 && tree_swap_operands_p (op1, op2, false))
10019 /* See if this can be inverted. If it can't, possibly because
10020 it was a floating-point inequality comparison, don't do
10022 tem = invert_truthvalue (arg0);
10024 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10025 return fold_build3 (code, type, tem, op2, op1);
10028 /* Convert A ? 1 : 0 to simply A. */
10029 if (integer_onep (op1)
10030 && integer_zerop (op2)
10031 /* If we try to convert OP0 to our type, the
10032 call to fold will try to move the conversion inside
10033 a COND, which will recurse. In that case, the COND_EXPR
10034 is probably the best choice, so leave it alone. */
10035 && type == TREE_TYPE (arg0))
10036 return pedantic_non_lvalue (arg0);
10038 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10039 over COND_EXPR in cases such as floating point comparisons. */
10040 if (integer_zerop (op1)
10041 && integer_onep (op2)
10042 && truth_value_p (TREE_CODE (arg0)))
10043 return pedantic_non_lvalue (fold_convert (type,
10044 invert_truthvalue (arg0)));
10046 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10047 if (TREE_CODE (arg0) == LT_EXPR
10048 && integer_zerop (TREE_OPERAND (arg0, 1))
10049 && integer_zerop (op2)
10050 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10051 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10052 TREE_TYPE (tem), tem, arg1));
10054 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10055 already handled above. */
10056 if (TREE_CODE (arg0) == BIT_AND_EXPR
10057 && integer_onep (TREE_OPERAND (arg0, 1))
10058 && integer_zerop (op2)
10059 && integer_pow2p (arg1))
10061 tree tem = TREE_OPERAND (arg0, 0);
10063 if (TREE_CODE (tem) == RSHIFT_EXPR
10064 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10065 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10066 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10067 return fold_build2 (BIT_AND_EXPR, type,
10068 TREE_OPERAND (tem, 0), arg1);
10071 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10072 is probably obsolete because the first operand should be a
10073 truth value (that's why we have the two cases above), but let's
10074 leave it in until we can confirm this for all front-ends. */
10075 if (integer_zerop (op2)
10076 && TREE_CODE (arg0) == NE_EXPR
10077 && integer_zerop (TREE_OPERAND (arg0, 1))
10078 && integer_pow2p (arg1)
10079 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10080 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10081 arg1, OEP_ONLY_CONST))
10082 return pedantic_non_lvalue (fold_convert (type,
10083 TREE_OPERAND (arg0, 0)));
10085 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10086 if (integer_zerop (op2)
10087 && truth_value_p (TREE_CODE (arg0))
10088 && truth_value_p (TREE_CODE (arg1)))
10089 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10091 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10092 if (integer_onep (op2)
10093 && truth_value_p (TREE_CODE (arg0))
10094 && truth_value_p (TREE_CODE (arg1)))
10096 /* Only perform transformation if ARG0 is easily inverted. */
10097 tem = invert_truthvalue (arg0);
10098 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10099 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10102 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10103 if (integer_zerop (arg1)
10104 && truth_value_p (TREE_CODE (arg0))
10105 && truth_value_p (TREE_CODE (op2)))
10107 /* Only perform transformation if ARG0 is easily inverted. */
10108 tem = invert_truthvalue (arg0);
10109 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10110 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10113 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10114 if (integer_onep (arg1)
10115 && truth_value_p (TREE_CODE (arg0))
10116 && truth_value_p (TREE_CODE (op2)))
10117 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10122 /* Check for a built-in function. */
10123 if (TREE_CODE (op0) == ADDR_EXPR
10124 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10125 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10126 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10129 case BIT_FIELD_REF:
10130 if (TREE_CODE (arg0) == VECTOR_CST
10131 && type == TREE_TYPE (TREE_TYPE (arg0))
10132 && host_integerp (arg1, 1)
10133 && host_integerp (op2, 1))
10135 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10136 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10139 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10140 && (idx % width) == 0
10141 && (idx = idx / width)
10142 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10144 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10145 while (idx-- > 0 && elements)
10146 elements = TREE_CHAIN (elements);
10148 return TREE_VALUE (elements);
10150 return fold_convert (type, integer_zero_node);
10157 } /* switch (code) */
10160 /* Perform constant folding and related simplification of EXPR.
10161 The related simplifications include x*1 => x, x*0 => 0, etc.,
10162 and application of the associative law.
10163 NOP_EXPR conversions may be removed freely (as long as we
10164 are careful not to change the type of the overall expression).
10165 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10166 but we can constant-fold them if they have constant operands. */
10168 #ifdef ENABLE_FOLD_CHECKING
10169 # define fold(x) fold_1 (x)
10170 static tree fold_1 (tree);
10176 const tree t = expr;
10177 enum tree_code code = TREE_CODE (t);
10178 enum tree_code_class kind = TREE_CODE_CLASS (code);
10181 /* Return right away if a constant. */
10182 if (kind == tcc_constant)
10185 if (IS_EXPR_CODE_CLASS (kind))
10187 tree type = TREE_TYPE (t);
10188 tree op0, op1, op2;
10190 switch (TREE_CODE_LENGTH (code))
10193 op0 = TREE_OPERAND (t, 0);
10194 tem = fold_unary (code, type, op0);
10195 return tem ? tem : expr;
10197 op0 = TREE_OPERAND (t, 0);
10198 op1 = TREE_OPERAND (t, 1);
10199 tem = fold_binary (code, type, op0, op1);
10200 return tem ? tem : expr;
10202 op0 = TREE_OPERAND (t, 0);
10203 op1 = TREE_OPERAND (t, 1);
10204 op2 = TREE_OPERAND (t, 2);
10205 tem = fold_ternary (code, type, op0, op1, op2);
10206 return tem ? tem : expr;
10215 return fold (DECL_INITIAL (t));
10219 } /* switch (code) */
10222 #ifdef ENABLE_FOLD_CHECKING
10225 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10226 static void fold_check_failed (tree, tree);
10227 void print_fold_checksum (tree);
10229 /* When --enable-checking=fold, compute a digest of expr before
10230 and after actual fold call to see if fold did not accidentally
10231 change original expr. */
10237 struct md5_ctx ctx;
10238 unsigned char checksum_before[16], checksum_after[16];
10241 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10242 md5_init_ctx (&ctx);
10243 fold_checksum_tree (expr, &ctx, ht);
10244 md5_finish_ctx (&ctx, checksum_before);
10247 ret = fold_1 (expr);
10249 md5_init_ctx (&ctx);
10250 fold_checksum_tree (expr, &ctx, ht);
10251 md5_finish_ctx (&ctx, checksum_after);
10254 if (memcmp (checksum_before, checksum_after, 16))
10255 fold_check_failed (expr, ret);
10261 print_fold_checksum (tree expr)
10263 struct md5_ctx ctx;
10264 unsigned char checksum[16], cnt;
10267 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10268 md5_init_ctx (&ctx);
10269 fold_checksum_tree (expr, &ctx, ht);
10270 md5_finish_ctx (&ctx, checksum);
10272 for (cnt = 0; cnt < 16; ++cnt)
10273 fprintf (stderr, "%02x", checksum[cnt]);
10274 putc ('\n', stderr);
10278 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10280 internal_error ("fold check: original tree changed by fold");
/* Accumulate into CTX an md5 digest of the tree node EXPR, recursing
   through its type, chain and code-class-specific fields.  HT records
   nodes already visited so shared subtrees are digested only once.
   Fields that fold is explicitly allowed to change (assembler name,
   pointer-to/reference-to caches, cached values, placeholder flag)
   are scrubbed from a private copy in BUF before digesting, so
   legitimate changes do not trip the checksum comparison in fold ().
   NOTE(review): this excerpt elides several structural lines (braces,
   some case labels, the redirection of EXPR to the scrubbed copy);
   the surviving code lines are kept byte-identical below.  */
10284 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10287 enum tree_code code;
10288 char buf[sizeof (struct tree_function_decl)];
/* BUF must be large enough for any node copied into it; the assert
   pins tree_function_decl as the largest candidate.  */
10293 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10294 <= sizeof (struct tree_function_decl))
10295 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
/* Skip nodes already checksummed (shared subtrees).  */
10298 slot = htab_find_slot (ht, expr, INSERT);
10302 code = TREE_CODE (expr);
10303 if (TREE_CODE_CLASS (code) == tcc_declaration
10304 && DECL_ASSEMBLER_NAME_SET_P (expr))
10306 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10307 memcpy (buf, expr, tree_size (expr));
10309 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10311 else if (TREE_CODE_CLASS (code) == tcc_type
10312 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10313 || TYPE_CACHED_VALUES_P (expr)
10314 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10316 /* Allow these fields to be modified. */
10317 memcpy (buf, expr, tree_size (expr));
10319 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10320 TYPE_POINTER_TO (expr) = NULL;
10321 TYPE_REFERENCE_TO (expr) = NULL;
10322 if (TYPE_CACHED_VALUES_P (expr))
10324 TYPE_CACHED_VALUES_P (expr) = 0;
10325 TYPE_CACHED_VALUES (expr) = NULL;
/* Digest the node's own bytes, then its type, then (for classes that
   chain meaningfully) its TREE_CHAIN.  */
10328 md5_process_bytes (expr, tree_size (expr), ctx);
10329 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10330 if (TREE_CODE_CLASS (code) != tcc_type
10331 && TREE_CODE_CLASS (code) != tcc_declaration
10332 && code != TREE_LIST)
10333 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Recurse into the fields specific to each tree code class.  */
10334 switch (TREE_CODE_CLASS (code))
10340 md5_process_bytes (TREE_STRING_POINTER (expr),
10341 TREE_STRING_LENGTH (expr), ctx);
10344 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10345 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10348 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10354 case tcc_exceptional:
/* TREE_LIST: digest purpose/value, then walk down the chain
   iteratively (goto) rather than recursing on each link.  */
10358 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10359 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10360 expr = TREE_CHAIN (expr);
10361 goto recursive_label;
10364 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10365 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10371 case tcc_expression:
10372 case tcc_reference:
10373 case tcc_comparison:
10376 case tcc_statement:
/* Expression-like nodes: digest every operand.  */
10377 len = TREE_CODE_LENGTH (code);
10378 for (i = 0; i < len; ++i)
10379 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10381 case tcc_declaration:
10382 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10383 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10384 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10385 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10386 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10387 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10388 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10389 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10390 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10392 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10394 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10395 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10396 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* Types: digest size, attributes, name, bounds and variants.  */
10400 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10401 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10402 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10403 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10404 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10405 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10406 if (INTEGRAL_TYPE_P (expr)
10407 || SCALAR_FLOAT_TYPE_P (expr))
10409 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10410 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10412 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10413 if (TREE_CODE (expr) == RECORD_TYPE
10414 || TREE_CODE (expr) == UNION_TYPE
10415 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10416 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10417 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10426 /* Fold a unary tree expression with code CODE of type TYPE with an
10427 operand OP0. Return a folded expression if successful. Otherwise,
10428 return a tree expression with code CODE of type TYPE with an
10432 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10435 #ifdef ENABLE_FOLD_CHECKING
10436 unsigned char checksum_before[16], checksum_after[16];
10437 struct md5_ctx ctx;
10440 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10441 md5_init_ctx (&ctx);
10442 fold_checksum_tree (op0, &ctx, ht);
10443 md5_finish_ctx (&ctx, checksum_before);
10447 tem = fold_unary (code, type, op0);
10449 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10451 #ifdef ENABLE_FOLD_CHECKING
10452 md5_init_ctx (&ctx);
10453 fold_checksum_tree (op0, &ctx, ht);
10454 md5_finish_ctx (&ctx, checksum_after);
10457 if (memcmp (checksum_before, checksum_after, 16))
10458 fold_check_failed (op0, tem);
10463 /* Fold a binary tree expression with code CODE of type TYPE with
10464 operands OP0 and OP1. Return a folded expression if successful.
10465 Otherwise, return a tree expression with code CODE of type TYPE
10466 with operands OP0 and OP1. */
10469 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10473 #ifdef ENABLE_FOLD_CHECKING
10474 unsigned char checksum_before_op0[16],
10475 checksum_before_op1[16],
10476 checksum_after_op0[16],
10477 checksum_after_op1[16];
10478 struct md5_ctx ctx;
10481 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10482 md5_init_ctx (&ctx);
10483 fold_checksum_tree (op0, &ctx, ht);
10484 md5_finish_ctx (&ctx, checksum_before_op0);
10487 md5_init_ctx (&ctx);
10488 fold_checksum_tree (op1, &ctx, ht);
10489 md5_finish_ctx (&ctx, checksum_before_op1);
10493 tem = fold_binary (code, type, op0, op1);
10495 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10497 #ifdef ENABLE_FOLD_CHECKING
10498 md5_init_ctx (&ctx);
10499 fold_checksum_tree (op0, &ctx, ht);
10500 md5_finish_ctx (&ctx, checksum_after_op0);
10503 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10504 fold_check_failed (op0, tem);
10506 md5_init_ctx (&ctx);
10507 fold_checksum_tree (op1, &ctx, ht);
10508 md5_finish_ctx (&ctx, checksum_after_op1);
10511 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10512 fold_check_failed (op1, tem);
10517 /* Fold a ternary tree expression with code CODE of type TYPE with
10518 operands OP0, OP1, and OP2. Return a folded expression if
10519 successful. Otherwise, return a tree expression with code CODE of
10520 type TYPE with operands OP0, OP1, and OP2. */
10523 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10527 #ifdef ENABLE_FOLD_CHECKING
10528 unsigned char checksum_before_op0[16],
10529 checksum_before_op1[16],
10530 checksum_before_op2[16],
10531 checksum_after_op0[16],
10532 checksum_after_op1[16],
10533 checksum_after_op2[16];
10534 struct md5_ctx ctx;
10537 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10538 md5_init_ctx (&ctx);
10539 fold_checksum_tree (op0, &ctx, ht);
10540 md5_finish_ctx (&ctx, checksum_before_op0);
10543 md5_init_ctx (&ctx);
10544 fold_checksum_tree (op1, &ctx, ht);
10545 md5_finish_ctx (&ctx, checksum_before_op1);
10548 md5_init_ctx (&ctx);
10549 fold_checksum_tree (op2, &ctx, ht);
10550 md5_finish_ctx (&ctx, checksum_before_op2);
10554 tem = fold_ternary (code, type, op0, op1, op2);
10556 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10558 #ifdef ENABLE_FOLD_CHECKING
10559 md5_init_ctx (&ctx);
10560 fold_checksum_tree (op0, &ctx, ht);
10561 md5_finish_ctx (&ctx, checksum_after_op0);
10564 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10565 fold_check_failed (op0, tem);
10567 md5_init_ctx (&ctx);
10568 fold_checksum_tree (op1, &ctx, ht);
10569 md5_finish_ctx (&ctx, checksum_after_op1);
10572 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10573 fold_check_failed (op1, tem);
10575 md5_init_ctx (&ctx);
10576 fold_checksum_tree (op2, &ctx, ht);
10577 md5_finish_ctx (&ctx, checksum_after_op2);
10580 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10581 fold_check_failed (op2, tem);
10586 /* Perform constant folding and related simplification of initializer
10587 expression EXPR. These behave identically to "fold_buildN" but ignore
10588 potential run-time traps and exceptions that fold must preserve. */
/* Save, clear, and later restore the flags that make fold preserve
   run-time traps and exceptions.  In the visible excerpt flag_trapv
   was saved but never cleared; restore the missing `flag_trapv = 0'
   so -ftrapv does not inhibit initializer folding.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv
10607 fold_build1_initializer (enum tree_code code, tree type, tree op)
10612 result = fold_build1 (code, type, op);
10619 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10624 result = fold_build2 (code, type, op0, op1);
10631 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10637 result = fold_build3 (code, type, op0, op1, op2);
10643 #undef START_FOLD_INIT
10644 #undef END_FOLD_INIT
10646 /* Determine if first argument is a multiple of second argument. Return 0 if
10647 it is not, or we cannot easily determine it to be.
10649 An example of the sort of thing we care about (at this point; this routine
10650 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10651 fold cases do now) is discovering that
10653 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10659 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10661 This code also handles discovering that
10663 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10665 is a multiple of 8 so we don't have to worry about dealing with a
10666 possible remainder.
10668 Note that we *look* inside a SAVE_EXPR only to determine how it was
10669 calculated; it is not safe for fold to do much of anything else with the
10670 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10671 at run time. For example, the latter example above *cannot* be implemented
10672 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10673 evaluation time of the original SAVE_EXPR is not necessarily the same at
10674 the time the new expression is evaluated. The only optimization of this
10675 sort that would be valid is changing
10677 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10681 SAVE_EXPR (I) * SAVE_EXPR (J)
10683 (where the same SAVE_EXPR (J) is used in the original and the
10684 transformed version). */
/* Nonzero when TOP is provably a multiple of BOTTOM when both are
   interpreted in TYPE.  Conservative: returns 0 when the property
   cannot be shown from the visible structure of TOP.
   NOTE(review): several structural lines (case labels, braces, some
   returns and the second const_binop argument) are elided in this
   excerpt; the surviving code lines are kept byte-identical below.  */
10687 multiple_of_p (tree type, tree top, tree bottom)
/* Trivially, anything is a multiple of itself.  */
10689 if (operand_equal_p (top, bottom, 0))
10692 if (TREE_CODE (type) != INTEGER_TYPE)
10695 switch (TREE_CODE (top))
10698 /* Bitwise and provides a power of two multiple. If the mask is
10699 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10700 if (!integer_pow2p (bottom))
/* A product is a multiple of BOTTOM if either factor is.  */
10705 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10706 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
10710 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10711 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A left shift by a small constant is handled by rewriting it as an
   equivalent multiplication (1 << N) and retrying.  */
10714 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST
10718 op1 = TREE_OPERAND (top, 1);
10719 /* const_binop may not detect overflow correctly,
10720 so check for it explicitly here. */
10721 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10722 > TREE_INT_CST_LOW (op1)
10723 && TREE_INT_CST_HIGH (op1) == 0
10724 && 0 != (t1 = fold_convert (type,
10725 const_binop (LSHIFT_EXPR,
10728 && ! TREE_OVERFLOW (t1))
10729 return multiple_of_p (type, t1, bottom);
10734 /* Can't handle conversions from non-integral or wider integral type. */
10735 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10736 || (TYPE_PRECISION (type)
10737 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10740 /* .. fall through ... */
/* Look through the conversion / SAVE_EXPR wrapper.  */
10743 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST: decide by an exact modulus, but only when signs make
   the unsigned modulus meaningful.  */
10746 if (TREE_CODE (bottom) != INTEGER_CST
10747 || (TYPE_UNSIGNED (type)
10748 && (tree_int_cst_sgn (top) < 0
10749 || tree_int_cst_sgn (bottom) < 0)))
10751 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10759 /* Return true if `t' is known to be non-negative. */
/* Conservatively decide whether T is known to be >= 0: true only when
   the visible structure of T proves it; false means "don't know".
   NOTE(review): many structural lines (case labels, braces, returns)
   are elided in this excerpt; the surviving code lines are kept
   byte-identical below.  */
10762 tree_expr_nonnegative_p (tree t)
/* Unsigned values are trivially non-negative.  */
10764 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10767 switch (TREE_CODE (t))
10770 /* We can't return 1 if flag_wrapv is set because
10771 ABS_EXPR<INT_MIN> = INT_MIN. */
10772 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
/* Constants: inspect the sign directly.  */
10777 return tree_int_cst_sgn (t) >= 0;
10780 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, nonneg + nonneg stays nonneg.  */
10783 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10784 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10785 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10787 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10788 both unsigned and at least 2 bits shorter than the result. */
10789 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10790 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10791 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10793 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10794 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10795 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10796 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10798 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10799 TYPE_PRECISION (inner2)) + 1;
10800 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
10806 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10808 /* x * x for floating point x is always non-negative. */
10809 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10811 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10812 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10815 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10816 both unsigned and their total bits is shorter than the result. */
10817 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10818 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10819 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10821 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10822 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10823 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10824 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10825 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10826 < TYPE_PRECISION (TREE_TYPE (t));
/* MIN/MAX-style case: either operand non-negative suffices here.  */
10832 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10833 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10839 case TRUNC_DIV_EXPR:
10840 case CEIL_DIV_EXPR:
10841 case FLOOR_DIV_EXPR:
10842 case ROUND_DIV_EXPR:
10843 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10844 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10846 case TRUNC_MOD_EXPR:
10847 case CEIL_MOD_EXPR:
10848 case FLOOR_MOD_EXPR:
10849 case ROUND_MOD_EXPR:
10851 case NON_LVALUE_EXPR:
/* Wrappers: look through to the first operand.  */
10853 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10855 case COMPOUND_EXPR:
/* Value of a compound expression is its second operand.  */
10857 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10860 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
/* Conditional: non-negative only if both arms are.  */
10863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10864 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* Conversions: reason about inner vs. outer type.  */
10868 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10869 tree outer_type = TREE_TYPE (t);
10871 if (TREE_CODE (outer_type) == REAL_TYPE)
10873 if (TREE_CODE (inner_type) == REAL_TYPE)
10874 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10875 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10877 if (TYPE_UNSIGNED (inner_type))
10879 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10882 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10884 if (TREE_CODE (inner_type) == REAL_TYPE)
10885 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10886 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10887 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10888 && TYPE_UNSIGNED (inner_type);
/* TARGET_EXPR: find the value actually stored into the slot.  */
10895 tree temp = TARGET_EXPR_SLOT (t);
10896 t = TARGET_EXPR_INITIAL (t);
10898 /* If the initializer is non-void, then it's a normal expression
10899 that will be assigned to the slot. */
10900 if (!VOID_TYPE_P (t))
10901 return tree_expr_nonnegative_p (t);
10903 /* Otherwise, the initializer sets the slot in some way. One common
10904 way is an assignment statement at the end of the initializer. */
10907 if (TREE_CODE (t) == BIND_EXPR)
10908 t = expr_last (BIND_EXPR_BODY (t));
10909 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10910 || TREE_CODE (t) == TRY_CATCH_EXPR)
10911 t = expr_last (TREE_OPERAND (t, 0));
10912 else if (TREE_CODE (t) == STATEMENT_LIST)
10917 if (TREE_CODE (t) == MODIFY_EXPR
10918 && TREE_OPERAND (t, 0) == temp)
10919 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: known math builtins with provably non-negative ranges.  */
10926 tree fndecl = get_callee_fndecl (t);
10927 tree arglist = TREE_OPERAND (t, 1);
10928 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10929 switch (DECL_FUNCTION_CODE (fndecl))
10931 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10932 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10933 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10934 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* Builtins whose result is always non-negative.  */
10936 CASE_BUILTIN_F (BUILT_IN_ACOS)
10937 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10938 CASE_BUILTIN_F (BUILT_IN_CABS)
10939 CASE_BUILTIN_F (BUILT_IN_COSH)
10940 CASE_BUILTIN_F (BUILT_IN_ERFC)
10941 CASE_BUILTIN_F (BUILT_IN_EXP)
10942 CASE_BUILTIN_F (BUILT_IN_EXP10)
10943 CASE_BUILTIN_F (BUILT_IN_EXP2)
10944 CASE_BUILTIN_F (BUILT_IN_FABS)
10945 CASE_BUILTIN_F (BUILT_IN_FDIM)
10946 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10947 CASE_BUILTIN_F (BUILT_IN_POW10)
10948 CASE_BUILTIN_I (BUILT_IN_FFS)
10949 CASE_BUILTIN_I (BUILT_IN_PARITY)
10950 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10954 CASE_BUILTIN_F (BUILT_IN_SQRT)
10955 /* sqrt(-0.0) is -0.0. */
10956 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10958 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
/* Builtins that preserve the sign of their first argument.  */
10960 CASE_BUILTIN_F (BUILT_IN_ASINH)
10961 CASE_BUILTIN_F (BUILT_IN_ATAN)
10962 CASE_BUILTIN_F (BUILT_IN_ATANH)
10963 CASE_BUILTIN_F (BUILT_IN_CBRT)
10964 CASE_BUILTIN_F (BUILT_IN_CEIL)
10965 CASE_BUILTIN_F (BUILT_IN_ERF)
10966 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10967 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10968 CASE_BUILTIN_F (BUILT_IN_FMOD)
10969 CASE_BUILTIN_F (BUILT_IN_FREXP)
10970 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10971 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10972 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10973 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10974 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10975 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10976 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10977 CASE_BUILTIN_F (BUILT_IN_LRINT)
10978 CASE_BUILTIN_F (BUILT_IN_LROUND)
10979 CASE_BUILTIN_F (BUILT_IN_MODF)
10980 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10981 CASE_BUILTIN_F (BUILT_IN_POW)
10982 CASE_BUILTIN_F (BUILT_IN_RINT)
10983 CASE_BUILTIN_F (BUILT_IN_ROUND)
10984 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10985 CASE_BUILTIN_F (BUILT_IN_SINH)
10986 CASE_BUILTIN_F (BUILT_IN_TANH)
10987 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10988 /* True if the 1st argument is nonnegative. */
10989 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10991 CASE_BUILTIN_F (BUILT_IN_FMAX)
10992 /* True if the 1st OR 2nd arguments are nonnegative. */
10993 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10994 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10996 CASE_BUILTIN_F (BUILT_IN_FMIN)
10997 /* True if the 1st AND 2nd arguments are nonnegative. */
10998 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10999 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11001 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11002 /* True if the 2nd argument is nonnegative. */
11003 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11007 #undef CASE_BUILTIN_F
11008 #undef CASE_BUILTIN_I
11012 /* ... fall through ... */
11015 if (truth_value_p (TREE_CODE (t)))
11016 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11020 /* We don't know sign of `t', so be conservative and return false. */
11024 /* Return true when T is an address and is known to be nonzero.
11025 For floating point we further ensure that T is not denormal.
11026 Similar logic is present in nonzero_address in rtlanal.h. */
/* Conservatively decide whether T is known to be nonzero; false means
   "don't know".  Only integral and pointer types are handled.
   NOTE(review): structural lines (case labels, braces, some returns)
   are elided in this excerpt; the surviving code lines are kept
   byte-identical below.  */
11029 tree_expr_nonzero_p (tree t)
11031 tree type = TREE_TYPE (t);
11033 /* Doing something useful for floating point would need more work. */
11034 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11037 switch (TREE_CODE (t))
/* abs(x) is nonzero iff x is.  */
11040 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11043 /* We used to test for !integer_zerop here. This does not work correctly
11044 if TREE_CONSTANT_OVERFLOW (t). */
11045 return (TREE_INT_CST_LOW (t) != 0
11046 || TREE_INT_CST_HIGH (t) != 0);
/* Addition: only provable without wrap-around semantics.  */
11049 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11051 /* With the presence of negative values it is hard
11052 to say something. */
11053 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11054 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11056 /* One of operands must be positive and the other non-negative. */
11057 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11058 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Multiplication: nonzero * nonzero is nonzero absent wrapping.  */
11063 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11065 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11066 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions: safe only when no truncation can occur.  */
11072 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11073 tree outer_type = TREE_TYPE (t);
11075 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11076 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* ADDR_EXPR: the address of a non-weak decl or a constant.  */
11082 tree base = get_base_address (TREE_OPERAND (t, 0));
11087 /* Weak declarations may link to NULL. */
11088 if (VAR_OR_FUNCTION_DECL_P (base))
11089 return !DECL_WEAK (base);
11091 /* Constants are never weak. */
11092 if (CONSTANT_CLASS_P (base))
/* COND_EXPR: both arms must be nonzero.  */
11099 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11100 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* MIN_EXPR: both operands must be nonzero.  */
11103 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11104 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11107 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11109 /* When both operands are nonzero, then MAX must be too. */
11110 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11113 /* MAX where operand 0 is positive is positive. */
11114 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11116 /* MAX where operand 1 is positive is positive. */
11117 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11118 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11122 case COMPOUND_EXPR:
/* Value of a compound expression is its second operand.  */
11125 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11128 case NON_LVALUE_EXPR:
11129 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* BIT_IOR_EXPR: either operand nonzero makes the result nonzero.  */
11132 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11133 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* CALL_EXPR: alloca never returns a null pointer.  */
11136 return alloca_call_p (t);
11144 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11145 attempt to fold the expression to a constant without modifying TYPE,
11148 If the expression could be simplified to a constant, then return
11149 the constant. If the expression would not be simplified to a
11150 constant, then return NULL_TREE. */
11153 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11155 tree tem = fold_binary (code, type, op0, op1);
11156 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11159 /* Given the components of a unary expression CODE, TYPE and OP0,
11160 attempt to fold the expression to a constant without modifying
11163 If the expression could be simplified to a constant, then return
11164 the constant. If the expression would not be simplified to a
11165 constant, then return NULL_TREE. */
11168 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11170 tree tem = fold_unary (code, type, op0);
11171 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11174 /* If EXP represents referencing an element in a constant string
11175 (either via pointer arithmetic or array indexing), return the
11176 tree representing the value accessed, otherwise return NULL. */
11179 fold_read_from_constant_string (tree exp)
11181 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11183 tree exp1 = TREE_OPERAND (exp, 0);
/* For *(str + i) the string and index both come from string_constant;
   for str[i] the index is operand 1, adjusted by the array's lower bound. */
11187 if (TREE_CODE (exp) == INDIRECT_REF)
11188 string = string_constant (exp1, &index);
11191 tree low_bound = array_ref_low_bound (exp);
11192 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11194 /* Optimize the special-case of a zero lower bound.
11196 We convert the low_bound to sizetype to avoid some problems
11197 with constant folding. (E.g. suppose the lower bound is 1,
11198 and its mode is QI. Without the conversion, (ARRAY
11199 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11200 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11201 if (! integer_zerop (low_bound))
11202 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Fold only when the access is provably inside a STRING_CST whose
   elements are single-byte integers and match EXP's type exactly. */
11208 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11209 && TREE_CODE (string) == STRING_CST
11210 && TREE_CODE (index) == INTEGER_CST
11211 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11212 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11214 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
/* Read the byte straight out of the string literal's storage.  */
11215 return fold_convert (TREE_TYPE (exp),
11216 build_int_cst (NULL_TREE,
11217 (TREE_STRING_POINTER (string)
11218 [TREE_INT_CST_LOW (index)])));
11223 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11224 an integer constant or real constant.
11226 TYPE is the type of the result. */
11229 fold_negate_const (tree arg0, tree type)
11231 tree t = NULL_TREE;
11233 switch (TREE_CODE (arg0))
11237 unsigned HOST_WIDE_INT low;
11238 HOST_WIDE_INT high;
/* Negate the two-word integer; neg_double reports overflow (e.g. when
   negating the most negative value of the type). */
11239 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11240 TREE_INT_CST_HIGH (arg0),
11242 t = build_int_cst_wide (type, low, high);
/* Overflow only matters for signed types; also propagate the operand's
   existing overflow flags into the result. */
11243 t = force_fit_type (t, 1,
11244 (overflow | TREE_OVERFLOW (arg0))
11245 && !TYPE_UNSIGNED (type),
11246 TREE_CONSTANT_OVERFLOW (arg0));
/* Real constants negate exactly; no overflow is possible. */
11251 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Callers guarantee ARG0 is INTEGER_CST or REAL_CST. */
11255 gcc_unreachable ();
11261 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11262 an integer constant or real constant.
11264 TYPE is the type of the result. */
11267 fold_abs_const (tree arg0, tree type)
11269 tree t = NULL_TREE;
11271 switch (TREE_CODE (arg0))
11274 /* If the value is unsigned, then the absolute value is
11275 the same as the ordinary value. */
11276 if (TYPE_UNSIGNED (type))
11278 /* Similarly, if the value is non-negative. */
11279 else if (INT_CST_LT (integer_minus_one_node, arg0))
11281 /* If the value is negative, then the absolute value is
11285 unsigned HOST_WIDE_INT low;
11286 HOST_WIDE_INT high;
/* ABS of a negative constant is its two-word negation; neg_double flags
   overflow (e.g. ABS of the most negative value). */
11287 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11288 TREE_INT_CST_HIGH (arg0),
11290 t = build_int_cst_wide (type, low, high);
11291 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11292 TREE_CONSTANT_OVERFLOW (arg0));
/* For reals, flip the sign only when the constant is negative. */
11297 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11298 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Callers guarantee ARG0 is INTEGER_CST or REAL_CST. */
11304 gcc_unreachable ();
11310 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11311 constant. TYPE is the type of the result. */
11314 fold_not_const (tree arg0, tree type)
11316 tree t = NULL_TREE;
11318 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Bitwise-complement both halves of the two-word constant. */
11320 t = build_int_cst_wide (type,
11321 ~ TREE_INT_CST_LOW (arg0),
11322 ~ TREE_INT_CST_HIGH (arg0));
/* Complement itself cannot overflow; just propagate ARG0's flags. */
11323 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11324 TREE_CONSTANT_OVERFLOW (arg0));
11329 /* Given CODE, a relational operator, the target type, TYPE and two
11330 constant operands OP0 and OP1, return the result of the
11331 relational operation. If the result is not a compile time
11332 constant, then return NULL_TREE. */
11335 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11337 int result, invert;
11339 /* From here on, the only cases we handle are when the result is
11340 known to be a constant. */
11342 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11344 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11345 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11347 /* Handle the cases where either operand is a NaN. */
11348 if (real_isnan (c0) || real_isnan (c1))
11358 case UNORDERED_EXPR:
/* With -ftrapping-math, folding an ordered comparison of a NaN would
   lose the trap, so decline to fold. */
11372 if (flag_trapping_math)
11378 gcc_unreachable ();
11381 return constant_boolean_node (result, type);
/* Neither operand is a NaN: the comparison folds unconditionally. */
11384 return constant_boolean_node (real_compare (code, c0, c1), type);
11387 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11389 To compute GT, swap the arguments and do LT.
11390 To compute GE, do LT and invert the result.
11391 To compute LE, swap the arguments, do LT and invert the result.
11392 To compute NE, do EQ and invert the result.
11394 Therefore, the code below must handle only EQ and LT. */
11396 if (code == LE_EXPR || code == GT_EXPR)
11401 code = swap_tree_comparison (code);
11404 /* Note that it is safe to invert for real values here because we
11405 have already handled the one case that it matters. */
11408 if (code == NE_EXPR || code == GE_EXPR)
11411 code = invert_tree_comparison (code, false);
11414 /* Compute a result for LT or EQ if args permit;
11415 Otherwise return T. */
11416 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11418 if (code == EQ_EXPR)
11419 result = tree_int_cst_equal (op0, op1);
/* Pick the signed or unsigned flavor of < to match the operand type. */
11420 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11421 result = INT_CST_LT_UNSIGNED (op0, op1);
11423 result = INT_CST_LT (op0, op1);
11430 return constant_boolean_node (result, type);
11433 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11434 Don't build a cleanup point expression for EXPR which doesn't have side
11438 fold_build_cleanup_point_expr (tree type, tree expr)
11440 /* If the expression does not have side effects then we don't have to wrap
11441 it with a cleanup point expression. */
11442 if (!TREE_SIDE_EFFECTS (expr))
11445 /* If the expression is a return, check to see if the expression inside the
11446 return has no side effects or the right hand side of the modify expression
11447 inside the return. If either don't have side effects set we don't need to
11448 wrap the expression in a cleanup point expression. Note we don't check the
11449 left hand side of the modify because it should always be a return decl. */
11450 if (TREE_CODE (expr) == RETURN_EXPR)
11452 tree op = TREE_OPERAND (expr, 0);
11453 if (!op || !TREE_SIDE_EFFECTS (op))
/* Operand is a MODIFY_EXPR here; inspect its right-hand side. */
11455 op = TREE_OPERAND (op, 1);
11456 if (!TREE_SIDE_EFFECTS (op))
/* EXPR has side effects needing cleanup: wrap it. */
11460 return build1 (CLEANUP_POINT_EXPR, type, expr);
11463 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11464 avoid confusing the gimplify process. */
11467 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11469 /* The size of the object is not relevant when talking about its address. */
11470 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11471 t = TREE_OPERAND (t, 0)
11473 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11474 if (TREE_CODE (t) == INDIRECT_REF
11475 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p itself, converted to PTRTYPE if needed. */
11477 t = TREE_OPERAND (t, 0);
11478 if (TREE_TYPE (t) != ptrtype)
11479 t = build1 (NOP_EXPR, ptrtype, t);
/* Taking the address of an object makes its innermost base addressable. */
11485 while (handled_component_p (base))
11486 base = TREE_OPERAND (base, 0);
11488 TREE_ADDRESSABLE (base) = 1;
11490 t = build1 (ADDR_EXPR, ptrtype, t);
11497 build_fold_addr_expr (tree t)
11499 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11502 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11503 of an indirection through OP0, or NULL_TREE if no simplification is
11507 fold_indirect_ref_1 (tree type, tree op0)
11513 subtype = TREE_TYPE (sub);
11514 if (!POINTER_TYPE_P (subtype))
11517 if (TREE_CODE (sub) == ADDR_EXPR)
11519 tree op = TREE_OPERAND (sub, 0);
11520 tree optype = TREE_TYPE (op);
/* *&foo => foo, when the types agree exactly. */
11522 if (type == optype)
11524 /* *(foo *)&fooarray => fooarray[0] */
11525 else if (TREE_CODE (optype) == ARRAY_TYPE
11526 && type == TREE_TYPE (optype))
11528 tree type_domain = TYPE_DOMAIN (optype);
/* Index with the array's real lower bound, which need not be zero. */
11529 tree min_val = size_zero_node;
11530 if (type_domain && TYPE_MIN_VALUE (type_domain))
11531 min_val = TYPE_MIN_VALUE (type_domain);
11532 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11536 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11537 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11538 && type == TREE_TYPE (TREE_TYPE (subtype)))
11541 tree min_val = size_zero_node;
11542 sub = build_fold_indirect_ref (sub);
11543 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11544 if (type_domain && TYPE_MIN_VALUE (type_domain))
11545 min_val = TYPE_MIN_VALUE (type_domain);
11546 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11552 /* Builds an expression for an indirection through T, simplifying some
11556 build_fold_indirect_ref (tree t)
11558 tree type = TREE_TYPE (TREE_TYPE (t));
11559 tree sub = fold_indirect_ref_1 (type, t);
/* NOTE(review): presumably a simplified SUB is returned when non-NULL,
   with the build1 below as the unsimplified fallback -- the intervening
   lines are elided in this listing; confirm against the full source. */
11564 return build1 (INDIRECT_REF, type, t);
11567 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11570 fold_indirect_ref (tree t)
/* Try to simplify the indirection through T's operand at T's type. */
11572 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11580 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11581 whose result is ignored. The type of the returned tree need not be
11582 the same as the original expression. */
11585 fold_ignored_result (tree t)
11587 if (!TREE_SIDE_EFFECTS (t))
11588 return integer_zero_node;
/* Loop, peeling one layer per iteration until no more can be stripped. */
11591 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11594 t = TREE_OPERAND (t, 0);
11598 case tcc_comparison:
/* A comparison's value is ignored, so keep only whichever operand
   still has side effects. */
11599 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11600 t = TREE_OPERAND (t, 0);
11601 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11602 t = TREE_OPERAND (t, 1);
11607 case tcc_expression:
11608 switch (TREE_CODE (t))
11610 case COMPOUND_EXPR:
/* Drop the second arm of a comma expression when it is inert. */
11611 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11613 t = TREE_OPERAND (t, 0);
/* For a conditional, both arms must be inert before reducing to
   just the condition. */
11617 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11618 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11620 t = TREE_OPERAND (t, 0);
11633 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11634 This can only be applied to objects of a sizetype. */
11637 round_up (tree value, int divisor)
11639 tree div = NULL_TREE;
11641 gcc_assert (divisor > 0);
11645 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11646 have to do anything. Only do this when we are not given a const,
11647 because in that case, this check is more expensive than just
11649 if (TREE_CODE (value) != INTEGER_CST)
11651 div = build_int_cst (TREE_TYPE (value), divisor);
11653 if (multiple_of_p (TREE_TYPE (value), value, div))
11657 /* If divisor is a power of two, simplify this to bit manipulation. */
/* divisor == (divisor & -divisor) holds exactly for powers of two:
   then rounding up is (value + divisor-1) & -divisor. */
11658 if (divisor == (divisor & -divisor))
11662 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11663 value = size_binop (PLUS_EXPR, value, t);
11664 t = build_int_cst (TREE_TYPE (value), -divisor);
11665 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceiling-divide, then multiply back. */
11670 div = build_int_cst (TREE_TYPE (value), divisor);
11671 value = size_binop (CEIL_DIV_EXPR, value, div);
11672 value = size_binop (MULT_EXPR, value, div);
11678 /* Likewise, but round down. */
11681 round_down (tree value, int divisor)
11683 tree div = NULL_TREE;
11685 gcc_assert (divisor > 0);
11689 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11690 have to do anything. Only do this when we are not given a const,
11691 because in that case, this check is more expensive than just
11693 if (TREE_CODE (value) != INTEGER_CST)
11695 div = build_int_cst (TREE_TYPE (value), divisor);
11697 if (multiple_of_p (TREE_TYPE (value), value, div))
11701 /* If divisor is a power of two, simplify this to bit manipulation. */
/* For a power of two, rounding down is just value & -divisor. */
11702 if (divisor == (divisor & -divisor))
11706 t = build_int_cst (TREE_TYPE (value), -divisor);
11707 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: floor-divide, then multiply back. */
11712 div = build_int_cst (TREE_TYPE (value), divisor);
11713 value = size_binop (FLOOR_DIV_EXPR, value, div);
11714 value = size_binop (MULT_EXPR, value, div);
11720 /* Returns the pointer to the base of the object addressed by EXP and
11721 extracts the information about the offset of the access, storing it
11722 to PBITPOS and POFFSET. */
11725 split_address_to_core_and_offset (tree exp,
11726 HOST_WIDE_INT *pbitpos, tree *poffset)
11729 enum machine_mode mode;
11730 int unsignedp, volatilep;
11731 HOST_WIDE_INT bitsize;
11733 if (TREE_CODE (exp) == ADDR_EXPR)
/* Peel the component references off the addressed object and re-take
   the address of the innermost core. */
11735 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11736 poffset, &mode, &unsignedp, &volatilep,
11738 core = build_fold_addr_expr (core);
/* Non-ADDR_EXPR pointers carry no decomposable offset. */
11744 *poffset = NULL_TREE;
11750 /* Returns true if addresses of E1 and E2 differ by a constant, false
11751 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11754 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11757 HOST_WIDE_INT bitpos1, bitpos2;
11758 tree toffset1, toffset2, tdiff, type;
11760 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11761 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* The cores must be the same object and both offsets byte-aligned,
   otherwise the difference is not a whole number of bytes. */
11763 if (bitpos1 % BITS_PER_UNIT != 0
11764 || bitpos2 % BITS_PER_UNIT != 0
11765 || !operand_equal_p (core1, core2, 0))
11768 if (toffset1 && toffset2)
/* Fold the symbolic offsets; succeed only if their difference is a
   constant that fits in a HOST_WIDE_INT. */
11770 type = TREE_TYPE (toffset1);
11771 if (type != TREE_TYPE (toffset2))
11772 toffset2 = fold_convert (type, toffset2);
11774 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11775 if (!cst_and_fits_in_hwi (tdiff))
11778 *diff = int_cst_value (tdiff);
11780 else if (toffset1 || toffset2)
11782 /* If only one of the offsets is non-constant, the difference cannot
/* Fold in the constant bit-position part, converted to bytes. */
11789 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11793 /* Simplify the floating point expression EXP when the sign of the
11794 result is not significant. Return NULL_TREE if no simplification
11798 fold_strip_sign_ops (tree exp)
11802 switch (TREE_CODE (exp))
11806 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11807 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11811 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11813 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11814 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11815 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11816 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11817 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11818 arg1 ? arg1 : TREE_OPERAND (exp, 1));