1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
/* Forward declarations for the file-local helpers defined below.
   NOTE(review): this copy of the file is missing many lines (the baked-in
   original line numbers jump); several prototypes here are visibly cut
   mid-signature (decode_field_reference, merge_ranges,
   fold_binary_op_with_conditional_arg, fold_mathfn_compare) — confirm
   against a pristine fold-const.c before relying on them.  */
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1. Then this yields nonzero if overflow occurred during the
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
/* Signed-overflow test for a two's-complement add: overflow occurred iff
   A and B agree in sign but A and SUM differ (sign bit isolated by < 0).  */
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* NOTE(review): the `#define LOWPART(x) \` line (original line 151) is
   missing from this copy; the expression below is its continuation.  */
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
/* Split the two-word integer (LOW, HI) into four half-width digits in
   WORDS[0..3], least significant first (see LOWPART/HIGHPART/BASE above).
   NOTE(review): the `static void` line and the function braces are
   missing from this copy of the file.  */
162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: recombine the four half-width digits in WORDS into
   the two-word integer (*LOW, *HI).
   NOTE(review): the `static void` line, the parameter continuation line
   and the braces are missing from this copy of the file.  */
175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOWED if,
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs
191 or OVERFLOWABLE is <0 and any overflow occurs
192 We set TREE_CONSTANT_OVERFLOWED if,
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOWED.
195 We return either the original T, or a copy. */
/* Force the INTEGER_CST T to fit its type: mask off bits beyond the
   type's precision, then sign-extend if the type is signed (or is a
   sizetype, which is sign-extended regardless).  Returns T unchanged,
   or a fresh node with TREE_OVERFLOW / TREE_CONSTANT_OVERFLOW set as
   described in the comment above.
   NOTE(review): many lines are missing from this copy (return type,
   braces, several statements, the final `return t;`); the surviving
   tokens below are kept exactly as found.  */
198 force_fit_type (tree t, int overflowable,
199 bool overflowed, bool overflowed_const)
201 unsigned HOST_WIDE_INT low;
204 int sign_extended_type;
206 gcc_assert (TREE_CODE (t) == INTEGER_CST);
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
/* Pointers and offsets use the pointer width, not TYPE_PRECISION
   (the branch body setting prec is missing here).  */
211 if (POINTER_TYPE_P (TREE_TYPE (t))
212 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 low &= ~((HOST_WIDE_INT) (-1) << prec);
/* Then sign-extend the value back out to the host double word.  */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (high & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)low < 0)
252 /* Sign extend bottom half? */
253 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 low |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value changed, return a new node. */
261 if (overflowed || overflowed_const
262 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264 t = build_int_cst_wide (TREE_TYPE (t), low, high);
/* Overflow bookkeeping: TREE_OVERFLOW implies TREE_CONSTANT_OVERFLOW;
   the condition's first arm (testing `overflowed || overflowable < 0`,
   presumably) is missing from this copy.  */
268 || (overflowable > 0 && sign_extended_type))
271 TREE_OVERFLOW (t) = 1;
272 TREE_CONSTANT_OVERFLOW (t) = 1;
274 else if (overflowed_const)
277 TREE_CONSTANT_OVERFLOW (t) = 1;
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword addition: (*LV,*HV) = (L1,H1) + (L2,H2), with carry from
   the unsigned low-word add folded into the high word.  Returns nonzero
   on signed overflow (see OVERFLOW_SUM_SIGN).
   NOTE(review): the return-type line, braces and the stores through
   LV/HV are missing from this copy.  */
290 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
294 unsigned HOST_WIDE_INT l;
/* (l < l1) is the carry out of the (missing) `l = l1 + l2` add.  */
298 h = h1 + h2 + (l < l1);
302 return OVERFLOW_SUM_SIGN (h1, h2, h);
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword negation: (*LV,*HV) = -(L1,H1).  Returns nonzero iff the
   operation overflows as a signed value (only for the minimum value,
   whose negation is itself).
   NOTE(review): the return type, braces and the negation statements
   themselves are missing from this copy; only the overflow test
   (`(*hv & h1) < 0` — both inputs' sign bits set) survives.  */
311 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
318 return (*hv & h1) < 0;
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword multiplication: (*LV,*HV) = (L1,H1) * (L2,H2) via 4x4
   half-word schoolbook multiply into an 8-digit product.  Returns
   nonzero on signed overflow, judged by whether the discarded top half
   of the product matches the sign of the result.
   NOTE(review): several lines are missing from this copy (braces, loop
   indices i/j/k declarations, the inner accumulation of prod[k], and
   the sign-compensation conditionals around lines 374/379).  */
335 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
336 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
337 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
339 HOST_WIDE_INT arg1[4];
340 HOST_WIDE_INT arg2[4];
341 HOST_WIDE_INT prod[4 * 2];
342 unsigned HOST_WIDE_INT carry;
344 unsigned HOST_WIDE_INT toplow, neglow;
345 HOST_WIDE_INT tophigh, neghigh;
347 encode (arg1, l1, h1);
348 encode (arg2, l2, h2);
350 memset (prod, 0, sizeof prod);
352 for (i = 0; i < 4; i++)
355 for (j = 0; j < 4; j++)
358 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
359 carry += arg1[i] * arg2[j];
360 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 prod[k] = LOWPART (carry);
363 carry = HIGHPART (carry);
368 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
370 /* Check for overflow by calculating the top half of the answer in full;
371 it should agree with the low half's sign bit. */
372 decode (prod + 4, &toplow, &tophigh);
/* The guards (`if (h1 < 0)` / `if (h2 < 0)`, presumably) around these
   sign compensations are missing from this copy.  */
375 neg_double (l2, h2, &neglow, &neghigh);
376 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
380 neg_double (l1, h1, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword left shift of (L1,H1) by COUNT, keeping PREC bits; a
   negative COUNT delegates to rshift_double.  ARITH selects the sign
   mask used when re-extending past PREC.
   NOTE(review): lines are missing throughout (function braces, the
   early-return after the rshift delegation, the *lv assignments in the
   shift branches, and the signmask merge for small prec).  */
393 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
394 HOST_WIDE_INT count, unsigned int prec,
395 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
397 unsigned HOST_WIDE_INT signmask;
401 rshift_double (l1, h1, -count, prec, lv, hv, arith);
/* Truncate COUNT modulo PREC when the target does so for shifts.  */
405 if (SHIFT_COUNT_TRUNCATED)
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
415 else if (count >= HOST_BITS_PER_WIDE_INT)
417 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* General case: the two-step `>> (w - count - 1) >> 1` avoids an
   undefined shift by the full word width when count == 0.  */
422 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
427 /* Sign extend all bits that are beyond the precision. */
429 signmask = -((prec > HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT) *hv
431 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432 : (*lv >> (prec - 1))) & 1);
434 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
436 else if (prec >= HOST_BITS_PER_WIDE_INT)
438 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
444 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445 *lv |= signmask << prec;
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword right shift of (L1,H1) by COUNT (must be positive),
   keeping PREC bits; arithmetic shift when the (missing) ARITH
   parameter is nonzero — signmask replicates H1's sign bit in that
   case, else it is zero.
   NOTE(review): lines are missing throughout (the `int arith`
   parameter line, braces, the signmask conditional's first arm, the
   *hv/*lv assignments in some branches).  */
455 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
456 HOST_WIDE_INT count, unsigned int prec,
457 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
460 unsigned HOST_WIDE_INT signmask;
463 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
466 if (SHIFT_COUNT_TRUNCATED)
469 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
471 /* Shifting by the host word size is undefined according to the
472 ANSI standard, so we must handle this as a special case. */
476 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
/* General case; the `<< (w - count - 1) << 1` idiom avoids an
   undefined full-width shift when count == 0.  */
483 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
488 /* Zero / sign extend all bits that are beyond the precision. */
490 if (count >= (HOST_WIDE_INT)prec)
495 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
497 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
499 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
500 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
506 *lv |= signmask << (prec - count);
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword left rotate within PREC bits: OR of a left shift by COUNT
   and a right shift by PREC - COUNT (both logical).
   NOTE(review): the count normalization, the final OR into *lv/*hv and
   the braces are missing from this copy.  */
516 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517 HOST_WIDE_INT count, unsigned int prec,
518 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
520 unsigned HOST_WIDE_INT s1l, s2l;
521 HOST_WIDE_INT s1h, s2h;
527 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 /* Rotate the doubleword integer in L1, H1 left by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword right rotate within PREC bits: mirror of lrotate_double
   (right shift by COUNT OR left shift by PREC - COUNT).
   NOTE(review): the count normalization, the final OR into *lv/*hv and
   the braces are missing from this copy.  */
538 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
/* Doubleword division with rounding per CODE (TRUNC/FLOOR/CEIL/ROUND
   DIV and MOD variants): computes quotient (*LQUO,*HQUO) and remainder
   (*LREM,*HREM) of (LNUM,HNUM) / (LDEN,HDEN); UNS nonzero means
   unsigned.  Core loop is Knuth's Algorithm D over half-word digits.
   Returns nonzero on overflow (including division by zero, treated as
   division by one).
   NOTE(review): this copy is missing many lines (return type, *hrem
   parameter, braces, quo_neg declaration/computation, switch header,
   break statements, etc.); surviving tokens kept exactly as found.  */
565 div_and_round_double (enum tree_code code, int uns,
566 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig,
568 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig,
570 unsigned HOST_WIDE_INT *lquo,
571 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
575 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den[4], quo[4];
578 unsigned HOST_WIDE_INT work;
579 unsigned HOST_WIDE_INT carry = 0;
580 unsigned HOST_WIDE_INT lnum = lnum_orig;
581 HOST_WIDE_INT hnum = hnum_orig;
582 unsigned HOST_WIDE_INT lden = lden_orig;
583 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and divide by 1 instead.  */
586 if (hden == 0 && lden == 0)
587 overflow = 1, lden = 1;
589 /* Calculate quotient sign and convert operands to unsigned. */
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum, hnum, &lnum, &hnum)
597 && ((HOST_WIDE_INT) lden & hden) == -1)
603 neg_double (lden, hden, &lden, &hden);
607 if (hnum == 0 && hden == 0)
608 { /* single precision */
610 /* This unsigned division rounds toward zero. */
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
624 memset (quo, 0, sizeof quo);
626 memset (num, 0, sizeof num); /* to zero 9th element */
627 memset (den, 0, sizeof den);
629 encode (num, lnum, hnum);
630 encode (den, lden, hden);
632 /* Special code for when the divisor < BASE. */
633 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
635 /* hnum != 0 already checked. */
636 for (i = 4 - 1; i >= 0; i--)
638 work = num[i] + carry * BASE;
639 quo[i] = work / lden;
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig, den_hi_sig;
648 unsigned HOST_WIDE_INT quo_est, scale;
650 /* Find the highest nonzero divisor digit. */
651 for (i = 4 - 1;; i--)
658 /* Insure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
661 scale = BASE / (den[den_hi_sig] + 1);
663 { /* scale divisor and dividend */
665 for (i = 0; i <= 4 - 1; i++)
667 work = (num[i] * scale) + carry;
668 num[i] = LOWPART (work);
669 carry = HIGHPART (work);
674 for (i = 0; i <= 4 - 1; i++)
676 work = (den[i] * scale) + carry;
677 den[i] = LOWPART (work);
678 carry = HIGHPART (work);
679 if (den[i] != 0) den_hi_sig = i;
/* Main quotient-digit loop, most significant digit first.  */
686 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order quotient digit.
690 quo_est is never low and is at most 2 high. */
691 unsigned HOST_WIDE_INT tmp;
693 num_hi_sig = i + den_hi_sig + 1;
694 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695 if (num[num_hi_sig] != den[den_hi_sig])
696 quo_est = work / den[den_hi_sig];
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp = work - quo_est * den[den_hi_sig];
703 && (den[den_hi_sig - 1] * quo_est
704 > (tmp * BASE + num[num_hi_sig - 2])))
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is the I - 1st digit. */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = quo_est * den[j] + carry;
715 carry = HIGHPART (work);
716 work = num[i + j] - LOWPART (work);
717 num[i + j] = LOWPART (work);
718 carry += HIGHPART (work) != 0;
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 carry = 0; /* add divisor back in */
727 for (j = 0; j <= den_hi_sig; j++)
729 work = num[i + j] + den[j] + carry;
730 carry = HIGHPART (work);
731 num[i + j] = LOWPART (work);
734 num [num_hi_sig] += carry;
737 /* Store the quotient digit. */
742 decode (quo, lquo, hquo);
745 /* If result is negative, make it so. */
747 neg_double (*lquo, *hquo, lquo, hquo);
749 /* Compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751 neg_double (*lrem, *hrem, lrem, hrem);
752 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding adjustment, switched on CODE (switch header missing).  */
757 case TRUNC_MOD_EXPR: /* round toward zero */
758 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762 case FLOOR_MOD_EXPR: /* round toward negative infinity */
763 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 case CEIL_MOD_EXPR: /* round toward positive infinity */
775 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 case ROUND_MOD_EXPR: /* round to closest integer */
787 unsigned HOST_WIDE_INT labs_rem = *lrem;
788 HOST_WIDE_INT habs_rem = *hrem;
789 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790 HOST_WIDE_INT habs_den = hden, htwice;
792 /* Get absolute values. */
794 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 neg_double (lden, hden, &labs_den, &habs_den);
798 /* If (2 * abs (lrem) >= abs (lden)) */
799 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
/* NOTE(review): "<wice" below is an encoding garble of "&ltwice"
   (an HTML "&lt" entity was decoded); the pristine source passes
   &ltwice here.  Kept as found per byte-identical rule.  */
800 labs_rem, habs_rem, <wice, &htwice);
802 if (((unsigned HOST_WIDE_INT) habs_den
803 < (unsigned HOST_WIDE_INT) htwice)
804 || (((unsigned HOST_WIDE_INT) habs_den
805 == (unsigned HOST_WIDE_INT) htwice)
806 && (labs_den < ltwice)))
810 add_double (*lquo, *hquo,
811 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
814 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 /* Compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828 neg_double (*lrem, *hrem, lrem, hrem);
829 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
/* If ARG2 divides ARG1 exactly under division kind CODE, return the
   quotient as an INTEGER_CST of ARG1's type; otherwise NULL_TREE
   (the NULL return after the remainder test is missing from this copy,
   as are the return type, braces, and the ignored-overflow handling).  */
838 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
840 unsigned HOST_WIDE_INT int1l, int2l;
841 HOST_WIDE_INT int1h, int2h;
842 unsigned HOST_WIDE_INT quol, reml;
843 HOST_WIDE_INT quoh, remh;
844 tree type = TREE_TYPE (arg1);
845 int uns = TYPE_UNSIGNED (type);
847 int1l = TREE_INT_CST_LOW (arg1);
848 int1h = TREE_INT_CST_HIGH (arg1);
849 int2l = TREE_INT_CST_LOW (arg2);
850 int2h = TREE_INT_CST_HIGH (arg2);
852 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853 &quol, &quoh, &reml, &remh);
854 if (remh != 0 || reml != 0)
857 return build_int_cst_wide (type, quol, quoh);
860 /* Return true if built-in mathematical function specified by CODE
861 preserves the sign of it argument, i.e. -f(x) == f(-x). */
/* Return true if built-in CODE is odd (-f(x) == f(-x)), e.g. sin, tan,
   asin.  NOTE(review): only the signature survives in this copy — the
   `static bool` line and the entire switch body are missing.  */
864 negate_mathfn_p (enum built_in_function code)
888 /* Check whether we may negate an integer constant T without causing
/* Return whether INTEGER_CST T can be negated without overflow, i.e.
   whether T is not the most negative value of its signed type (the
   single value whose negation does not fit).  Unsigned types return
   early (that branch's body is missing in this copy, along with the
   return type, braces, and several returns).  */
892 may_negate_without_overflow_p (tree t)
894 unsigned HOST_WIDE_INT val;
898 gcc_assert (TREE_CODE (t) == INTEGER_CST);
900 type = TREE_TYPE (t);
901 if (TYPE_UNSIGNED (type))
904 prec = TYPE_PRECISION (type);
/* Wide type: if any low-word bit is set, T cannot be the minimum
   (whose low word is all zero); otherwise test the high word.  */
905 if (prec > HOST_BITS_PER_WIDE_INT)
907 if (TREE_INT_CST_LOW (t) != 0)
909 prec -= HOST_BITS_PER_WIDE_INT;
910 val = TREE_INT_CST_HIGH (t);
913 val = TREE_INT_CST_LOW (t);
914 if (prec < HOST_BITS_PER_WIDE_INT)
915 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* The minimum value has exactly the sign bit set within PREC.  */
916 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
919 /* Determine whether an expression T can be cheaply negated using
920 the function negate_expr. */
/* Predicate: can expression T be cheaply and safely negated by
   negate_expr?  Dispatches on TREE_CODE (T) — constants, complex,
   plus/minus, mult/div, float narrowing, odd math builtins, and the
   sign-bit right-shift pattern.
   NOTE(review): this copy is missing the case labels, braces and many
   returns; the surviving tokens are kept exactly as found.  */
923 negate_expr_p (tree t)
930 type = TREE_TYPE (t);
933 switch (TREE_CODE (t))
/* INTEGER_CST: trapping signed arithmetic must also prove -CST fits.  */
936 if (TYPE_UNSIGNED (type) || ! flag_trapv)
939 /* Check that -CST will not overflow type. */
940 return may_negate_without_overflow_p (t);
/* COMPLEX_CST: negatable iff both parts are.  */
947 return negate_expr_p (TREE_REALPART (t))
948 && negate_expr_p (TREE_IMAGPART (t));
/* PLUS_EXPR: only under -funsafe-math for floats.  */
951 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
953 /* -(A + B) -> (-B) - A. */
954 if (negate_expr_p (TREE_OPERAND (t, 1))
955 && reorder_operands_p (TREE_OPERAND (t, 0),
956 TREE_OPERAND (t, 1)))
958 /* -(A + B) -> (-A) - B. */
959 return negate_expr_p (TREE_OPERAND (t, 0));
962 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
963 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
964 && reorder_operands_p (TREE_OPERAND (t, 0),
965 TREE_OPERAND (t, 1));
/* MULT/DIV: negate either factor, unless unsigned or sign-dependent
   rounding must be honored.  */
968 if (TYPE_UNSIGNED (TREE_TYPE (t)))
974 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
975 return negate_expr_p (TREE_OPERAND (t, 1))
976 || negate_expr_p (TREE_OPERAND (t, 0));
980 /* Negate -((double)float) as (double)(-float). */
981 if (TREE_CODE (type) == REAL_TYPE)
983 tree tem = strip_float_extensions (t);
985 return negate_expr_p (tem);
990 /* Negate -f(x) as f(-x). */
991 if (negate_mathfn_p (builtin_mathfn_code (t)))
992 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
996 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
997 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
999 tree op1 = TREE_OPERAND (t, 1);
1000 if (TREE_INT_CST_HIGH (op1) == 0
1001 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1002 == TREE_INT_CST_LOW (op1))
1013 /* Given T, an expression, return the negation of T. Allow for T to be
1014 null, in which case return null. */
/* Return the negation of expression T (NULL for NULL), folding where
   possible: constant negation, complex recursion, NEGATE_EXPR
   cancellation, plus/minus rewriting, mult/div operand negation,
   float-extension stripping, odd math builtins, and the sign-bit shift
   trick.  Falls through to building an explicit NEGATE_EXPR.
   NOTE(review): case labels, braces, break statements and several
   lines are missing from this copy; tokens kept exactly as found.  */
1017 negate_expr (tree t)
1025 type = TREE_TYPE (t);
1026 STRIP_SIGN_NOPS (t);
1028 switch (TREE_CODE (t))
/* INTEGER_CST: keep folded result unless it overflowed a signed type.  */
1031 tem = fold_negate_const (t, type);
1032 if (! TREE_OVERFLOW (tem)
1033 || TYPE_UNSIGNED (type)
/* REAL_CST: */
1039 tem = fold_negate_const (t, type);
1040 /* Two's complement FP formats, such as c4x, may overflow. */
1041 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1042 return fold_convert (type, tem);
/* COMPLEX_CST: negate both halves; only fold if both stay constant.  */
1047 tree rpart = negate_expr (TREE_REALPART (t));
1048 tree ipart = negate_expr (TREE_IMAGPART (t));
1050 if ((TREE_CODE (rpart) == REAL_CST
1051 && TREE_CODE (ipart) == REAL_CST)
1052 || (TREE_CODE (rpart) == INTEGER_CST
1053 && TREE_CODE (ipart) == INTEGER_CST))
1054 return build_complex (type, rpart, ipart);
/* NEGATE_EXPR: --A == A.  */
1059 return fold_convert (type, TREE_OPERAND (t, 0));
/* PLUS_EXPR: */
1062 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1064 /* -(A + B) -> (-B) - A. */
1065 if (negate_expr_p (TREE_OPERAND (t, 1))
1066 && reorder_operands_p (TREE_OPERAND (t, 0),
1067 TREE_OPERAND (t, 1)))
1069 tem = negate_expr (TREE_OPERAND (t, 1));
1070 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1071 tem, TREE_OPERAND (t, 0));
1072 return fold_convert (type, tem);
1075 /* -(A + B) -> (-A) - B. */
1076 if (negate_expr_p (TREE_OPERAND (t, 0)))
1078 tem = negate_expr (TREE_OPERAND (t, 0));
1079 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1080 tem, TREE_OPERAND (t, 1));
1081 return fold_convert (type, tem);
1087 /* - (A - B) -> B - A */
1088 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1089 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1090 return fold_convert (type,
1091 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 TREE_OPERAND (t, 1),
1093 TREE_OPERAND (t, 0)));
/* MULT/DIV: push the negation into one operand if allowed.  */
1097 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1103 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1105 tem = TREE_OPERAND (t, 1);
1106 if (negate_expr_p (tem))
1107 return fold_convert (type,
1108 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1109 TREE_OPERAND (t, 0),
1110 negate_expr (tem)));
1111 tem = TREE_OPERAND (t, 0);
1112 if (negate_expr_p (tem))
1113 return fold_convert (type,
1114 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1116 TREE_OPERAND (t, 1)));
1121 /* Convert -((double)float) into (double)(-float). */
1122 if (TREE_CODE (type) == REAL_TYPE)
1124 tem = strip_float_extensions (t);
1125 if (tem != t && negate_expr_p (tem))
1126 return fold_convert (type, negate_expr (tem));
1131 /* Negate -f(x) as f(-x). */
1132 if (negate_mathfn_p (builtin_mathfn_code (t))
1133 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1135 tree fndecl, arg, arglist;
1137 fndecl = get_callee_fndecl (t);
1138 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1139 arglist = build_tree_list (NULL_TREE, arg);
1140 return build_function_call_expr (fndecl, arglist);
1145 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1146 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1148 tree op1 = TREE_OPERAND (t, 1);
1149 if (TREE_INT_CST_HIGH (op1) == 0
1150 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1151 == TREE_INT_CST_LOW (op1))
1153 tree ntype = TYPE_UNSIGNED (type)
1154 ? lang_hooks.types.signed_type (type)
1155 : lang_hooks.types.unsigned_type (type);
1156 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1157 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1158 return fold_convert (type, temp);
/* Fallback: build an explicit NEGATE_EXPR.  */
1167 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1168 return fold_convert (type, tem);
1171 /* Split a tree IN into a constant, literal and variable parts that could be
1172 combined with CODE to make IN. "constant" means an expression with
1173 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1174 commutative arithmetic operation. Store the constant part into *CONP,
1175 the literal in *LITP and return the variable part. If a part isn't
1176 present, set it to null. If the tree does not decompose in this way,
1177 return the entire tree as the variable part and the other parts as null.
1179 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1180 case, we negate an operand that was subtracted. Except if it is a
1181 literal for which we use *MINUS_LITP instead.
1183 If NEGATE_P is true, we are negating all of IN, again except a literal
1184 for which we use *MINUS_LITP instead.
1186 If IN is itself a literal or constant, return it as appropriate.
1188 Note that we do not guarantee that any of the three values will be the
1189 same type as IN, but they will have the same signedness and mode. */
/* Decompose IN (under commutative CODE) into literal (*LITP or
   *MINUS_LITP for subtracted literals), constant (*CONP), and variable
   parts, negating pieces as dictated by NEGATE_P and MINUS_EXPR
   structure; see the long comment above for the full contract.
   NOTE(review): this copy is missing the return type, braces, the
   var/*conp/*litp initializations, the negation guards around lines
   1241-1246, and the final `return var;`.  */
1192 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1193 tree *minus_litp, int negate_p)
1201 /* Strip any conversions that don't change the machine mode or signedness. */
1202 STRIP_SIGN_NOPS (in);
/* IN itself a literal (branch body missing: *litp = in).  */
1204 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1206 else if (TREE_CODE (in) == code
1207 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1208 /* We can associate addition and subtraction together (even
1209 though the C standard doesn't say so) for integers because
1210 the value is not affected. For reals, the value might be
1211 affected, so we can't. */
1212 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1213 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1215 tree op0 = TREE_OPERAND (in, 0);
1216 tree op1 = TREE_OPERAND (in, 1);
1217 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1218 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1220 /* First see if either of the operands is a literal, then a constant. */
1221 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1222 *litp = op0, op0 = 0;
1223 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1224 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1226 if (op0 != 0 && TREE_CONSTANT (op0))
1227 *conp = op0, op0 = 0;
1228 else if (op1 != 0 && TREE_CONSTANT (op1))
1229 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1231 /* If we haven't dealt with either operand, this is not a case we can
1232 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1233 if (op0 != 0 && op1 != 0)
1238 var = op1, neg_var_p = neg1_p;
1240 /* Now do any needed negations. */
1242 *minus_litp = *litp, *litp = 0;
1244 *conp = negate_expr (*conp);
1246 var = negate_expr (var);
/* IN is a non-literal constant (branch body missing: *conp = in).  */
1248 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: swap literal between *litp/*minus_litp and
   negate the other parts.  */
1256 *minus_litp = *litp, *litp = 0;
1257 else if (*minus_litp)
1258 *litp = *minus_litp, *minus_litp = 0;
1259 *conp = negate_expr (*conp);
1260 var = negate_expr (var);
1266 /* Re-associate trees split by the above function. T1 and T2 are either
1267 expressions to associate or null. Return the new expression, if any. If
1268 we build an operation, do it in TYPE and with CODE. */
/* Re-associate T1 and T2 (either may be null, handled in elided lines)
   under CODE in TYPE.  Avoids recursing into fold when either input is
   already a CODE/PLUS/MINUS expression, only simplifying NEGATE_EXPR and
   zero operands by hand.  */
1271 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1278 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1279 try to fold this since we will have infinite recursion. But do
1280 deal with any NEGATE_EXPRs. */
1281 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1282 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1284 if (code == PLUS_EXPR)
1286 if (TREE_CODE (t1) == NEGATE_EXPR)
/* a + (-b)  ==>  b' - a form: build MINUS_EXPR without folding.  */
1287 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1288 fold_convert (type, TREE_OPERAND (t1, 0)));
1289 else if (TREE_CODE (t2) == NEGATE_EXPR)
1290 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1291 fold_convert (type, TREE_OPERAND (t2, 0)));
1292 else if (integer_zerop (t2))
1293 return fold_convert (type, t1);
1295 else if (code == MINUS_EXPR)
1297 if (integer_zerop (t2))
1298 return fold_convert (type, t1);
/* Fall-back inside the no-fold path: build the expression raw.  */
1301 return build2 (code, type, fold_convert (type, t1),
1302 fold_convert (type, t2));
/* Safe to fold: neither operand risks re-association recursion.  */
1305 return fold_build2 (code, type, fold_convert (type, t1),
1306 fold_convert (type, t2));
1309 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1310 to produce a new constant.
1312 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold CODE applied to the two INTEGER_CSTs ARG1 and ARG2 into a new
   constant, using double-word (low/high HOST_WIDE_INT pair) arithmetic.
   If NOTRUNC, the result is not forced to fit the type; overflow flags
   are then propagated by hand.  NOTE(review): the switch dispatch lines
   and some returns are elided in this excerpt.  */
1315 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1317 unsigned HOST_WIDE_INT int1l, int2l;
1318 HOST_WIDE_INT int1h, int2h;
1319 unsigned HOST_WIDE_INT low;
/* garbagel/garbageh receive the half of a div/mod result we discard.  */
1321 unsigned HOST_WIDE_INT garbagel;
1322 HOST_WIDE_INT garbageh;
1324 tree type = TREE_TYPE (arg1);
1325 int uns = TYPE_UNSIGNED (type);
1327 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1330 int1l = TREE_INT_CST_LOW (arg1);
1331 int1h = TREE_INT_CST_HIGH (arg1);
1332 int2l = TREE_INT_CST_LOW (arg2);
1333 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops operate independently on the two halves.  */
1338 low = int1l | int2l, hi = int1h | int2h;
1342 low = int1l ^ int2l, hi = int1h ^ int2h;
1346 low = int1l & int2l, hi = int1h & int2h;
1352 /* It's unclear from the C standard whether shifts can overflow.
1353 The following code ignores overflow; perhaps a C standard
1354 interpretation ruling is needed. */
1355 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1362 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1367 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction: negate the second operand, add, then compute the
   overflow indicator from the sign bits.  */
1371 neg_double (int2l, int2h, &low, &hi);
1372 add_double (int1l, int1h, low, hi, &low, &hi);
1373 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1377 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1380 case TRUNC_DIV_EXPR:
1381 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1382 case EXACT_DIV_EXPR:
1383 /* This is a shortcut for a common special case. */
1384 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1385 && ! TREE_CONSTANT_OVERFLOW (arg1)
1386 && ! TREE_CONSTANT_OVERFLOW (arg2)
1387 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1389 if (code == CEIL_DIV_EXPR)
1392 low = int1l / int2l, hi = 0;
1396 /* ... fall through ... */
1398 case ROUND_DIV_EXPR:
/* Dividing by 1 and x/x are handled without calling the general
   routine.  */
1399 if (int2h == 0 && int2l == 1)
1401 low = int1l, hi = int1h;
1404 if (int1l == int2l && int1h == int2h
1405 && ! (int1l == 0 && int1h == 0))
1410 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1411 &low, &hi, &garbagel, &garbageh);
1414 case TRUNC_MOD_EXPR:
1415 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1416 /* This is a shortcut for a common special case. */
1417 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1418 && ! TREE_CONSTANT_OVERFLOW (arg1)
1419 && ! TREE_CONSTANT_OVERFLOW (arg2)
1420 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1422 if (code == CEIL_MOD_EXPR)
1424 low = int1l % int2l, hi = 0;
1428 /* ... fall through ... */
1430 case ROUND_MOD_EXPR:
/* For MOD the remainder halves are the wanted result; the quotient
   goes to the garbage slots.  */
1431 overflow = div_and_round_double (code, uns,
1432 int1l, int1h, int2l, int2h,
1433 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: first compute "arg1 < arg2" (unsigned or signed compare
   on the high/low word pair), then pick the operand accordingly.  */
1439 low = (((unsigned HOST_WIDE_INT) int1h
1440 < (unsigned HOST_WIDE_INT) int2h)
1441 || (((unsigned HOST_WIDE_INT) int1h
1442 == (unsigned HOST_WIDE_INT) int2h)
1445 low = (int1h < int2h
1446 || (int1h == int2h && int1l < int2l));
1448 if (low == (code == MIN_EXPR))
1449 low = int1l, hi = int1h;
1451 low = int2l, hi = int2h;
1458 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1462 /* Propagate overflow flags ourselves. */
/* Bitwise | is deliberate here: it evaluates all flags without
   short-circuit branches.  Overflow only counts for signed types and
   sizetypes.  */
1463 if (((!uns || is_sizetype) && overflow)
1464 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1467 TREE_OVERFLOW (t) = 1;
1468 TREE_CONSTANT_OVERFLOW (t) = 1;
1470 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1473 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Non-NOTRUNC path: let force_fit_type truncate and set the flags.  */
1477 t = force_fit_type (t, 1,
1478 ((!uns || is_sizetype) && overflow)
1479 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1480 TREE_CONSTANT_OVERFLOW (arg1)
1481 | TREE_CONSTANT_OVERFLOW (arg2));
1486 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1487 constant. We assume ARG1 and ARG2 have the same data type, or at least
1488 are the same kind of constant and the same machine mode.
1490 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold CODE over two constants of the same kind: dispatches to
   int_const_binop for INTEGER_CST, does real arithmetic for REAL_CST
   (bailing out when folding could change trap/rounding behavior), and
   folds COMPLEX_CST component-wise.  NOTE(review): some returns/labels
   are elided in this excerpt.  */
1493 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1498 if (TREE_CODE (arg1) == INTEGER_CST)
1499 return int_const_binop (code, arg1, arg2, notrunc);
1501 if (TREE_CODE (arg1) == REAL_CST)
1503 enum machine_mode mode;
1506 REAL_VALUE_TYPE value;
1507 REAL_VALUE_TYPE result;
1511 d1 = TREE_REAL_CST (arg1);
1512 d2 = TREE_REAL_CST (arg2);
1514 type = TREE_TYPE (arg1);
1515 mode = TYPE_MODE (type);
1517 /* Don't perform operation if we honor signaling NaNs and
1518 either operand is a NaN. */
1519 if (HONOR_SNANS (mode)
1520 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1523 /* Don't perform operation if it would raise a division
1524 by zero exception. */
1525 if (code == RDIV_EXPR
1526 && REAL_VALUES_EQUAL (d2, dconst0)
1527 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1530 /* If either operand is a NaN, just return it. Otherwise, set up
1531 for floating-point trap; we return an overflow. */
1532 if (REAL_VALUE_ISNAN (d1))
1534 else if (REAL_VALUE_ISNAN (d2))
/* Compute in working precision, then round to the type's mode.
   INEXACT records whether real_arithmetic lost precision.  */
1537 inexact = real_arithmetic (&value, code, &d1, &d2);
1538 real_convert (&result, mode, &value);
1540 /* Don't constant fold this floating point operation if
1541 the result has overflowed and flag_trapping_math. */
1543 if (flag_trapping_math
1544 && MODE_HAS_INFINITIES (mode)
1545 && REAL_VALUE_ISINF (result)
1546 && !REAL_VALUE_ISINF (d1)
1547 && !REAL_VALUE_ISINF (d2))
1550 /* Don't constant fold this floating point operation if the
1551 result may dependent upon the run-time rounding mode and
1552 flag_rounding_math is set, or if GCC's software emulation
1553 is unable to accurately represent the result. */
1555 if ((flag_rounding_math
1556 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1557 && !flag_unsafe_math_optimizations))
1558 && (inexact || !real_identical (&result, &value)))
1561 t = build_real (type, result);
/* Bitwise | on the flags is intentional (no short-circuit needed).  */
1563 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1564 TREE_CONSTANT_OVERFLOW (t)
1566 | TREE_CONSTANT_OVERFLOW (arg1)
1567 | TREE_CONSTANT_OVERFLOW (arg2);
1570 if (TREE_CODE (arg1) == COMPLEX_CST)
1572 tree type = TREE_TYPE (arg1);
1573 tree r1 = TREE_REALPART (arg1);
1574 tree i1 = TREE_IMAGPART (arg1);
1575 tree r2 = TREE_REALPART (arg2);
1576 tree i2 = TREE_IMAGPART (arg2);
/* Complex addition: fold real and imaginary parts independently.  */
1582 t = build_complex (type,
1583 const_binop (PLUS_EXPR, r1, r2, notrunc),
1584 const_binop (PLUS_EXPR, i1, i2, notrunc));
1588 t = build_complex (type,
1589 const_binop (MINUS_EXPR, r1, r2, notrunc),
1590 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiplication: (r1*r2 - i1*i2) + (?)i — operand pairs for
   the products are among the elided lines.  */
1594 t = build_complex (type,
1595 const_binop (MINUS_EXPR,
1596 const_binop (MULT_EXPR,
1598 const_binop (MULT_EXPR,
1601 const_binop (PLUS_EXPR,
1602 const_binop (MULT_EXPR,
1604 const_binop (MULT_EXPR,
/* Complex division by the textbook formula: divide (t1, t2) by
   |arg2|^2 = r2*r2 + i2*i2.  */
1611 tree t1, t2, real, imag;
1613 = const_binop (PLUS_EXPR,
1614 const_binop (MULT_EXPR, r2, r2, notrunc),
1615 const_binop (MULT_EXPR, i2, i2, notrunc),
1618 t1 = const_binop (PLUS_EXPR,
1619 const_binop (MULT_EXPR, r1, r2, notrunc),
1620 const_binop (MULT_EXPR, i1, i2, notrunc),
1622 t2 = const_binop (MINUS_EXPR,
1623 const_binop (MULT_EXPR, i1, r2, notrunc),
1624 const_binop (MULT_EXPR, r1, i2, notrunc),
1627 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1629 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1630 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1634 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1635 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1640 t = build_complex (type, real, imag);
1652 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1653 indicates which particular sizetype to create. */
/* Build an INT_CST of value NUMBER in the sizetype selected by KIND
   (an index into sizetype_tab).  */
1656 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1658 return build_int_cst (sizetype_tab[(int) kind], number);
1661 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1662 is a tree code. The type of the result is taken from the operands.
1663 Both must be the same type integer type and it must be a size type.
1664 If the operands are constant, so is the result. */
/* Apply CODE to the two size-typed operands ARG0 and ARG1.  Both must
   share the same sizetype (asserted).  Constant operands are folded
   immediately via int_const_binop; otherwise a folded expression is
   built.  */
1667 size_binop (enum tree_code code, tree arg0, tree arg1)
1669 tree type = TREE_TYPE (arg0);
1671 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1672 && type == TREE_TYPE (arg1));
1674 /* Handle the special case of two integer constants faster. */
1675 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1677 /* And some specific cases even faster than that. */
/* Identity shortcuts: 0 + x, x +/- 0, 1 * x (return values elided in
   this excerpt).  */
1678 if (code == PLUS_EXPR && integer_zerop (arg0))
1680 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1681 && integer_zerop (arg1))
1683 else if (code == MULT_EXPR && integer_onep (arg0))
1686 /* Handle general case of two integer constants. */
1687 return int_const_binop (code, arg0, arg1, 0);
1690 if (arg0 == error_mark_node || arg1 == error_mark_node)
1691 return error_mark_node;
1693 return fold_build2 (code, type, arg0, arg1);
1696 /* Given two values, either both of sizetype or both of bitsizetype,
1697 compute the difference between the two values. Return the value
1698 in signed type corresponding to the type of the operands. */
/* Compute ARG0 - ARG1, both of sizetype or both of bitsizetype, and
   return the difference in the corresponding *signed* type.  Constant
   operands are handled so the subtraction itself never wraps.  */
1701 size_diffop (tree arg0, tree arg1)
1703 tree type = TREE_TYPE (arg0);
1706 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1707 && type == TREE_TYPE (arg1));
1709 /* If the type is already signed, just do the simple thing. */
1710 if (!TYPE_UNSIGNED (type))
1711 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operand type.  */
1713 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1715 /* If either operand is not a constant, do the conversions to the signed
1716 type and subtract. The hardware will do the right thing with any
1717 overflow in the subtraction. */
1718 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1719 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1720 fold_convert (ctype, arg1));
1722 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1723 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1724 overflow) and negate (which can't either). Special-case a result
1725 of zero while we're here. */
1726 if (tree_int_cst_equal (arg0, arg1))
1727 return fold_convert (ctype, integer_zero_node)
1728 else if (tree_int_cst_lt (arg1, arg0))
1729 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* ARG0 < ARG1: compute 0 - (arg1 - arg0) in the signed type.  The
   inner subtraction's trailing arguments are elided in this excerpt.  */
1731 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1732 fold_convert (ctype, size_binop (MINUS_EXPR,
1736 /* A subroutine of fold_convert_const handling conversions of an
1737 INTEGER_CST to another integer type. */
/* Convert the INTEGER_CST ARG1 to the integer type TYPE, sign-extending
   or truncating as needed and propagating overflow flags.  */
1740 fold_convert_const_int_from_int (tree type, tree arg1)
1744 /* Given an integer constant, make new constant with new type,
1745 appropriately sign-extended or truncated. */
1746 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1747 TREE_INT_CST_HIGH (arg1));
1749 t = force_fit_type (t,
1750 /* Don't set the overflow when
1751 converting a pointer */
1752 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Flag overflow when a negative value is converted from a signed to
   an unsigned type (signedness comparison via the < on booleans).  */
1753 (TREE_INT_CST_HIGH (arg1) < 0
1754 && (TYPE_UNSIGNED (type)
1755 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1756 | TREE_OVERFLOW (arg1),
1757 TREE_CONSTANT_OVERFLOW (arg1));
1762 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1763 to an integer type. */
/* Convert the REAL_CST ARG1 to integer type TYPE using the rounding
   implied by CODE (trunc/ceil/floor/round).  NaNs map to zero and
   out-of-range values saturate at TYPE's min/max; overflow is flagged.  */
1766 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1771 /* The following code implements the floating point to integer
1772 conversion rules required by the Java Language Specification,
1773 that IEEE NaNs are mapped to zero and values that overflow
1774 the target precision saturate, i.e. values greater than
1775 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1776 are mapped to INT_MIN. These semantics are allowed by the
1777 C and C++ standards that simply state that the behavior of
1778 FP-to-integer conversion is unspecified upon overflow. */
1780 HOST_WIDE_INT high, low;
1782 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round X into R according to the conversion code.  */
1786 case FIX_TRUNC_EXPR:
1787 real_trunc (&r, VOIDmode, &x);
1791 real_ceil (&r, VOIDmode, &x);
1794 case FIX_FLOOR_EXPR:
1795 real_floor (&r, VOIDmode, &x);
1798 case FIX_ROUND_EXPR:
1799 real_round (&r, VOIDmode, &x);
1806 /* If R is NaN, return zero and show we have an overflow. */
1807 if (REAL_VALUE_ISNAN (r))
1814 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE when R underflows the target range.  */
1819 tree lt = TYPE_MIN_VALUE (type);
1820 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1821 if (REAL_VALUES_LESS (r, l))
1824 high = TREE_INT_CST_HIGH (lt);
1825 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE_MAX_VALUE when R overflows the target range.  */
1831 tree ut = TYPE_MAX_VALUE (type);
1834 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1835 if (REAL_VALUES_LESS (u, r))
1838 high = TREE_INT_CST_HIGH (ut);
1839 low = TREE_INT_CST_LOW (ut);
1845 REAL_VALUE_TO_INT (&low, &high, r);
1847 t = build_int_cst_wide (type, low, high);
1849 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1850 TREE_CONSTANT_OVERFLOW (arg1));
1854 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1855 to another floating point type. */
/* Convert the REAL_CST ARG1 to the floating-point type TYPE by rounding
   its value to TYPE's machine mode; overflow flags are carried over.  */
1858 fold_convert_const_real_from_real (tree type, tree arg1)
1860 REAL_VALUE_TYPE value;
1863 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1864 t = build_real (type, value);
1866 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1867 TREE_CONSTANT_OVERFLOW (t)
1868 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1872 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1873 type TYPE. If no simplification can be done return NULL_TREE. */
/* Try to fold conversion CODE of constant ARG1 to TYPE by dispatching
   to the int/real helpers above.  Returns NULL_TREE (in elided lines)
   when no simplification applies.  */
1876 fold_convert_const (enum tree_code code, tree type, tree arg1)
1878 if (TREE_TYPE (arg1) == type)
1881 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1883 if (TREE_CODE (arg1) == INTEGER_CST)
1884 return fold_convert_const_int_from_int (type, arg1);
1885 else if (TREE_CODE (arg1) == REAL_CST)
1886 return fold_convert_const_int_from_real (code, type, arg1);
1888 else if (TREE_CODE (type) == REAL_TYPE)
1890 if (TREE_CODE (arg1) == INTEGER_CST)
1891 return build_real_from_int_cst (type, arg1);
1892 if (TREE_CODE (arg1) == REAL_CST)
1893 return fold_convert_const_real_from_real (type, arg1);
1898 /* Construct a vector of zero elements of vector type TYPE. */
/* Build a VECTOR_CST of vector type TYPE whose elements are all the
   zero of TYPE's element type.  */
1901 build_zero_vector (tree type)
1906 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1907 units = TYPE_VECTOR_SUBPARTS (type);
/* Build the element list back-to-front; all entries share ELEM.  */
1910 for (i = 0; i < units; i++)
1911 list = tree_cons (NULL_TREE, elem, list);
1912 return build_vector (type, list);
1915 /* Convert expression ARG to type TYPE. Used by the middle-end for
1916 simple conversions in preference to calling the front-end's convert. */
/* Convert ARG to TYPE, folding where possible.  Dispatches on the
   target type's tree code; used by the middle end in preference to the
   front-end convert.  NOTE(review): several case labels and returns are
   elided in this excerpt.  */
1919 fold_convert (tree type, tree arg)
1921 tree orig = TREE_TYPE (arg);
1927 if (TREE_CODE (arg) == ERROR_MARK
1928 || TREE_CODE (type) == ERROR_MARK
1929 || TREE_CODE (orig) == ERROR_MARK)
1930 return error_mark_node;
/* Identical (or language-compatible) types need only a NOP_EXPR.  */
1932 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1933 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1934 TYPE_MAIN_VARIANT (orig)))
1935 return fold_build1 (NOP_EXPR, type, arg);
1937 switch (TREE_CODE (type))
1939 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1940 case POINTER_TYPE: case REFERENCE_TYPE:
1942 if (TREE_CODE (arg) == INTEGER_CST)
1944 tem = fold_convert_const (NOP_EXPR, type, arg);
1945 if (tem != NULL_TREE)
1948 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1949 || TREE_CODE (orig) == OFFSET_TYPE)
1950 return fold_build1 (NOP_EXPR, type, arg);
1951 if (TREE_CODE (orig) == COMPLEX_TYPE)
/* complex -> integral: take the real part, then convert it.  */
1953 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1954 return fold_convert (type, tem);
1956 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1957 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1958 return fold_build1 (NOP_EXPR, type, arg);
/* Target is a REAL_TYPE (case label elided): fold constants first.  */
1961 if (TREE_CODE (arg) == INTEGER_CST)
1963 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1964 if (tem != NULL_TREE)
1967 else if (TREE_CODE (arg) == REAL_CST)
1969 tem = fold_convert_const (NOP_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1974 switch (TREE_CODE (orig))
1976 case INTEGER_TYPE: case CHAR_TYPE:
1977 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1978 case POINTER_TYPE: case REFERENCE_TYPE:
1979 return fold_build1 (FLOAT_EXPR, type, arg);
/* real -> real: use CONVERT_EXPR when -ffloat-store demands spills.  */
1982 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1986 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1987 return fold_convert (type, tem);
/* Target is a COMPLEX_TYPE (case label elided).  */
1994 switch (TREE_CODE (orig))
1996 case INTEGER_TYPE: case CHAR_TYPE:
1997 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1998 case POINTER_TYPE: case REFERENCE_TYPE:
/* scalar -> complex: pair the converted value with a zero imaginary
   part.  */
2000 return build2 (COMPLEX_EXPR, type,
2001 fold_convert (TREE_TYPE (type), arg),
2002 fold_convert (TREE_TYPE (type), integer_zero_node));
2007 if (TREE_CODE (arg) == COMPLEX_EXPR)
2009 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2010 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2011 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* General complex -> complex: save ARG so REALPART/IMAGPART do not
   duplicate its side effects.  */
2014 arg = save_expr (arg);
2015 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2016 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2017 rpart = fold_convert (TREE_TYPE (type), rpart);
2018 ipart = fold_convert (TREE_TYPE (type), ipart);
2019 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a VECTOR_TYPE (case label elided): same-size bit reuse.  */
2027 if (integer_zerop (arg))
2028 return build_zero_vector (type);
2029 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2031 || TREE_CODE (orig) == VECTOR_TYPE);
2032 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Target is VOID_TYPE (case label elided): drop the value.  */
2035 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2042 /* Return false if expr can be assumed not to be an lvalue, true
/* Return true if X might be an lvalue, i.e. its tree code is one we
   would need to wrap in NON_LVALUE_EXPR.  NOTE(review): the case list
   and the return statements are partially elided in this excerpt.  */
2046 maybe_lvalue_p (tree x)
2048 /* We only need to wrap lvalue tree codes. */
2049 switch (TREE_CODE (x))
2060 case ALIGN_INDIRECT_REF:
2061 case MISALIGNED_INDIRECT_REF:
2063 case ARRAY_RANGE_REF:
2069 case PREINCREMENT_EXPR:
2070 case PREDECREMENT_EXPR:
2072 case TRY_CATCH_EXPR:
2073 case WITH_CLEANUP_EXPR:
2084 /* Assume the worst for front-end tree codes. */
2085 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2093 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* (Body of non_lvalue; the signature line is elided in this excerpt.)
   Wrap X in NON_LVALUE_EXPR only when it could otherwise be used as an
   lvalue; plain rvalues are returned unchanged via the elided path.  */
2098 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2103 if (! maybe_lvalue_p (x))
2105 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2108 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2109 Zero means allow extended lvalues. */
2111 int pedantic_lvalues;
2113 /* When pedantic, return an expr equal to X but certainly not valid as a
2114 pedantic lvalue. Otherwise, return X. */
/* Like non_lvalue, but only when pedantic_lvalues is set; otherwise X
   is returned unchanged (return in elided lines).  */
2117 pedantic_non_lvalue (tree x)
2119 if (pedantic_lvalues)
2120 return non_lvalue (x);
2125 /* Given a tree comparison code, return the code that is the logical inverse
2126 of the given code. It is not safe to do this for floating-point
2127 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2128 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
/* Return the logical inverse of comparison CODE.  With HONOR_NANS the
   inverse of an ordered comparison is the corresponding unordered one;
   the trapping-math guard below refuses to invert at all (returning
   ERROR_MARK in elided lines).  */
2131 invert_tree_comparison (enum tree_code code, bool honor_nans)
2133 if (honor_nans && flag_trapping_math)
2143 return honor_nans ? UNLE_EXPR : LE_EXPR;
2145 return honor_nans ? UNLT_EXPR : LT_EXPR;
2147 return honor_nans ? UNGE_EXPR : GE_EXPR;
2149 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are each other's inverses regardless of NaNs.  */
2163 return UNORDERED_EXPR;
2164 case UNORDERED_EXPR:
2165 return ORDERED_EXPR;
2171 /* Similar, but return the comparison that results if the operands are
2172 swapped. This is safe for floating-point. */
/* Return the comparison that holds when CODE's operands are swapped
   (e.g. LT <-> GT).  NOTE(review): virtually all case labels of this
   function are elided in this excerpt.  */
2175 swap_tree_comparison (enum tree_code code)
2182 case UNORDERED_EXPR:
2208 /* Convert a comparison tree code from an enum tree_code representation
2209 into a compcode bit-based encoding. This function is the inverse of
2210 compcode_to_comparison. */
/* Map a comparison tree code to its bit-based COMPCODE_* encoding so
   comparisons can be combined with bitwise AND/OR.  Inverse of
   compcode_to_comparison.  */
2212 static enum comparison_code
2213 comparison_to_compcode (enum tree_code code)
2230 return COMPCODE_ORD;
2231 case UNORDERED_EXPR:
2232 return COMPCODE_UNORD;
2234 return COMPCODE_UNLT;
2236 return COMPCODE_UNEQ;
2238 return COMPCODE_UNLE;
2240 return COMPCODE_UNGT;
2242 return COMPCODE_LTGT;
2244 return COMPCODE_UNGE;
2250 /* Convert a compcode bit-based encoding of a comparison operator back
2251 to GCC's enum tree_code representation. This function is the
2252 inverse of comparison_to_compcode. */
/* Map a COMPCODE_* bit encoding back to the tree comparison code.
   Inverse of comparison_to_compcode; most case labels are elided in
   this excerpt.  */
2254 static enum tree_code
2255 compcode_to_comparison (enum comparison_code code)
2272 return ORDERED_EXPR;
2273 case COMPCODE_UNORD:
2274 return UNORDERED_EXPR;
2292 /* Return a tree for the comparison which is the combination of
2293 doing the AND or OR (depending on CODE) of the two operations LCODE
2294 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2295 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2296 if this makes the transformation invalid. */
/* Combine LCODE and RCODE (both comparing LL_ARG with LR_ARG) under the
   truth operation CODE, using the compcode bit encoding.  Returns the
   combined comparison in TRUTH_TYPE, a constant boolean, or NULL_TREE
   when the transformation would change trapping behavior.  */
2299 combine_comparisons (enum tree_code code, enum tree_code lcode,
2300 enum tree_code rcode, tree truth_type,
2301 tree ll_arg, tree lr_arg)
2303 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2304 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2305 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2306 enum comparison_code compcode;
/* The bit encoding makes AND/OR of comparisons a plain bitwise op.  */
2310 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2311 compcode = lcompcode & rcompcode;
2314 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2315 compcode = lcompcode | rcompcode;
2324 /* Eliminate unordered comparisons, as well as LTGT and ORD
2325 which are not used unless the mode has NaNs. */
2326 compcode &= ~COMPCODE_UNORD;
2327 if (compcode == COMPCODE_LTGT)
2328 compcode = COMPCODE_NE;
2329 else if (compcode == COMPCODE_ORD)
2330 compcode = COMPCODE_TRUE;
2332 else if (flag_trapping_math)
2334 /* Check that the original operation and the optimized ones will trap
2335 under the same condition. */
/* A comparison traps on NaN operands unless it is EQ, ORD, or an
   unordered variant (UNORD bit set).  */
2336 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2337 && (lcompcode != COMPCODE_EQ)
2338 && (lcompcode != COMPCODE_ORD);
2339 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2340 && (rcompcode != COMPCODE_EQ)
2341 && (rcompcode != COMPCODE_ORD);
2342 bool trap = (compcode & COMPCODE_UNORD) == 0
2343 && (compcode != COMPCODE_EQ)
2344 && (compcode != COMPCODE_ORD);
2346 /* In a short-circuited boolean expression the LHS might be
2347 such that the RHS, if evaluated, will never trap. For
2348 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2349 if neither x nor y is NaN. (This is a mixed blessing: for
2350 example, the expression above will never trap, hence
2351 optimizing it to x < y would be invalid). */
2352 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2353 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2356 /* If the comparison was short-circuited, and only the RHS
2357 trapped, we may now generate a spurious trap. */
2359 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
2362 /* If we changed the conditions that cause a trap, we lose. */
2363 if ((ltrap || rtrap) != trap)
/* Fold degenerate results to constant booleans.  */
2367 if (compcode == COMPCODE_TRUE)
2368 return constant_boolean_node (true, truth_type);
2369 else if (compcode == COMPCODE_FALSE)
2370 return constant_boolean_node (false, truth_type);
2372 return fold_build2 (compcode_to_comparison (compcode),
2373 truth_type, ll_arg, lr_arg);
2376 /* Return nonzero if CODE is a tree code that represents a truth value. */
/* Return nonzero if CODE yields a truth value: any comparison or one of
   the TRUTH_* logical operators.  */
2379 truth_value_p (enum tree_code code)
2381 return (TREE_CODE_CLASS (code) == tcc_comparison
2382 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2383 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2384 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2387 /* Return nonzero if two operands (typically of the same tree node)
2388 are necessarily equal. If either argument has side-effects this
2389 function returns zero. FLAGS modifies behavior as follows:
2391 If OEP_ONLY_CONST is set, only return nonzero for constants.
2392 This function tests whether the operands are indistinguishable;
2393 it does not test whether they are equal using C's == operation.
2394 The distinction is important for IEEE floating point, because
2395 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2396 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2398 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2399 even though it may hold multiple values during a function.
2400 This is because a GCC tree node guarantees that nothing else is
2401 executed between the evaluation of its "operands" (which may often
2402 be evaluated in arbitrary order). Hence if the operands themselves
2403 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2404 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2405 unset means assuming isochronic (or instantaneous) tree equivalence.
2406 Unless comparing arbitrary expression trees, such as from different
2407 statements, this flag can usually be left unset.
2409 If OEP_PURE_SAME is set, then pure functions with identical arguments
2410 are considered the same. It is used when the caller has other ways
2411 to ensure that global memory is unchanged in between. */
/* Return nonzero if ARG0 and ARG1 are necessarily equal expressions
   with no (distinguishing) side effects; see the block comment above
   for the OEP_* flag semantics.  NOTE(review): many case labels,
   returns and STRIP_NOPS lines are elided in this excerpt; comments
   describe only the visible code.  */
2414 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2416 /* If either is ERROR_MARK, they aren't equal. */
2417 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2420 /* If both types don't have the same signedness, then we can't consider
2421 them equal. We must check this before the STRIP_NOPS calls
2422 because they may change the signedness of the arguments. */
2423 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2429 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2430 /* This is needed for conversions and for COMPONENT_REF.
2431 Might as well play it safe and always test this. */
2432 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2433 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2434 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2437 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2438 We don't care about side effects in that case because the SAVE_EXPR
2439 takes care of that for us. In all other cases, two expressions are
2440 equal if they have no side effects. If we have two identical
2441 expressions with side effects that should be treated the same due
2442 to the only side effects being identical SAVE_EXPR's, that will
2443 be detected in the recursive calls below. */
2444 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2445 && (TREE_CODE (arg0) == SAVE_EXPR
2446 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2449 /* Next handle constant cases, those for which we can return 1 even
2450 if ONLY_CONST is set. */
2451 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2452 switch (TREE_CODE (arg0))
/* INTEGER_CST: equal only if neither overflowed and values match.  */
2455 return (! TREE_CONSTANT_OVERFLOW (arg0)
2456 && ! TREE_CONSTANT_OVERFLOW (arg1)
2457 && tree_int_cst_equal (arg0, arg1));
/* REAL_CST: bitwise-identical values (distinguishes -0.0 from 0.0).  */
2460 return (! TREE_CONSTANT_OVERFLOW (arg0)
2461 && ! TREE_CONSTANT_OVERFLOW (arg1)
2462 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2463 TREE_REAL_CST (arg1)));
/* VECTOR_CST: walk both element lists in lockstep.  */
2469 if (TREE_CONSTANT_OVERFLOW (arg0)
2470 || TREE_CONSTANT_OVERFLOW (arg1))
2473 v1 = TREE_VECTOR_CST_ELTS (arg0);
2474 v2 = TREE_VECTOR_CST_ELTS (arg1);
2477 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2480 v1 = TREE_CHAIN (v1);
2481 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: compare real and imaginary parts recursively.  */
2488 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2490 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: same length and identical bytes.  */
2494 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2495 && ! memcmp (TREE_STRING_POINTER (arg0),
2496 TREE_STRING_POINTER (arg1),
2497 TREE_STRING_LENGTH (arg0)));
2500 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2506 if (flags & OEP_ONLY_CONST)
2509 /* Define macros to test an operand from arg0 and arg1 for equality and a
2510 variant that allows null and views null as being different from any
2511 non-null value. In the latter case, if either is null, the both
2512 must be; otherwise, do the normal comparison. */
2513 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2514 TREE_OPERAND (arg1, N), flags)
2516 #define OP_SAME_WITH_NULL(N) \
2517 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2518 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2520 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2523 /* Two conversions are equal only if signedness and modes match. */
2524 switch (TREE_CODE (arg0))
2529 case FIX_TRUNC_EXPR:
2530 case FIX_FLOOR_EXPR:
2531 case FIX_ROUND_EXPR:
2532 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2533 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2543 case tcc_comparison:
2545 if (OP_SAME (0) && OP_SAME (1))
2548 /* For commutative ops, allow the other order. */
2549 return (commutative_tree_code (TREE_CODE (arg0))
2550 && operand_equal_p (TREE_OPERAND (arg0, 0),
2551 TREE_OPERAND (arg1, 1), flags)
2552 && operand_equal_p (TREE_OPERAND (arg0, 1),
2553 TREE_OPERAND (arg1, 0), flags));
2556 /* If either of the pointer (or reference) expressions we are
2557 dereferencing contain a side effect, these cannot be equal. */
2558 if (TREE_SIDE_EFFECTS (arg0)
2559 || TREE_SIDE_EFFECTS (arg1))
2562 switch (TREE_CODE (arg0))
2565 case ALIGN_INDIRECT_REF:
2566 case MISALIGNED_INDIRECT_REF:
2572 case ARRAY_RANGE_REF:
2573 /* Operands 2 and 3 may be null. */
2576 && OP_SAME_WITH_NULL (2)
2577 && OP_SAME_WITH_NULL (3));
2580 /* Handle operand 2 the same as for ARRAY_REF. */
2581 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2584 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2590 case tcc_expression:
2591 switch (TREE_CODE (arg0))
2594 case TRUTH_NOT_EXPR:
2597 case TRUTH_ANDIF_EXPR:
2598 case TRUTH_ORIF_EXPR:
2599 return OP_SAME (0) && OP_SAME (1);
2601 case TRUTH_AND_EXPR:
2603 case TRUTH_XOR_EXPR:
2604 if (OP_SAME (0) && OP_SAME (1))
2607 /* Otherwise take into account this is a commutative operation. */
2608 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2614 /* If the CALL_EXPRs call different functions, then they
2615 clearly can not be equal. */
/* Calls are comparable only if const (or pure, with OEP_PURE_SAME).  */
2620 unsigned int cef = call_expr_flags (arg0);
2621 if (flags & OEP_PURE_SAME)
2622 cef &= ECF_CONST | ECF_PURE;
2629 /* Now see if all the arguments are the same. operand_equal_p
2630 does not handle TREE_LIST, so we walk the operands here
2631 feeding them to operand_equal_p. */
2632 arg0 = TREE_OPERAND (arg0, 1);
2633 arg1 = TREE_OPERAND (arg1, 1);
2634 while (arg0 && arg1)
2636 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2640 arg0 = TREE_CHAIN (arg0);
2641 arg1 = TREE_CHAIN (arg1);
2644 /* If we get here and both argument lists are exhausted
2645 then the CALL_EXPRs are equal. */
2646 return ! (arg0 || arg1);
2652 case tcc_declaration:
2653 /* Consider __builtin_sqrt equal to sqrt. */
2654 return (TREE_CODE (arg0) == FUNCTION_DECL
2655 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2656 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2657 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2664 #undef OP_SAME_WITH_NULL
2667 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2668 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2670 When in doubt, return 0. */
2673 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2675 int unsignedp1, unsignedpo;
2676 tree primarg0, primarg1, primother;
2677 unsigned int correct_width;
/* Identical operands are trivially equal for comparison purposes.  */
2679 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare heuristic below only applies to integral types.  */
2682 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2683 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2686 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2687 and see if the inner values are the same. This removes any
2688 signedness comparison, which doesn't matter here. */
2689 primarg0 = arg0, primarg1 = arg1;
2690 STRIP_NOPS (primarg0);
2691 STRIP_NOPS (primarg1);
2692 if (operand_equal_p (primarg0, primarg1, 0))
2695 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2696 actual comparison operand, ARG0.
2698 First throw away any conversions to wider types
2699 already present in the operands. */
2701 primarg1 = get_narrower (arg1, &unsignedp1);
2702 primother = get_narrower (other, &unsignedpo);
2704 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2705 if (unsignedp1 == unsignedpo
2706 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2707 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2709 tree type = TREE_TYPE (arg0);
2711 /* Make sure shorter operand is extended the right way
2712 to match the longer operand. */
2713 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2714 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
/* If converting the narrowed ARG1 back to ARG0's type reproduces ARG0,
   the two are equal for comparison purposes.  */
2716 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2723 /* See if ARG is an expression that is either a comparison or is performing
2724 arithmetic on comparisons. The comparisons must only be comparing
2725 two different values, which will be stored in *CVAL1 and *CVAL2; if
2726 they are nonzero it means that some operands have already been found.
2727 No variables may be used anywhere else in the expression except in the
2728 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2729 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2731 If this is true, return 1. Otherwise, return zero. */
2734 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2736 enum tree_code code = TREE_CODE (arg);
2737 enum tree_code_class class = TREE_CODE_CLASS (code);
2739 /* We can handle some of the tcc_expression cases here. */
2740 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2742 else if (class == tcc_expression
2743 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2744 || code == COMPOUND_EXPR))
2747 else if (class == tcc_expression && code == SAVE_EXPR
2748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2750 /* If we've already found a CVAL1 or CVAL2, this expression is
2751 too complex to handle. */
2752 if (*cval1 || *cval2)
/* Unary case: look inside the single operand.  */
2762 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the predicate.  */
2765 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2766 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2767 cval1, cval2, save_p));
2772 case tcc_expression:
2773 if (code == COND_EXPR)
2774 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2775 cval1, cval2, save_p)
2776 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2777 cval1, cval2, save_p)
2778 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2779 cval1, cval2, save_p));
2782 case tcc_comparison:
2783 /* First see if we can handle the first operand, then the second. For
2784 the second operand, we know *CVAL1 can't be zero. It must be that
2785 one side of the comparison is each of the values; test for the
2786 case where this isn't true by failing if the two operands
2789 if (operand_equal_p (TREE_OPERAND (arg, 0),
2790 TREE_OPERAND (arg, 1), 0))
/* Record or match operand 0 against the values seen so far.  */
2794 *cval1 = TREE_OPERAND (arg, 0);
2795 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2797 else if (*cval2 == 0)
2798 *cval2 = TREE_OPERAND (arg, 0);
2799 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now do the same for operand 1; *CVAL1 is known nonzero here.  */
2804 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2806 else if (*cval2 == 0)
2807 *cval2 = TREE_OPERAND (arg, 1);
2808 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2820 /* ARG is a tree that is known to contain just arithmetic operations and
2821 comparisons. Evaluate the operations in the tree substituting NEW0 for
2822 any occurrence of OLD0 as an operand of a comparison and likewise for
NEW1 and OLD1.  */
2826 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2828 tree type = TREE_TYPE (arg);
2829 enum tree_code code = TREE_CODE (arg);
2830 enum tree_code_class class = TREE_CODE_CLASS (code);
2832 /* We can handle some of the tcc_expression cases here. */
2833 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2835 else if (class == tcc_expression
2836 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary case: rebuild with the substituted operand.  */
2842 return fold_build1 (code, type,
2843 eval_subst (TREE_OPERAND (arg, 0),
2844 old0, new0, old1, new1));
/* Binary case: substitute in both operands.  */
2847 return fold_build2 (code, type,
2848 eval_subst (TREE_OPERAND (arg, 0),
2849 old0, new0, old1, new1),
2850 eval_subst (TREE_OPERAND (arg, 1),
2851 old0, new0, old1, new1));
2853 case tcc_expression:
2857 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2860 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary case (e.g. COND_EXPR): substitute in all three operands.  */
2863 return fold_build3 (code, type,
2864 eval_subst (TREE_OPERAND (arg, 0),
2865 old0, new0, old1, new1),
2866 eval_subst (TREE_OPERAND (arg, 1),
2867 old0, new0, old1, new1),
2868 eval_subst (TREE_OPERAND (arg, 2),
2869 old0, new0, old1, new1));
2873 /* Fall through - ??? */
2875 case tcc_comparison:
2877 tree arg0 = TREE_OPERAND (arg, 0);
2878 tree arg1 = TREE_OPERAND (arg, 1);
2880 /* We need to check both for exact equality and tree equality. The
2881 former will be true if the operand has a side-effect. In that
2882 case, we know the operand occurred exactly once. */
2884 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2886 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2889 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2891 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2894 return fold_build2 (code, type, arg0, arg1);
2902 /* Return a tree for the case when the result of an expression is RESULT
2903 converted to TYPE and OMITTED was previously an operand of the expression
2904 but is now not needed (e.g., we folded OMITTED * 0).
2906 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2907 the conversion of RESULT to TYPE. */
2910 omit_one_operand (tree type, tree result, tree omitted)
2912 tree t = fold_convert (type, result);
/* Sequence OMITTED before T so its side effects still take place.  */
2914 if (TREE_SIDE_EFFECTS (omitted))
2915 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2917 return non_lvalue (t);
2920 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2923 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2925 tree t = fold_convert (type, result);
/* Preserve OMITTED's side effects by evaluating it before the result.  */
2927 if (TREE_SIDE_EFFECTS (omitted))
2928 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2930 return pedantic_non_lvalue (t);
2933 /* Return a tree for the case when the result of an expression is RESULT
2934 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2935 of the expression but are now not needed.
2937 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2938 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2939 evaluated before OMITTED2. Otherwise, if neither has side effects,
2940 just do the conversion of RESULT to TYPE. */
2943 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2945 tree t = fold_convert (type, result);
/* Wrap OMITTED2 innermost and OMITTED1 outermost so that OMITTED1 is
   evaluated first at run time.  */
2947 if (TREE_SIDE_EFFECTS (omitted2))
2948 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2949 if (TREE_SIDE_EFFECTS (omitted1))
2950 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only wrap in non_lvalue when no COMPOUND_EXPR was introduced.  */
2952 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2956 /* Return a simplified tree node for the truth-negation of ARG. This
2957 never alters ARG itself. We assume that ARG is an operation that
2958 returns a truth value (0 or 1).
2960 FIXME: one would think we would fold the result, but it causes
2961 problems with the dominator optimizer. */
2963 invert_truthvalue (tree arg)
2965 tree type = TREE_TYPE (arg);
2966 enum tree_code code = TREE_CODE (arg);
/* Propagate error marks unchanged.  */
2968 if (code == ERROR_MARK)
2971 /* If this is a comparison, we can simply invert it, except for
2972 floating-point non-equality comparisons, in which case we just
2973 enclose a TRUTH_NOT_EXPR around what we have. */
2975 if (TREE_CODE_CLASS (code) == tcc_comparison)
2977 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2978 if (FLOAT_TYPE_P (op_type)
2979 && flag_trapping_math
2980 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2981 && code != NE_EXPR && code != EQ_EXPR)
2982 return build1 (TRUTH_NOT_EXPR, type, arg);
2985 code = invert_tree_comparison (code,
2986 HONOR_NANS (TYPE_MODE (op_type)));
/* A non-invertible comparison code falls back to TRUTH_NOT_EXPR.  */
2987 if (code == ERROR_MARK)
2988 return build1 (TRUTH_NOT_EXPR, type, arg);
2990 return build2 (code, type,
2991 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* A constant truth value inverts to the constant "ARG is zero".  */
2998 return constant_boolean_node (integer_zerop (arg), type);
3000 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
3001 return build2 (TRUTH_OR_EXPR, type,
3002 invert_truthvalue (TREE_OPERAND (arg, 0)),
3003 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
3006 return build2 (TRUTH_AND_EXPR, type,
3007 invert_truthvalue (TREE_OPERAND (arg, 0)),
3008 invert_truthvalue (TREE_OPERAND (arg, 1)));
3010 case TRUTH_XOR_EXPR:
3011 /* Here we can invert either operand. We invert the first operand
3012 unless the second operand is a TRUTH_NOT_EXPR in which case our
3013 result is the XOR of the first operand with the inside of the
3014 negation of the second operand. */
3016 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3017 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3018 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3020 return build2 (TRUTH_XOR_EXPR, type,
3021 invert_truthvalue (TREE_OPERAND (arg, 0)),
3022 TREE_OPERAND (arg, 1));
3024 case TRUTH_ANDIF_EXPR:
3025 return build2 (TRUTH_ORIF_EXPR, type,
3026 invert_truthvalue (TREE_OPERAND (arg, 0)),
3027 invert_truthvalue (TREE_OPERAND (arg, 1)));
3029 case TRUTH_ORIF_EXPR:
3030 return build2 (TRUTH_ANDIF_EXPR, type,
3031 invert_truthvalue (TREE_OPERAND (arg, 0)),
3032 invert_truthvalue (TREE_OPERAND (arg, 1)));
3034 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
3035 return TREE_OPERAND (arg, 0);
3039 tree arg1 = TREE_OPERAND (arg, 1);
3040 tree arg2 = TREE_OPERAND (arg, 2);
3041 /* A COND_EXPR may have a throw as one operand, which
3042 then has void type. Just leave void operands
3044 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3045 VOID_TYPE_P (TREE_TYPE (arg1))
3046 ? arg1 : invert_truthvalue (arg1),
3047 VOID_TYPE_P (TREE_TYPE (arg2))
3048 ? arg2 : invert_truthvalue (arg2));
/* For a compound expression, invert only the value operand.  */
3052 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3053 invert_truthvalue (TREE_OPERAND (arg, 1)));
3055 case NON_LVALUE_EXPR:
3056 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* NOTE(review): presumably the conversion (NOP/CONVERT) case — invert
   inside a conversion from BOOLEAN_TYPE; the case label is not visible
   in this listing — confirm against the full source.  */
3059 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3064 return build1 (TREE_CODE (arg), type,
3065 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) inverts to (x & 1) == 0; other masks fall through.  */
3068 if (!integer_onep (TREE_OPERAND (arg, 1)))
3070 return build2 (EQ_EXPR, type, arg,
3071 fold_convert (type, integer_zero_node));
3074 return build1 (TRUTH_NOT_EXPR, type, arg);
3076 case CLEANUP_POINT_EXPR:
3077 return build1 (CLEANUP_POINT_EXPR, type,
3078 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: ARG must be boolean; wrap it in TRUTH_NOT_EXPR.  */
3083 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3084 return build1 (TRUTH_NOT_EXPR, type, arg);
3087 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3088 operands are another bit-wise operation with a common input. If so,
3089 distribute the bit operations to save an operation and possibly two if
3090 constants are involved. For example, convert
3091 (A | B) & (A | C) into A | (B & C)
3092 Further simplification will occur if B and C are constants.
3094 If this optimization cannot be done, 0 will be returned. */
3097 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must use the same AND/IOR code, different from CODE.  */
3102 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3103 || TREE_CODE (arg0) == code
3104 || (TREE_CODE (arg0) != BIT_AND_EXPR
3105 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the common operand; it may appear on either side of each
   sub-expression, so try all four pairings.  */
3108 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3110 common = TREE_OPERAND (arg0, 0);
3111 left = TREE_OPERAND (arg0, 1);
3112 right = TREE_OPERAND (arg1, 1);
3114 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3116 common = TREE_OPERAND (arg0, 0);
3117 left = TREE_OPERAND (arg0, 1);
3118 right = TREE_OPERAND (arg1, 0);
3120 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3122 common = TREE_OPERAND (arg0, 1);
3123 left = TREE_OPERAND (arg0, 0);
3124 right = TREE_OPERAND (arg1, 1);
3126 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3128 common = TREE_OPERAND (arg0, 1);
3129 left = TREE_OPERAND (arg0, 0);
3130 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT), e.g. A | (B & C).  */
3135 return fold_build2 (TREE_CODE (arg0), type, common,
3136 fold_build2 (code, type, left, right));
3139 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3140 with code CODE. This optimization is unsafe. */
3142 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3144 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3145 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3147 /* (A / C) +- (B / C) -> (A +- B) / C. */
3149 && operand_equal_p (TREE_OPERAND (arg0, 1),
3150 TREE_OPERAND (arg1, 1), 0))
3151 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3152 fold_build2 (code, type,
3153 TREE_OPERAND (arg0, 0),
3154 TREE_OPERAND (arg1, 0)),
3155 TREE_OPERAND (arg0, 1));
3157 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3158 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3159 TREE_OPERAND (arg1, 0), 0)
3160 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3161 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3163 REAL_VALUE_TYPE r0, r1;
3164 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3165 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Replace each constant divisor by its reciprocal, then combine the
   two reciprocals with CODE and fold to a single multiply.  */
3167 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3169 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3170 real_arithmetic (&r0, code, &r0, &r1);
3171 return fold_build2 (MULT_EXPR, type,
3172 TREE_OPERAND (arg0, 0),
3173 build_real (type, r0));
3179 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3180 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3183 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* If the reference covers INNER's whole integral or pointer value,
   a plain conversion suffices instead of a BIT_FIELD_REF.  */
3190 tree size = TYPE_SIZE (TREE_TYPE (inner));
3191 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3192 || POINTER_TYPE_P (TREE_TYPE (inner)))
3193 && host_integerp (size, 0)
3194 && tree_low_cst (size, 0) == bitsize)
3195 return fold_convert (type, inner);
3198 result = build3 (BIT_FIELD_REF, type, inner,
3199 size_int (bitsize), bitsize_int (bitpos));
3201 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3206 /* Optimize a bit-field compare.
3208 There are two cases: First is a compare against a constant and the
3209 second is a comparison of two items where the fields are at the same
3210 bit position relative to the start of a chunk (byte, halfword, word)
3211 large enough to contain it. In these cases we can avoid the shift
3212 implicit in bitfield extractions.
3214 For constants, we emit a compare of the shifted constant with the
3215 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3216 compared. For two fields at the same position, we do the ANDs with the
3217 similar mask and compare the result of the ANDs.
3219 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3220 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3221 are the left and right operands of the comparison, respectively.
3223 If the optimization described above can be done, we return the resulting
3224 tree. Otherwise we return zero. */
3227 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3230 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3231 tree type = TREE_TYPE (lhs);
3232 tree signed_type, unsigned_type;
3233 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3234 enum machine_mode lmode, rmode, nmode;
3235 int lunsignedp, runsignedp;
3236 int lvolatilep = 0, rvolatilep = 0;
3237 tree linner, rinner = NULL_TREE;
3241 /* Get all the information about the extractions being done. If the bit size
3242 is the same as the size of the underlying object, we aren't doing an
3243 extraction at all and so can do nothing. We also don't want to
3244 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3245 then will no longer be able to replace it. */
3246 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3247 &lunsignedp, &lvolatilep, false);
3248 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3249 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3254 /* If this is not a constant, we can only do something if bit positions,
3255 sizes, and signedness are the same. */
3256 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3257 &runsignedp, &rvolatilep, false);
3259 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3260 || lunsignedp != runsignedp || offset != 0
3261 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3265 /* See if we can find a mode to refer to this field. We should be able to,
3266 but fail if we can't. */
3267 nmode = get_best_mode (lbitsize, lbitpos,
3268 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3269 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3270 TYPE_ALIGN (TREE_TYPE (rinner))),
3271 word_mode, lvolatilep || rvolatilep);
3272 if (nmode == VOIDmode)
3275 /* Set signed and unsigned types of the precision of this mode for the
3277 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3278 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3280 /* Compute the bit position and size for the new reference and our offset
3281 within it. If the new reference is the same size as the original, we
3282 won't optimize anything, so return zero. */
3283 nbitsize = GET_MODE_BITSIZE (nmode);
3284 nbitpos = lbitpos & ~ (nbitsize - 1);
3286 if (nbitsize == lbitsize)
3289 if (BYTES_BIG_ENDIAN)
3290 lbitpos = nbitsize - lbitsize - lbitpos;
3292 /* Make the mask to be used against the extracted field. */
3293 mask = build_int_cst (unsigned_type, -1);
3294 mask = force_fit_type (mask, 0, false, false);
3295 mask = fold_convert (unsigned_type, mask);
3296 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3297 mask = const_binop (RSHIFT_EXPR, mask,
3298 size_int (nbitsize - lbitsize - lbitpos), 0);
3301 /* If not comparing with constant, just rework the comparison
3303 return build2 (code, compare_type,
3304 build2 (BIT_AND_EXPR, unsigned_type,
3305 make_bit_field_ref (linner, unsigned_type,
3306 nbitsize, nbitpos, 1),
3308 build2 (BIT_AND_EXPR, unsigned_type,
3309 make_bit_field_ref (rinner, unsigned_type,
3310 nbitsize, nbitpos, 1),
3313 /* Otherwise, we are handling the constant case. See if the constant is too
3314 big for the field. Warn and return a tree for 0 (false) if so. We do
3315 this not only for its own sake, but to avoid having to test for this
3316 error case below. If we didn't, we might generate wrong code.
3318 For unsigned fields, the constant shifted right by the field length should
3319 be all zero. For signed fields, the high-order bits should agree with
3324 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3325 fold_convert (unsigned_type, rhs),
3326 size_int (lbitsize), 0)))
3328 warning (0, "comparison is always %d due to width of bit-field",
3330 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: the bits above the field must be all zeros or all ones
   (sign-extension of the field's top bit).  */
3335 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3336 size_int (lbitsize - 1), 0);
3337 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3339 warning (0, "comparison is always %d due to width of bit-field",
3341 return constant_boolean_node (code == NE_EXPR, compare_type);
3345 /* Single-bit compares should always be against zero. */
3346 if (lbitsize == 1 && ! integer_zerop (rhs))
3348 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3349 rhs = fold_convert (type, integer_zero_node);
3352 /* Make a new bitfield reference, shift the constant over the
3353 appropriate number of bits and mask it with the computed mask
3354 (in case this was a signed field). If we changed it, make a new one. */
3355 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3358 TREE_SIDE_EFFECTS (lhs) = 1;
3359 TREE_THIS_VOLATILE (lhs) = 1;
/* Shift the constant into the field's position and mask it.  */
3362 rhs = const_binop (BIT_AND_EXPR,
3363 const_binop (LSHIFT_EXPR,
3364 fold_convert (unsigned_type, rhs),
3365 size_int (lbitpos), 0),
3368 return build2 (code, compare_type,
3369 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3373 /* Subroutine for fold_truthop: decode a field reference.
3375 If EXP is a comparison reference, we return the innermost reference.
3377 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3378 set to the starting bit number.
3380 If the innermost field can be completely contained in a mode-sized
3381 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3383 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3384 otherwise it is not changed.
3386 *PUNSIGNEDP is set to the signedness of the field.
3388 *PMASK is set to the mask used. This is either contained in a
3389 BIT_AND_EXPR or derived from the width of the field.
3391 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3393 Return 0 if this is not a component reference or is one that we can't
3394 do anything with. */
3397 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3398 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3399 int *punsignedp, int *pvolatilep,
3400 tree *pmask, tree *pand_mask)
3402 tree outer_type = 0;
3404 tree mask, inner, offset;
3406 unsigned int precision;
3408 /* All the optimizations using this function assume integer fields.
3409 There are problems with FP fields since the type_for_size call
3410 below can fail for, e.g., XFmode. */
3411 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3414 /* We are interested in the bare arrangement of bits, so strip everything
3415 that doesn't affect the machine mode. However, record the type of the
3416 outermost expression if it may matter below. */
3417 if (TREE_CODE (exp) == NOP_EXPR
3418 || TREE_CODE (exp) == CONVERT_EXPR
3419 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3420 outer_type = TREE_TYPE (exp);
/* Peel off a BIT_AND_EXPR with a constant mask, remembering the mask.  */
3423 if (TREE_CODE (exp) == BIT_AND_EXPR)
3425 and_mask = TREE_OPERAND (exp, 1);
3426 exp = TREE_OPERAND (exp, 0);
3427 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3428 if (TREE_CODE (and_mask) != INTEGER_CST)
3432 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3433 punsignedp, pvolatilep, false);
3434 if ((inner == exp && and_mask == 0)
3435 || *pbitsize < 0 || offset != 0
3436 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3439 /* If the number of bits in the reference is the same as the bitsize of
3440 the outer type, then the outer type gives the signedness. Otherwise
3441 (in case of a small bitfield) the signedness is unchanged. */
3442 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3443 *punsignedp = TYPE_UNSIGNED (outer_type);
3445 /* Compute the mask to access the bitfield. */
3446 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3447 precision = TYPE_PRECISION (unsigned_type);
3449 mask = build_int_cst (unsigned_type, -1);
3450 mask = force_fit_type (mask, 0, false, false);
/* Shift out the bits above the field, leaving *PBITSIZE low-order ones.  */
3452 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3453 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3455 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3457 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3458 fold_convert (unsigned_type, and_mask), mask);
3461 *pand_mask = and_mask;
3465 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
bits.  */
3469 all_ones_mask_p (tree mask, int size)
3471 tree type = TREE_TYPE (mask);
3472 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the signed counterpart type.  */
3475 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3476 tmask = force_fit_type (tmask, 0, false, false);
/* Compare MASK against all-ones shifted so that exactly the low SIZE
   bits remain set.  */
3479 tree_int_cst_equal (mask,
3480 const_binop (RSHIFT_EXPR,
3481 const_binop (LSHIFT_EXPR, tmask,
3482 size_int (precision - size),
3484 size_int (precision - size), 0));
3487 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3488 represents the sign bit of EXP's type. If EXP represents a sign
3489 or zero extension, also test VAL against the unextended type.
3490 The return value is the (sub)expression whose sign bit is VAL,
3491 or NULL_TREE otherwise. */
3494 sign_bit_p (tree exp, tree val)
3496 unsigned HOST_WIDE_INT mask_lo, lo;
3497 HOST_WIDE_INT mask_hi, hi;
3501 /* Tree EXP must have an integral type. */
3502 t = TREE_TYPE (exp);
3503 if (! INTEGRAL_TYPE_P (t))
3506 /* Tree VAL must be an integer constant. */
3507 if (TREE_CODE (val) != INTEGER_CST
3508 || TREE_CONSTANT_OVERFLOW (val))
/* Compute the sign bit (HI:LO) and a precision mask (MASK_HI:MASK_LO),
   split across two host-wide words for types wider than one word.  */
3511 width = TYPE_PRECISION (t);
3512 if (width > HOST_BITS_PER_WIDE_INT)
3514 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3517 mask_hi = ((unsigned HOST_WIDE_INT) -1
3518 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3524 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3527 mask_lo = ((unsigned HOST_WIDE_INT) -1
3528 >> (HOST_BITS_PER_WIDE_INT - width));
3531 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3532 treat VAL as if it were unsigned. */
3533 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3534 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3537 /* Handle extension from a narrower type. */
3538 if (TREE_CODE (exp) == NOP_EXPR
3539 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3540 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3545 /* Subroutine for fold_truthop: determine if an operand is simple enough
3546 to be evaluated unconditionally. */
3549 simple_operand_p (tree exp)
3551 /* Strip any conversions that don't change the machine mode. */
/* Constants, SSA names, and well-behaved local declarations qualify.  */
3554 return (CONSTANT_CLASS_P (exp)
3555 || TREE_CODE (exp) == SSA_NAME
3557 && ! TREE_ADDRESSABLE (exp)
3558 && ! TREE_THIS_VOLATILE (exp)
3559 && ! DECL_NONLOCAL (exp)
3560 /* Don't regard global variables as simple. They may be
3561 allocated in ways unknown to the compiler (shared memory,
3562 #pragma weak, etc). */
3563 && ! TREE_PUBLIC (exp)
3564 && ! DECL_EXTERNAL (exp)
3565 /* Loading a static variable is unduly expensive, but global
3566 registers aren't expensive. */
3567 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3570 /* The following functions are subroutines to fold_range_test and allow it to
3571 try to change a logical combination of comparisons into a range test.
3574 X == 2 || X == 3 || X == 4 || X == 5
3578 (unsigned) (X - 2) <= 3
3580 We describe each set of comparisons as being either inside or outside
3581 a range, using a variable named like IN_P, and then describe the
3582 range with a lower and upper bound. If one of the bounds is omitted,
3583 it represents either the highest or lowest value of the type.
3585 In the comments below, we represent a range by two numbers in brackets
3586 preceded by a "+" to designate being inside that range, or a "-" to
3587 designate being outside that range, so the condition can be inverted by
3588 flipping the prefix. An omitted bound is represented by a "-". For
3589 example, "- [-, 10]" means being outside the range starting at the lowest
3590 possible value and ending at 10, in other words, being greater than 10.
3591 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
always false.
3594 We set up things so that the missing bounds are handled in a consistent
3595 manner so neither a missing bound nor "true" and "false" need to be
3596 handled using a special case. */
3598 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3599 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3600 and UPPER1_P are nonzero if the respective argument is an upper bound
3601 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3602 must be specified for a comparison. ARG1 will be converted to ARG0's
3603 type if both are specified. */
3606 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3607 tree arg1, int upper1_p)
3613 /* If neither arg represents infinity, do the normal operation.
3614 Else, if not a comparison, return infinity. Else handle the special
3615 comparison rules. Note that most of the cases below won't occur, but
3616 are handled for consistency. */
3618 if (arg0 != 0 && arg1 != 0)
3620 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3621 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a fully folded constant result is useful to callers.  */
3623 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3626 if (TREE_CODE_CLASS (code) != tcc_comparison)
3629 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3630 for neither. In real maths, we cannot assume open ended ranges are
3631 the same. But, this is computer arithmetic, where numbers are finite.
3632 We can therefore make the transformation of any unbounded range with
3633 the value Z, Z being greater than any representable number. This permits
3634 us to treat unbounded ranges as equal. */
3635 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3636 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Emulate the comparison on the signs standing in for +-infinity.  */
3640 result = sgn0 == sgn1;
3643 result = sgn0 != sgn1;
3646 result = sgn0 < sgn1;
3649 result = sgn0 <= sgn1;
3652 result = sgn0 > sgn1;
3655 result = sgn0 >= sgn1;
3661 return constant_boolean_node (result, type);
3664 /* Given EXP, a logical expression, set the range it is testing into
3665 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3666 actually being tested. *PLOW and *PHIGH will be made of the same type
3667 as the returned expression. If EXP is not a comparison, we will most
3668 likely not be returning a useful value and range. */
3671 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3673 enum tree_code code;
3674 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3675 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3677 tree low, high, n_low, n_high;
3679 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3680 and see if we can refine the range. Some of the cases below may not
3681 happen, but it doesn't seem worth worrying about this. We "continue"
3682 the outer loop when we've changed something; otherwise we "break"
3683 the switch, which will "break" the while. */
3686 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
/* NOTE(review): the enclosing "while (1)" loop, the "switch (code)"
   header, the IN_P initialization, and the per-case "break"/"continue"
   statements all fall on lines elided from this excerpt.  */
3690 code = TREE_CODE (exp);
3691 exp_type = TREE_TYPE (exp);
/* Pick up the operands only for tree codes that actually have them,
   so ARG0/ARG1 stay NULL_TREE for leaf nodes.  */
3693 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3695 if (TREE_CODE_LENGTH (code) > 0)
3696 arg0 = TREE_OPERAND (exp, 0);
3697 if (TREE_CODE_CLASS (code) == tcc_comparison
3698 || TREE_CODE_CLASS (code) == tcc_unary
3699 || TREE_CODE_CLASS (code) == tcc_binary)
3700 arg0_type = TREE_TYPE (arg0);
3701 if (TREE_CODE_CLASS (code) == tcc_binary
3702 || TREE_CODE_CLASS (code) == tcc_comparison
3703 || (TREE_CODE_CLASS (code) == tcc_expression
3704 && TREE_CODE_LENGTH (code) > 1))
3705 arg1 = TREE_OPERAND (exp, 1);
/* !EXP is in the range exactly when EXP is out of it: flip IN_P and
   keep decomposing the operand.  */
3710 case TRUTH_NOT_EXPR:
3711 in_p = ! in_p, exp = arg0;
3714 case EQ_EXPR: case NE_EXPR:
3715 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3716 /* We can only do something if the range is testing for zero
3717 and if the second operand is an integer constant. Note that
3718 saying something is "in" the range we make is done by
3719 complementing IN_P since it will set in the initial case of
3720 being not equal to zero; "out" is leaving it alone. */
3721 if (low == 0 || high == 0
3722 || ! integer_zerop (low) || ! integer_zerop (high)
3723 || TREE_CODE (arg1) != INTEGER_CST)
/* Inner switch on the comparison code: each case records the range
   it denotes, using 0 for an unbounded end.  */
3728 case NE_EXPR: /* - [c, c] */
3731 case EQ_EXPR: /* + [c, c] */
3732 in_p = ! in_p, low = high = arg1;
3734 case GT_EXPR: /* - [-, c] */
3735 low = 0, high = arg1;
3737 case GE_EXPR: /* + [c, -] */
3738 in_p = ! in_p, low = arg1, high = 0;
3740 case LT_EXPR: /* - [c, -] */
3741 low = arg1, high = 0;
3743 case LE_EXPR: /* + [-, c] */
3744 in_p = ! in_p, low = 0, high = arg1;
3750 /* If this is an unsigned comparison, we also know that EXP is
3751 greater than or equal to zero. We base the range tests we make
3752 on that fact, so we record it here so we can parse existing
3753 range tests. We test arg0_type since often the return type
3754 of, e.g. EQ_EXPR, is boolean. */
3755 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3757 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3759 fold_convert (arg0_type, integer_zero_node),
3763 in_p = n_in_p, low = n_low, high = n_high;
3765 /* If the high bound is missing, but we have a nonzero low
3766 bound, reverse the range so it goes from zero to the low bound
3768 if (high == 0 && low && ! integer_zerop (low))
3771 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3772 integer_one_node, 0);
3773 low = fold_convert (arg0_type, integer_zero_node);
3781 /* (-x) IN [a,b] -> x in [-b, -a] */
3782 n_low = range_binop (MINUS_EXPR, exp_type,
3783 fold_convert (exp_type, integer_zero_node),
3785 n_high = range_binop (MINUS_EXPR, exp_type,
3786 fold_convert (exp_type, integer_zero_node),
3788 low = n_low, high = n_high;
/* NOTE(review): presumably the BIT_NOT_EXPR case — ~X is rewritten
   as -X - 1 so the loop can reuse the PLUS/MINUS handling; the case
   label itself is on an elided line, confirm against the full file.  */
3794 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3795 fold_convert (exp_type, integer_one_node));
3798 case PLUS_EXPR: case MINUS_EXPR:
3799 if (TREE_CODE (arg1) != INTEGER_CST)
3802 /* If EXP is signed, any overflow in the computation is undefined,
3803 so we don't worry about it so long as our computations on
3804 the bounds don't overflow. For unsigned, overflow is defined
3805 and this is exactly the right thing. */
3806 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3807 arg0_type, low, 0, arg1, 0);
3808 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3809 arg0_type, high, 1, arg1, 0);
3810 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3811 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3814 /* Check for an unsigned range which has wrapped around the maximum
3815 value thus making n_high < n_low, and normalize it. */
3816 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3818 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3819 integer_one_node, 0);
3820 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3821 integer_one_node, 0);
3823 /* If the range is of the form +/- [ x+1, x ], we won't
3824 be able to normalize it. But then, it represents the
3825 whole range or the empty set, so make it
3827 if (tree_int_cst_equal (n_low, low)
3828 && tree_int_cst_equal (n_high, high))
3834 low = n_low, high = n_high;
/* A conversion: the bounds must be re-expressed in ARG0's type, and
   narrowing conversions cannot be looked through.  */
3839 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3840 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3843 if (! INTEGRAL_TYPE_P (arg0_type)
3844 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3845 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3848 n_low = low, n_high = high;
3851 n_low = fold_convert (arg0_type, n_low);
3854 n_high = fold_convert (arg0_type, n_high);
3857 /* If we're converting arg0 from an unsigned type, to exp,
3858 a signed type, we will be doing the comparison as unsigned.
3859 The tests above have already verified that LOW and HIGH
3862 So we have to ensure that we will handle large unsigned
3863 values the same way that the current signed bounds treat
3866 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3869 tree equiv_type = lang_hooks.types.type_for_mode
3870 (TYPE_MODE (arg0_type), 1);
3872 /* A range without an upper bound is, naturally, unbounded.
3873 Since convert would have cropped a very large value, use
3874 the max value for the destination type. */
3876 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3877 : TYPE_MAX_VALUE (arg0_type);
3879 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3880 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3881 fold_convert (arg0_type,
3883 fold_convert (arg0_type,
3886 /* If the low bound is specified, "and" the range with the
3887 range for which the original unsigned value will be
3891 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3892 1, n_low, n_high, 1,
3893 fold_convert (arg0_type,
3898 in_p = (n_in_p == in_p);
3902 /* Otherwise, "or" the range with the range of the input
3903 that will be interpreted as negative. */
3904 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3905 0, n_low, n_high, 1,
3906 fold_convert (arg0_type,
3911 in_p = (in_p != n_in_p);
3916 low = n_low, high = n_high;
3926 /* If EXP is a constant, we can evaluate whether this is true or false. */
3927 if (TREE_CODE (exp) == INTEGER_CST)
3929 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3931 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3937 *pin_p = in_p, *plow = low, *phigh = high;
3941 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3942 type, TYPE, return an expression to test if EXP is in (or out of, depending
3943 on IN_P) the range. Return 0 if the test couldn't be created. */
3946 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3948 tree etype = TREE_TYPE (exp);
3951 #ifdef HAVE_canonicalize_funcptr_for_compare
3952 /* Disable this optimization for function pointer expressions
3953 on targets that require function pointer canonicalization. */
3954 if (HAVE_canonicalize_funcptr_for_compare
3955 && TREE_CODE (etype) == POINTER_TYPE
3956 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* For an "out of range" test, build the "in range" test and invert
   it.  NOTE(review): the "if (! in_p)" guard sits on an elided line.  */
3962 value = build_range_check (type, exp, 1, low, high);
3964 return invert_truthvalue (value);
/* No bounds at all means the test is vacuously true.  */
3969 if (low == 0 && high == 0)
3970 return fold_convert (type, integer_one_node);
/* A single missing bound degenerates to one ordered comparison.
   NOTE(review): the guards selecting the LE/GE forms are elided.  */
3973 return fold_build2 (LE_EXPR, type, exp,
3974 fold_convert (etype, high));
3977 return fold_build2 (GE_EXPR, type, exp,
3978 fold_convert (etype, low));
/* A one-element range [c, c] is simply an equality test.  */
3980 if (operand_equal_p (low, high, 0))
3981 return fold_build2 (EQ_EXPR, type, exp,
3982 fold_convert (etype, low));
/* A range starting at zero becomes a single unsigned <= comparison
   once EXP and HIGH are viewed in the unsigned variant of ETYPE.  */
3984 if (integer_zerop (low))
3986 if (! TYPE_UNSIGNED (etype))
3988 etype = lang_hooks.types.unsigned_type (etype);
3989 high = fold_convert (etype, high);
3990 exp = fold_convert (etype, exp);
3992 return build_range_check (type, exp, 1, 0, high);
3995 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3996 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3998 unsigned HOST_WIDE_INT lo;
/* Build the all-ones-below-the-sign-bit constant (the signed maximum
   for PREC bits) split across the HI/LO halves of a double-int.  */
4002 prec = TYPE_PRECISION (etype);
4003 if (prec <= HOST_BITS_PER_WIDE_INT)
4006 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4010 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4011 lo = (unsigned HOST_WIDE_INT) -1;
4014 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4016 if (TYPE_UNSIGNED (etype))
4018 etype = lang_hooks.types.signed_type (etype);
4019 exp = fold_convert (etype, exp);
4021 return fold_build2 (GT_EXPR, type, exp,
4022 fold_convert (etype, integer_zero_node));
/* General case: aim for (unsigned)(EXP - LOW) <= (unsigned)(HIGH - LOW).
   First compute HIGH - LOW; if the signed subtraction overflowed (or
   -fwrapv forbids relying on it), retry in an unsigned/base type.  */
4026 value = const_binop (MINUS_EXPR, high, low, 0);
4027 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4028 && ! TYPE_UNSIGNED (etype))
4030 tree utype, minv, maxv;
4032 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4033 for the type in question, as we rely on this here. */
4034 switch (TREE_CODE (etype))
4039 /* There is no requirement that LOW be within the range of ETYPE
4040 if the latter is a subtype. It must, however, be within the base
4041 type of ETYPE. So be sure we do the subtraction in that type. */
4042 if (TREE_TYPE (etype))
4043 etype = TREE_TYPE (etype);
4044 utype = lang_hooks.types.unsigned_type (etype);
4045 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4046 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4047 integer_one_node, 1);
4048 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4049 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4053 high = fold_convert (etype, high);
4054 low = fold_convert (etype, low);
4055 exp = fold_convert (etype, exp);
4056 value = const_binop (MINUS_EXPR, high, low, 0);
/* If HIGH - LOW is a valid constant, emit the subtract-and-compare
   form, recursing so the [0, value] case above finishes the job.  */
4064 if (value != 0 && ! TREE_OVERFLOW (value))
4066 /* There is no requirement that LOW be within the range of ETYPE
4067 if the latter is a subtype. It must, however, be within the base
4068 type of ETYPE. So be sure we do the subtraction in that type. */
4069 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4071 etype = TREE_TYPE (etype);
4072 exp = fold_convert (etype, exp);
4073 low = fold_convert (etype, low);
4074 value = fold_convert (etype, value);
4077 return build_range_check (type,
4078 fold_build2 (MINUS_EXPR, etype, exp, low),
4079 1, build_int_cst (etype, 0), value);
4085 /* Given two ranges, see if we can merge them into one. Return 1 if we
4086 can, 0 if we can't. Set the output range into the specified parameters. */
4089 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4090 tree high0, int in1_p, tree low1, tree high1)
/* A NULL bound means "unbounded"; two bounds are considered equal only
   when both are NULL or both compare equal as constants.  */
4098 int lowequal = ((low0 == 0 && low1 == 0)
4099 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4100 low0, 0, low1, 0)));
4101 int highequal = ((high0 == 0 && high1 == 0)
4102 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4103 high0, 1, high1, 1)));
4105 /* Make range 0 be the range that starts first, or ends last if they
4106 start at the same value. Swap them if it isn't. */
4107 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4110 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4111 high1, 1, high0, 1))))
4113 temp = in0_p, in0_p = in1_p, in1_p = temp;
4114 tem = low0, low0 = low1, low1 = tem;
4115 tem = high0, high0 = high1, high1 = tem;
4118 /* Now flag two cases, whether the ranges are disjoint or whether the
4119 second range is totally subsumed in the first. Note that the tests
4120 below are simplified by the ones above. */
4121 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4122 high0, 1, low1, 0));
4123 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4124 high1, 1, high0, 1));
4126 /* We now have four cases, depending on whether we are including or
4127 excluding the two ranges. */
4130 /* If they don't overlap, the result is false. If the second range
4131 is a subset it is the result. Otherwise, the range is from the start
4132 of the second to the end of the first. */
4134 in_p = 0, low = high = 0;
4136 in_p = 1, low = low1, high = high1;
4138 in_p = 1, low = low1, high = high0;
4141 else if (in0_p && ! in1_p)
4143 /* If they don't overlap, the result is the first range. If they are
4144 equal, the result is false. If the second range is a subset of the
4145 first, and the ranges begin at the same place, we go from just after
4146 the end of the first range to the end of the second. If the second
4147 range is not a subset of the first, or if it is a subset and both
4148 ranges end at the same place, the range starts at the start of the
4149 first range and ends just before the second range.
4150 Otherwise, we can't describe this as a single range. */
4152 in_p = 1, low = low0, high = high0;
4153 else if (lowequal && highequal)
4154 in_p = 0, low = high = 0;
4155 else if (subset && lowequal)
4157 in_p = 1, high = high0;
4158 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4159 integer_one_node, 0);
4161 else if (! subset || highequal)
4163 in_p = 1, low = low0;
4164 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4165 integer_one_node, 0);
4171 else if (! in0_p && in1_p)
4173 /* If they don't overlap, the result is the second range. If the second
4174 is a subset of the first, the result is false. Otherwise,
4175 the range starts just after the first range and ends at the
4176 end of the second. */
4178 in_p = 1, low = low1, high = high1;
4179 else if (subset || highequal)
4180 in_p = 0, low = high = 0;
4183 in_p = 1, high = high1;
4184 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4185 integer_one_node, 0);
4191 /* The case where we are excluding both ranges. Here the complex case
4192 is if they don't overlap. In that case, the only time we have a
4193 range is if they are adjacent. If the second is a subset of the
4194 first, the result is the first. Otherwise, the range to exclude
4195 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: HIGH0 + 1 == LOW1 means the two excluded ranges
   touch and fuse into one.  */
4199 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4200 range_binop (PLUS_EXPR, NULL_TREE,
4202 integer_one_node, 1),
4204 in_p = 0, low = low0, high = high1;
4207 /* Canonicalize - [min, x] into - [-, x]. */
4208 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4209 switch (TREE_CODE (TREE_TYPE (low0)))
/* Skip types whose precision does not fill the machine mode; their
   min/max are not the mode's extremes.  */
4212 if (TYPE_PRECISION (TREE_TYPE (low0))
4213 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4218 if (tree_int_cst_equal (low0,
4219 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4223 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4224 && integer_zerop (low0))
4231 /* Canonicalize - [x, max] into - [x, -]. */
4232 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4233 switch (TREE_CODE (TREE_TYPE (high1)))
4236 if (TYPE_PRECISION (TREE_TYPE (high1))
4237 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4242 if (tree_int_cst_equal (high1,
4243 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4247 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4248 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4250 integer_one_node, 1)))
4257 /* The ranges might be also adjacent between the maximum and
4258 minimum values of the given type. For
4259 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4260 return + [x + 1, y - 1]. */
4261 if (low0 == 0 && high1 == 0)
4263 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4264 integer_one_node, 1);
4265 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4266 integer_one_node, 0);
4267 if (low == 0 || high == 0)
4277 in_p = 0, low = low0, high = high0;
4279 in_p = 0, low = low0, high = high1;
4282 *pin_p = in_p, *plow = low, *phigh = high;
4287 /* Subroutine of fold, looking inside expressions of the form
4288 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4289 of the COND_EXPR. This function is being used also to optimize
4290 A op B ? C : A, by reversing the comparison first.
4292 Return a folded expression whose code is not a COND_EXPR
4293 anymore, or NULL_TREE if no folding opportunity is found. */
4296 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4298 enum tree_code comp_code = TREE_CODE (arg0);
4299 tree arg00 = TREE_OPERAND (arg0, 0);
4300 tree arg01 = TREE_OPERAND (arg0, 1);
4301 tree arg1_type = TREE_TYPE (arg1);
4307 /* If we have A op 0 ? A : -A, consider applying the following
4310 A == 0? A : -A same as -A
4311 A != 0? A : -A same as A
4312 A >= 0? A : -A same as abs (A)
4313 A > 0? A : -A same as abs (A)
4314 A <= 0? A : -A same as -abs (A)
4315 A < 0? A : -A same as -abs (A)
4317 None of these transformations work for modes with signed
4318 zeros. If A is +/-0, the first two transformations will
4319 change the sign of the result (from +0 to -0, or vice
4320 versa). The last four will fix the sign of the result,
4321 even though the original expressions could be positive or
4322 negative, depending on the sign of A.
4324 Note that all these transformations are correct if A is
4325 NaN, since the two alternatives (A and -A) are also NaNs. */
4326 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4327 ? real_zerop (arg01)
4328 : integer_zerop (arg01))
4329 && ((TREE_CODE (arg2) == NEGATE_EXPR
4330 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4331 /* In the case that A is of the form X-Y, '-A' (arg2) may
4332 have already been folded to Y-X, check for that. */
4333 || (TREE_CODE (arg1) == MINUS_EXPR
4334 && TREE_CODE (arg2) == MINUS_EXPR
4335 && operand_equal_p (TREE_OPERAND (arg1, 0),
4336 TREE_OPERAND (arg2, 1), 0)
4337 && operand_equal_p (TREE_OPERAND (arg1, 1),
4338 TREE_OPERAND (arg2, 0), 0))))
/* NOTE(review): the switch on COMP_CODE and its case labels are on
   elided lines; the arms below implement the table in the comment
   above (EQ -> -A, NE -> A, GE/GT -> abs, LE/LT -> -abs).  */
4343 tem = fold_convert (arg1_type, arg1);
4344 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4347 return pedantic_non_lvalue (fold_convert (type, arg1));
/* abs cases: ABS_EXPR requires a signed type, and is skipped when
   -ftrapping-math could make the negation trap.  */
4350 if (flag_trapping_math)
4355 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4356 arg1 = fold_convert (lang_hooks.types.signed_type
4357 (TREE_TYPE (arg1)), arg1);
4358 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4359 return pedantic_non_lvalue (fold_convert (type, tem));
4362 if (flag_trapping_math)
4366 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4367 arg1 = fold_convert (lang_hooks.types.signed_type
4368 (TREE_TYPE (arg1)), arg1);
4369 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4370 return negate_expr (fold_convert (type, tem));
4372 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4376 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4377 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4378 both transformations are correct when A is NaN: A != 0
4379 is then true, and A == 0 is false. */
4381 if (integer_zerop (arg01) && integer_zerop (arg2))
4383 if (comp_code == NE_EXPR)
4384 return pedantic_non_lvalue (fold_convert (type, arg1));
4385 else if (comp_code == EQ_EXPR)
4386 return fold_convert (type, integer_zero_node);
4389 /* Try some transformations of A op B ? A : B.
4391 A == B? A : B same as B
4392 A != B? A : B same as A
4393 A >= B? A : B same as max (A, B)
4394 A > B? A : B same as max (B, A)
4395 A <= B? A : B same as min (A, B)
4396 A < B? A : B same as min (B, A)
4398 As above, these transformations don't work in the presence
4399 of signed zeros. For example, if A and B are zeros of
4400 opposite sign, the first two transformations will change
4401 the sign of the result. In the last four, the original
4402 expressions give different results for (A=+0, B=-0) and
4403 (A=-0, B=+0), but the transformed expressions do not.
4405 The first two transformations are correct if either A or B
4406 is a NaN. In the first transformation, the condition will
4407 be false, and B will indeed be chosen. In the case of the
4408 second transformation, the condition A != B will be true,
4409 and A will be chosen.
4411 The conversions to max() and min() are not correct if B is
4412 a number and A is not. The conditions in the original
4413 expressions will be false, so all four give B. The min()
4414 and max() versions would give a NaN instead. */
4415 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4416 /* Avoid these transformations if the COND_EXPR may be used
4417 as an lvalue in the C++ front-end. PR c++/19199. */
4419 || strcmp (lang_hooks.name, "GNU C++") != 0
4420 || ! maybe_lvalue_p (arg1)
4421 || ! maybe_lvalue_p (arg2)))
4423 tree comp_op0 = arg00;
4424 tree comp_op1 = arg01;
4425 tree comp_type = TREE_TYPE (comp_op0);
4427 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4428 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* NOTE(review): inner switch on COMP_CODE with elided case labels —
   the arms below realize the A op B ? A : B table above.  */
4438 return pedantic_non_lvalue (fold_convert (type, arg2));
4440 return pedantic_non_lvalue (fold_convert (type, arg1));
4445 /* In C++ a ?: expression can be an lvalue, so put the
4446 operand which will be used if they are equal first
4447 so that we can convert this back to the
4448 corresponding COND_EXPR. */
4449 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4451 comp_op0 = fold_convert (comp_type, comp_op0);
4452 comp_op1 = fold_convert (comp_type, comp_op1);
4453 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4454 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4455 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4456 return pedantic_non_lvalue (fold_convert (type, tem));
4463 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4465 comp_op0 = fold_convert (comp_type, comp_op0);
4466 comp_op1 = fold_convert (comp_type, comp_op1);
4467 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4468 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4469 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4470 return pedantic_non_lvalue (fold_convert (type, tem));
4474 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4475 return pedantic_non_lvalue (fold_convert (type, arg2));
4478 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4479 return pedantic_non_lvalue (fold_convert (type, arg1));
4482 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4487 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4488 we might still be able to simplify this. For example,
4489 if C1 is one less or one more than C2, this might have started
4490 out as a MIN or MAX and been transformed by this function.
4491 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4493 if (INTEGRAL_TYPE_P (type)
4494 && TREE_CODE (arg01) == INTEGER_CST
4495 && TREE_CODE (arg2) == INTEGER_CST)
4499 /* We can replace A with C1 in this case. */
4500 arg1 = fold_convert (type, arg01);
4501 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4504 /* If C1 is C2 + 1, this is min(A, C2). */
4505 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4507 && operand_equal_p (arg01,
4508 const_binop (PLUS_EXPR, arg2,
4509 integer_one_node, 0),
4511 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4516 /* If C1 is C2 - 1, this is min(A, C2). */
4517 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4519 && operand_equal_p (arg01,
4520 const_binop (MINUS_EXPR, arg2,
4521 integer_one_node, 0),
4523 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4528 /* If C1 is C2 - 1, this is max(A, C2). */
4529 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4531 && operand_equal_p (arg01,
4532 const_binop (MINUS_EXPR, arg2,
4533 integer_one_node, 0),
4535 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4540 /* If C1 is C2 + 1, this is max(A, C2). */
4541 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4543 && operand_equal_p (arg01,
4544 const_binop (PLUS_EXPR, arg2,
4545 integer_one_node, 0),
4547 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
/* Nonzero when untaken conditional branches are expensive enough that
   it pays to evaluate both operands of &&/|| unconditionally instead of
   short-circuiting.  Targets may predefine this; the default keys off
   BRANCH_COST.  NOTE(review): the matching #endif is on an elided line.  */
4561 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4562 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4565 /* EXP is some logical combination of boolean tests. See if we can
4566 merge it into some range test. Return the new tree if so. */
4569 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4571 int or_op = (code == TRUTH_ORIF_EXPR
4572 || code == TRUTH_OR_EXPR);
4573 int in0_p, in1_p, in_p;
4574 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into (in_p, [low, high], tested-expression)
   form; make_range yields the expression under test, with the range
   flags/bounds stored through the out-parameters.  */
4575 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4576 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4579 /* If this is an OR operation, invert both sides; we will invert
4580 again at the end. */
4582 in0_p = ! in0_p, in1_p = ! in1_p;
4584 /* If both expressions are the same, if we can merge the ranges, and we
4585 can build the range test, return it or it inverted. If one of the
4586 ranges is always true or always false, consider it to be the same
4587 expression as the other. */
4588 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4589 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4591 && 0 != (tem = (build_range_check (type,
4593 : rhs != 0 ? rhs : integer_zero_node,
4595 return or_op ? invert_truthvalue (tem) : tem;
4597 /* On machines where the branch cost is expensive, if this is a
4598 short-circuited branch and the underlying object on both sides
4599 is the same, make a non-short-circuit operation. */
4600 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4601 && lhs != 0 && rhs != 0
4602 && (code == TRUTH_ANDIF_EXPR
4603 || code == TRUTH_ORIF_EXPR)
4604 && operand_equal_p (lhs, rhs, 0))
4606 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4607 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4608 which cases we can't do this. */
4609 if (simple_operand_p (lhs))
4610 return build2 (code == TRUTH_ANDIF_EXPR
4611 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4614 else if (lang_hooks.decls.global_bindings_p () == 0
4615 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the shared operand once via SAVE_EXPR, then build both
   range checks against that single evaluation.  */
4617 tree common = save_expr (lhs);
4619 if (0 != (lhs = build_range_check (type, common,
4620 or_op ? ! in0_p : in0_p,
4622 && (0 != (rhs = build_range_check (type, common,
4623 or_op ? ! in1_p : in1_p,
4625 return build2 (code == TRUTH_ANDIF_EXPR
4626 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4634 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4635 bit value. Arrange things so the extra bits will be set to zero if and
4636 only if C is signed-extended to its full width. If MASK is nonzero,
4637 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4640 unextend (tree c, int p, int unsignedp, tree mask)
4642 tree type = TREE_TYPE (c);
4643 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* C already fills the mode, or is zero-extended: nothing to undo.  */
4646 if (p == modesize || unsignedp)
4649 /* We work by getting just the sign bit into the low-order bit, then
4650 into the high-order bit, then sign-extend. We then XOR that value
4652 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4653 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4655 /* We must use a signed type in order to get an arithmetic right shift.
4656 However, we must also avoid introducing accidental overflows, so that
4657 a subsequent call to integer_zerop will work. Hence we must
4658 do the type conversion here. At this point, the constant is either
4659 zero or one, and the conversion to a signed type can never overflow.
4660 We could get an overflow if this conversion is done anywhere else. */
4661 if (TYPE_UNSIGNED (type))
4662 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4664 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4665 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* NOTE(review): the guard choosing the mask/no-mask form (around
   original line 4666) is elided here; when a MASK is supplied, the
   extension bits are AND'ed with it before the final XOR.  */
4667 temp = const_binop (BIT_AND_EXPR, temp,
4668 fold_convert (TREE_TYPE (c), mask), 0);
4669 /* If necessary, convert the type back to match the type of C. */
4670 if (TYPE_UNSIGNED (type))
4671 temp = fold_convert (type, temp);
4673 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4676 /* Find ways of folding logical expressions of LHS and RHS:
4677 Try to merge two comparisons to the same innermost item.
4678 Look for range tests like "ch >= '0' && ch <= '9'".
4679 Look for combinations of simple terms on machines with expensive branches
4680 and evaluate the RHS unconditionally.
4682 For example, if we have p->a == 2 && p->b == 4 and we can make an
4683 object large enough to span both A and B, we can do this with a comparison
4684 against the object ANDed with the a mask.
4686 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4687 operations to do this with one comparison.
4689 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4690 function and the one above.
4692 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4693 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4695 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4698 We return the simplified tree or 0 if no optimization is possible. */
4701 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4703 /* If this is the "or" of two comparisons, we can do something if
4704 the comparisons are NE_EXPR. If this is the "and", we can do something
4705 if the comparisons are EQ_EXPR. I.e.,
4706 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4708 WANTED_CODE is this operation code. For single bit fields, we can
4709 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4710 comparison for one-bit fields. */
4712 enum tree_code wanted_code;
4713 enum tree_code lcode, rcode;
4714 tree ll_arg, lr_arg, rl_arg, rr_arg;
4715 tree ll_inner, lr_inner, rl_inner, rr_inner;
4716 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4717 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4718 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4719 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4720 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4721 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4722 enum machine_mode lnmode, rnmode;
4723 tree ll_mask, lr_mask, rl_mask, rr_mask;
4724 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4725 tree l_const, r_const;
4726 tree lntype, rntype, result;
4727 int first_bit, end_bit;
4730 /* Start by getting the comparison codes. Fail if anything is volatile.
4731 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4732 it were surrounded with a NE_EXPR. */
4734 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4737 lcode = TREE_CODE (lhs);
4738 rcode = TREE_CODE (rhs);
4740 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4742 lhs = build2 (NE_EXPR, truth_type, lhs,
4743 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4747 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4749 rhs = build2 (NE_EXPR, truth_type, rhs,
4750 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4754 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4755 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4758 ll_arg = TREE_OPERAND (lhs, 0);
4759 lr_arg = TREE_OPERAND (lhs, 1);
4760 rl_arg = TREE_OPERAND (rhs, 0);
4761 rr_arg = TREE_OPERAND (rhs, 1);
4763 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4764 if (simple_operand_p (ll_arg)
4765 && simple_operand_p (lr_arg))
4768 if (operand_equal_p (ll_arg, rl_arg, 0)
4769 && operand_equal_p (lr_arg, rr_arg, 0))
4771 result = combine_comparisons (code, lcode, rcode,
4772 truth_type, ll_arg, lr_arg);
4776 else if (operand_equal_p (ll_arg, rr_arg, 0)
4777 && operand_equal_p (lr_arg, rl_arg, 0))
4779 result = combine_comparisons (code, lcode,
4780 swap_tree_comparison (rcode),
4781 truth_type, ll_arg, lr_arg);
4787 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4788 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4790 /* If the RHS can be evaluated unconditionally and its operands are
4791 simple, it wins to evaluate the RHS unconditionally on machines
4792 with expensive branches. In this case, this isn't a comparison
4793 that can be merged. Avoid doing this if the RHS is a floating-point
4794 comparison since those can trap. */
4796 if (BRANCH_COST >= 2
4797 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4798 && simple_operand_p (rl_arg)
4799 && simple_operand_p (rr_arg))
4801 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4802 if (code == TRUTH_OR_EXPR
4803 && lcode == NE_EXPR && integer_zerop (lr_arg)
4804 && rcode == NE_EXPR && integer_zerop (rr_arg)
4805 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4806 return build2 (NE_EXPR, truth_type,
4807 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4809 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4811 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4812 if (code == TRUTH_AND_EXPR
4813 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4814 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4815 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4816 return build2 (EQ_EXPR, truth_type,
4817 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4819 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4821 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4822 return build2 (code, truth_type, lhs, rhs);
4825 /* See if the comparisons can be merged. Then get all the parameters for
4828 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4829 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4833 ll_inner = decode_field_reference (ll_arg,
4834 &ll_bitsize, &ll_bitpos, &ll_mode,
4835 &ll_unsignedp, &volatilep, &ll_mask,
4837 lr_inner = decode_field_reference (lr_arg,
4838 &lr_bitsize, &lr_bitpos, &lr_mode,
4839 &lr_unsignedp, &volatilep, &lr_mask,
4841 rl_inner = decode_field_reference (rl_arg,
4842 &rl_bitsize, &rl_bitpos, &rl_mode,
4843 &rl_unsignedp, &volatilep, &rl_mask,
4845 rr_inner = decode_field_reference (rr_arg,
4846 &rr_bitsize, &rr_bitpos, &rr_mode,
4847 &rr_unsignedp, &volatilep, &rr_mask,
4850 /* It must be true that the inner operation on the lhs of each
4851 comparison must be the same if we are to be able to do anything.
4852 Then see if we have constants. If not, the same must be true for
4854 if (volatilep || ll_inner == 0 || rl_inner == 0
4855 || ! operand_equal_p (ll_inner, rl_inner, 0))
4858 if (TREE_CODE (lr_arg) == INTEGER_CST
4859 && TREE_CODE (rr_arg) == INTEGER_CST)
4860 l_const = lr_arg, r_const = rr_arg;
4861 else if (lr_inner == 0 || rr_inner == 0
4862 || ! operand_equal_p (lr_inner, rr_inner, 0))
4865 l_const = r_const = 0;
4867 /* If either comparison code is not correct for our logical operation,
4868 fail. However, we can convert a one-bit comparison against zero into
4869 the opposite comparison against that bit being set in the field. */
4871 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4872 if (lcode != wanted_code)
4874 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4876 /* Make the left operand unsigned, since we are only interested
4877 in the value of one bit. Otherwise we are doing the wrong
4886 /* This is analogous to the code for l_const above. */
4887 if (rcode != wanted_code)
4889 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4898 /* After this point all optimizations will generate bit-field
4899 references, which we might not want. */
4900 if (! lang_hooks.can_use_bit_fields_p ())
4903 /* See if we can find a mode that contains both fields being compared on
4904 the left. If we can't, fail. Otherwise, update all constants and masks
4905 to be relative to a field of that size. */
4906 first_bit = MIN (ll_bitpos, rl_bitpos);
4907 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4908 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4909 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4911 if (lnmode == VOIDmode)
4914 lnbitsize = GET_MODE_BITSIZE (lnmode);
4915 lnbitpos = first_bit & ~ (lnbitsize - 1);
4916 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4917 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4919 if (BYTES_BIG_ENDIAN)
4921 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4922 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4925 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4926 size_int (xll_bitpos), 0);
4927 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4928 size_int (xrl_bitpos), 0);
4932 l_const = fold_convert (lntype, l_const);
4933 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4934 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4935 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4936 fold_build1 (BIT_NOT_EXPR,
4940 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4942 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4947 r_const = fold_convert (lntype, r_const);
4948 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4949 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4950 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4951 fold_build1 (BIT_NOT_EXPR,
4955 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4957 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4961 /* If the right sides are not constant, do the same for it. Also,
4962 disallow this optimization if a size or signedness mismatch occurs
4963 between the left and right sides. */
4966 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4967 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4968 /* Make sure the two fields on the right
4969 correspond to the left without being swapped. */
4970 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4973 first_bit = MIN (lr_bitpos, rr_bitpos);
4974 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4975 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4976 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4978 if (rnmode == VOIDmode)
4981 rnbitsize = GET_MODE_BITSIZE (rnmode);
4982 rnbitpos = first_bit & ~ (rnbitsize - 1);
4983 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4984 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4986 if (BYTES_BIG_ENDIAN)
4988 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4989 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4992 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4993 size_int (xlr_bitpos), 0);
4994 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4995 size_int (xrr_bitpos), 0);
4997 /* Make a mask that corresponds to both fields being compared.
4998 Do this for both items being compared. If the operands are the
4999 same size and the bits being compared are in the same position
5000 then we can do this by masking both and comparing the masked
5002 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5003 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5004 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5006 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5007 ll_unsignedp || rl_unsignedp);
5008 if (! all_ones_mask_p (ll_mask, lnbitsize))
5009 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5011 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5012 lr_unsignedp || rr_unsignedp);
5013 if (! all_ones_mask_p (lr_mask, rnbitsize))
5014 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5016 return build2 (wanted_code, truth_type, lhs, rhs);
5019 /* There is still another way we can do something: If both pairs of
5020 fields being compared are adjacent, we may be able to make a wider
5021 field containing them both.
5023 Note that we still must mask the lhs/rhs expressions. Furthermore,
5024 the mask must be shifted to account for the shift done by
5025 make_bit_field_ref. */
5026 if ((ll_bitsize + ll_bitpos == rl_bitpos
5027 && lr_bitsize + lr_bitpos == rr_bitpos)
5028 || (ll_bitpos == rl_bitpos + rl_bitsize
5029 && lr_bitpos == rr_bitpos + rr_bitsize))
5033 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5034 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5035 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5036 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5038 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5039 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5040 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5041 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5043 /* Convert to the smaller type before masking out unwanted bits. */
5045 if (lntype != rntype)
5047 if (lnbitsize > rnbitsize)
5049 lhs = fold_convert (rntype, lhs);
5050 ll_mask = fold_convert (rntype, ll_mask);
5053 else if (lnbitsize < rnbitsize)
5055 rhs = fold_convert (lntype, rhs);
5056 lr_mask = fold_convert (lntype, lr_mask);
5061 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5062 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5064 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5065 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5067 return build2 (wanted_code, truth_type, lhs, rhs);
5073 /* Handle the case of comparisons with constants. If there is something in
5074 common between the masks, those bits of the constants must be the same.
5075 If not, the condition is always false. Test for this to avoid generating
5076 incorrect code below. */
5077 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5078 if (! integer_zerop (result)
5079 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5080 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5082 if (wanted_code == NE_EXPR)
5084 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5085 return constant_boolean_node (true, truth_type);
5089 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5090 return constant_boolean_node (false, truth_type);
5094 /* Construct the expression we will return. First get the component
5095 reference we will make. Unless the mask is all ones the width of
5096 that field, perform the mask operation. Then compare with the
5098 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5099 ll_unsignedp || rl_unsignedp);
5101 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5102 if (! all_ones_mask_p (ll_mask, lnbitsize))
5103 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5105 return build2 (wanted_code, truth_type, result,
5106 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* NOTE(review): this chunk is a lossy excerpt -- the embedded original
   line numbers jump (5109 -> 5113, 5138 -> 5143, ...), so the return
   type line, braces, several local declarations (apparently `arg0',
   `minmax_const' and `inner') and the `switch (code)' header are not
   visible.  Comments added below describe only the surviving lines;
   this text is not compilable as shown.  */
5109 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5113 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
/* Locals: the MIN/MAX tree code, the constant compared against, and
   two flags relating the MIN/MAX constant to the comparison constant.  */
5116 enum tree_code op_code;
5117 tree comp_const = op1;
5119 int consts_equal, consts_lt;
/* Strip sign-preserving conversions from the MIN/MAX operand.
   NOTE(review): `arg0' is presumably initialized from OP0 on a
   dropped line -- confirm against the full source.  */
5122 STRIP_SIGN_NOPS (arg0);
5124 op_code = TREE_CODE (arg0);
5125 minmax_const = TREE_OPERAND (arg0, 1);
5126 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5127 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5128 inner = TREE_OPERAND (arg0, 0);
5130 /* If something does not permit us to optimize, return the original tree. */
5131 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5132 || TREE_CODE (comp_const) != INTEGER_CST
5133 || TREE_CONSTANT_OVERFLOW (comp_const)
5134 || TREE_CODE (minmax_const) != INTEGER_CST
5135 || TREE_CONSTANT_OVERFLOW (minmax_const))
/* NOTE(review): the bail-out statement guarded above is not visible.  */
5138 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5139 and GT_EXPR, doing the rest with recursive calls using logical
/* NOTE(review): the `switch (code)' line was dropped; the case labels
   below are its arms.  */
5143 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5145 /* FIXME: We should be able to invert code without building a
5146 scratch tree node, but doing so would require us to
5147 duplicate a part of invert_truthvalue here. */
5148 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5149 tem = optimize_minmax_comparison (TREE_CODE (tem),
5151 TREE_OPERAND (tem, 0),
5152 TREE_OPERAND (tem, 1));
5153 return invert_truthvalue (tem);
/* Decompose >= into (== || >) and recurse on each half
   (the enclosing case label is not visible -- presumably GE_EXPR).  */
5158 fold_build2 (TRUTH_ORIF_EXPR, type,
5159 optimize_minmax_comparison
5160 (EQ_EXPR, type, arg0, comp_const),
5161 optimize_minmax_comparison
5162 (GT_EXPR, type, arg0, comp_const));
/* Equality arm (its case label was dropped): fold MIN/MAX compared
   for equality against a constant, per the inline examples.  */
5165 if (op_code == MAX_EXPR && consts_equal)
5166 /* MAX (X, 0) == 0 -> X <= 0 */
5167 return fold_build2 (LE_EXPR, type, inner, comp_const);
5169 else if (op_code == MAX_EXPR && consts_lt)
5170 /* MAX (X, 0) == 5 -> X == 5 */
5171 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5173 else if (op_code == MAX_EXPR)
5174 /* MAX (X, 0) == -1 -> false */
5175 return omit_one_operand (type, integer_zero_node, inner);
5177 else if (consts_equal)
5178 /* MIN (X, 0) == 0 -> X >= 0 */
5179 return fold_build2 (GE_EXPR, type, inner, comp_const);
5182 /* MIN (X, 0) == 5 -> false */
5183 return omit_one_operand (type, integer_zero_node, inner);
5186 /* MIN (X, 0) == -1 -> X == -1 */
5187 return fold_build2 (EQ_EXPR, type, inner, comp_const);
/* Greater-than arm (its case label was dropped).  */
5190 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5191 /* MAX (X, 0) > 0 -> X > 0
5192 MAX (X, 0) > 5 -> X > 5 */
5193 return fold_build2 (GT_EXPR, type, inner, comp_const);
5195 else if (op_code == MAX_EXPR)
5196 /* MAX (X, 0) > -1 -> true */
5197 return omit_one_operand (type, integer_one_node, inner);
5199 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5200 /* MIN (X, 0) > 0 -> false
5201 MIN (X, 0) > 5 -> false */
5202 return omit_one_operand (type, integer_zero_node, inner);
5205 /* MIN (X, 0) > -1 -> X > -1 */
5206 return fold_build2 (GT_EXPR, type, inner, comp_const);
5213 /* T is an integer expression that is being multiplied, divided, or taken a
5214 modulus (CODE says which and what kind of divide or modulus) by a
5215 constant C. See if we can eliminate that operation by folding it with
5216 other operations already in T. WIDE_TYPE, if non-null, is a type that
5217 should be used for the computation if wider than our type.
5219 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5220 (X * 2) + (Y * 4). We must, however, be assured that either the original
5221 expression would not overflow or that overflow is undefined for the type
5222 in the language in question.
5224 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5225 the machine has a multiply-accumulate insn or that this is part of an
5226 addressing calculation.
5228 If we return a non-null expression, it is an equivalent form of the
5229 original computation, but need not be in the original type. */
/* NOTE(review): lossy excerpt -- the return-type line, braces, the
   recursion-depth counter this wrapper maintains (original lines
   5238-5245) and the final `return ret;' are all missing from view.
   Only the doc comment and the delegation call survive.  */
5232 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5234 /* To avoid exponential search depth, refuse to allow recursion past
5235 three levels. Beyond that (1) it's highly unlikely that we'll find
5236 something interesting and (2) we've probably processed it before
5237 when we built the inner expression. */
/* Delegate the real work; the depth bookkeeping around this call is
   on dropped lines.  */
5246 ret = extract_muldiv_1 (t, c, code, wide_type);
/* NOTE(review): lossy excerpt of the worker behind extract_muldiv.
   The `static tree' line, braces, the `switch (tcode)' header, several
   case labels (e.g. for INTEGER_CST and ABS_EXPR, inferred from the
   comments that survive) and a number of statements are missing.
   Comments below annotate only the visible lines.  */
5253 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
/* CTYPE is WIDE_TYPE when it is strictly wider than T's type,
   otherwise T's own type; results are built in CTYPE.  */
5255 tree type = TREE_TYPE (t);
5256 enum tree_code tcode = TREE_CODE (t);
5257 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5258 > GET_MODE_SIZE (TYPE_MODE (type)))
5259 ? wide_type : type);
5261 int same_p = tcode == code;
5262 tree op0 = NULL_TREE, op1 = NULL_TREE;
5264 /* Don't deal with constants of zero here; they confuse the code below. */
5265 if (integer_zerop (c))
/* NOTE(review): the guarded early return is on a dropped line.  */
5268 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5269 op0 = TREE_OPERAND (t, 0);
5271 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5272 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5274 /* Note that we need not handle conditional operations here since fold
5275 already handles those cases. So just do arithmetic here. */
/* NOTE(review): `switch (tcode)' and the INTEGER_CST case label were
   dropped here; the constant-folding code below is that case's body.  */
5279 /* For a constant, we can always simplify if we are a multiply
5280 or (for divide and modulus) if it is a multiple of our constant. */
5281 if (code == MULT_EXPR
5282 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5283 return const_binop (code, fold_convert (ctype, t),
5284 fold_convert (ctype, c), 0);
5287 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5288 /* If op0 is an expression ... */
5289 if ((COMPARISON_CLASS_P (op0)
5290 || UNARY_CLASS_P (op0)
5291 || BINARY_CLASS_P (op0)
5292 || EXPRESSION_CLASS_P (op0))
5293 /* ... and is unsigned, and its type is smaller than ctype,
5294 then we cannot pass through as widening. */
5295 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5296 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5297 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5298 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5299 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5300 /* ... or this is a truncation (t is narrower than op0),
5301 then we cannot pass through this narrowing. */
5302 || (GET_MODE_SIZE (TYPE_MODE (type))
5303 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5304 /* ... or signedness changes for division or modulus,
5305 then we cannot pass through this conversion. */
5306 || (code != MULT_EXPR
5307 && (TYPE_UNSIGNED (ctype)
5308 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5311 /* Pass the constant down and see if we can make a simplification. If
5312 we can, replace this expression with the inner simplification for
5313 possible later conversion to our or some other type. */
5314 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5315 && TREE_CODE (t2) == INTEGER_CST
5316 && ! TREE_CONSTANT_OVERFLOW (t2)
5317 && (0 != (t1 = extract_muldiv (op0, t2, code,
5319 ? ctype : NULL_TREE))))
/* NOTE(review): the ABS_EXPR case label presumably preceded the
   signedness dance below; it is not visible here.  */
5324 /* If widening the type changes it from signed to unsigned, then we
5325 must avoid building ABS_EXPR itself as unsigned. */
5326 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5328 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5329 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5331 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5332 return fold_convert (ctype, t1);
5338 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5339 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5342 case MIN_EXPR: case MAX_EXPR:
5343 /* If widening the type changes the signedness, then we can't perform
5344 this optimization as that changes the result. */
5345 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5348 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5349 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5350 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* A negative divisor/multiplier flips MIN <-> MAX.  */
5352 if (tree_int_cst_sgn (c) < 0)
5353 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5355 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5356 fold_convert (ctype, t2));
5360 case LSHIFT_EXPR: case RSHIFT_EXPR:
5361 /* If the second operand is constant, this is a multiplication
5362 or floor division, by a power of two, so we can treat it that
5363 way unless the multiplier or divisor overflows. Signed
5364 left-shift overflow is implementation-defined rather than
5365 undefined in C90, so do not convert signed left shift into
5367 if (TREE_CODE (op1) == INTEGER_CST
5368 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5369 /* const_binop may not detect overflow correctly,
5370 so check for it explicitly here. */
5371 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5372 && TREE_INT_CST_HIGH (op1) == 0
5373 && 0 != (t1 = fold_convert (ctype,
5374 const_binop (LSHIFT_EXPR,
5377 && ! TREE_OVERFLOW (t1))
5378 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5379 ? MULT_EXPR : FLOOR_DIV_EXPR,
5380 ctype, fold_convert (ctype, op0), t1),
5381 c, code, wide_type);
5384 case PLUS_EXPR: case MINUS_EXPR:
5385 /* See if we can eliminate the operation on both sides. If we can, we
5386 can return a new PLUS or MINUS. If we can't, the only remaining
5387 cases where we can do anything are if the second operand is a
5389 t1 = extract_muldiv (op0, c, code, wide_type);
5390 t2 = extract_muldiv (op1, c, code, wide_type);
5391 if (t1 != 0 && t2 != 0
5392 && (code == MULT_EXPR
5393 /* If not multiplication, we can only do this if both operands
5394 are divisible by c. */
5395 || (multiple_of_p (ctype, op0, c)
5396 && multiple_of_p (ctype, op1, c))))
5397 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5398 fold_convert (ctype, t2));
5400 /* If this was a subtraction, negate OP1 and set it to be an addition.
5401 This simplifies the logic below. */
5402 if (tcode == MINUS_EXPR)
5403 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5405 if (TREE_CODE (op1) != INTEGER_CST)
5408 /* If either OP1 or C are negative, this optimization is not safe for
5409 some of the division and remainder types while for others we need
5410 to change the code. */
5411 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5413 if (code == CEIL_DIV_EXPR)
5414 code = FLOOR_DIV_EXPR;
5415 else if (code == FLOOR_DIV_EXPR)
5416 code = CEIL_DIV_EXPR;
5417 else if (code != MULT_EXPR
5418 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5422 /* If it's a multiply or a division/modulus operation of a multiple
5423 of our constant, do the operation and verify it doesn't overflow. */
5424 if (code == MULT_EXPR
5425 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5427 op1 = const_binop (code, fold_convert (ctype, op1),
5428 fold_convert (ctype, c), 0);
5429 /* We allow the constant to overflow with wrapping semantics. */
5431 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5437 /* If we have an unsigned type that is not a sizetype, we cannot widen
5438 the operation since it will change the result if the original
5439 computation overflowed. */
5440 if (TYPE_UNSIGNED (ctype)
5441 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5445 /* If we were able to eliminate our operation from the first side,
5446 apply our operation to the second side and reform the PLUS. */
5447 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5448 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5450 /* The last case is if we are a multiply. In that case, we can
5451 apply the distributive law to commute the multiply and addition
5452 if the multiplication of the constants doesn't overflow. */
5453 if (code == MULT_EXPR)
5454 return fold_build2 (tcode, ctype,
5455 fold_build2 (code, ctype,
5456 fold_convert (ctype, op0),
5457 fold_convert (ctype, c)),
/* NOTE(review): the second argument of this fold_build2 and the MULT/
   MOD case labels that follow are on dropped lines.  */
5463 /* We have a special case here if we are doing something like
5464 (C * 8) % 4 since we know that's zero. */
5465 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5466 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5467 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5468 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5469 return omit_one_operand (type, integer_zero_node, op0);
5471 /* ... fall through ... */
5473 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5474 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5475 /* If we can extract our operation from the LHS, do so and return a
5476 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5477 do something only if the second operand is a constant. */
5479 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5480 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5481 fold_convert (ctype, op1));
5482 else if (tcode == MULT_EXPR && code == MULT_EXPR
5483 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5484 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5485 fold_convert (ctype, t1));
5486 else if (TREE_CODE (op1) != INTEGER_CST)
5489 /* If these are the same operation types, we can associate them
5490 assuming no overflow. */
5492 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5493 fold_convert (ctype, c), 0))
5494 && ! TREE_OVERFLOW (t1))
5495 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5497 /* If these operations "cancel" each other, we have the main
5498 optimizations of this pass, which occur when either constant is a
5499 multiple of the other, in which case we replace this with either an
5500 operation or CODE or TCODE.
5502 If we have an unsigned type that is not a sizetype, we cannot do
5503 this since it will change the result if the original computation
5505 if ((! TYPE_UNSIGNED (ctype)
5506 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5508 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5509 || (tcode == MULT_EXPR
5510 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5511 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* OP1 divisible by C: fold the quotient into the inner operation.  */
5513 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5514 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5515 fold_convert (ctype,
5516 const_binop (TRUNC_DIV_EXPR,
/* C divisible by OP1: the outer CODE survives with the quotient.  */
5518 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5519 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5520 fold_convert (ctype,
5521 const_binop (TRUNC_DIV_EXPR,
5533 /* Return a node which has the indicated constant VALUE (either 0 or
5534 1), and is of the indicated TYPE. */
/* NOTE(review): the function's opening/closing braces and the final
   `else' keyword were lost in extraction; only the return-type line
   and the three result branches survive.  */
5537 constant_boolean_node (int value, tree type)
/* Reuse the shared global nodes for the two canonical boolean-like
   types; otherwise build a fresh integer constant of TYPE.  */
5539 if (type == integer_type_node)
5540 return value ? integer_one_node : integer_zero_node;
5541 else if (type == boolean_type_node)
5542 return value ? boolean_true_node : boolean_false_node;
5544 return build_int_cst (type, value);
5548 /* Return true if expr looks like an ARRAY_REF and set base and
5549 offset to the appropriate trees. If there is no offset,
5550 offset is set to NULL_TREE. Base will be canonicalized to
5551 something you can get the element type from using
5552 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5553 in bytes to the base. */
/* NOTE(review): lossy excerpt -- the return-type line, braces and the
   `return true;'/`return false;' statements of each branch are on
   dropped lines, as is the *BASE assignment of the PLUS_EXPR branch.  */
5556 extract_array_ref (tree expr, tree *base, tree *offset)
5558 /* One canonical form is a PLUS_EXPR with the first
5559 argument being an ADDR_EXPR with a possible NOP_EXPR
5561 if (TREE_CODE (expr) == PLUS_EXPR)
5563 tree op0 = TREE_OPERAND (expr, 0);
5564 tree inner_base, dummy1;
5565 /* Strip NOP_EXPRs here because the C frontends and/or
5566 folders present us (int *)&x.a + 4B possibly. */
5568 if (extract_array_ref (op0, &inner_base, &dummy1))
/* Combine the inner offset (if any) with this PLUS's addend.  */
5571 if (dummy1 == NULL_TREE)
5572 *offset = TREE_OPERAND (expr, 1);
5574 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5575 dummy1, TREE_OPERAND (expr, 1));
5579 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5580 which we transform into an ADDR_EXPR with appropriate
5581 offset. For other arguments to the ADDR_EXPR we assume
5582 zero offset and as such do not care about the ADDR_EXPR
5583 type and strip possible nops from it. */
5584 else if (TREE_CODE (expr) == ADDR_EXPR)
5586 tree op0 = TREE_OPERAND (expr, 0);
5587 if (TREE_CODE (op0) == ARRAY_REF)
/* Byte offset = index * element size.  */
5589 tree idx = TREE_OPERAND (op0, 1);
5590 *base = TREE_OPERAND (op0, 0);
5591 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5592 array_ref_element_size (op0));
5596 /* Handle array-to-pointer decay as &a. */
5597 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5598 *base = TREE_OPERAND (expr, 0);
5601 *offset = NULL_TREE;
5605 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5606 else if (SSA_VAR_P (expr)
5607 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5610 *offset = NULL_TREE;
5618 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5619 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5620 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5621 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5622 COND is the first argument to CODE; otherwise (as in the example
5623 given here), it is the second argument. TYPE is the type of the
5624 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): lossy excerpt -- braces, the early-return statements
   and the `if (cond_first_p)'/`else' selectors around the paired
   fold_build2 calls below are on dropped lines.  */
5628 fold_binary_op_with_conditional_arg (enum tree_code code,
5629 tree type, tree op0, tree op1,
5630 tree cond, tree arg, int cond_first_p)
5632 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5633 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5634 tree test, true_value, false_value;
5635 tree lhs = NULL_TREE;
5636 tree rhs = NULL_TREE;
5638 /* This transformation is only worthwhile if we don't have to wrap
5639 arg in a SAVE_EXPR, and the operation can be simplified on at least
5640 one of the branches once its pushed inside the COND_EXPR. */
5641 if (!TREE_CONSTANT (arg))
5644 if (TREE_CODE (cond) == COND_EXPR)
5646 test = TREE_OPERAND (cond, 0);
5647 true_value = TREE_OPERAND (cond, 1);
5648 false_value = TREE_OPERAND (cond, 2);
5649 /* If this operand throws an expression, then it does not make
5650 sense to try to perform a logical or arithmetic operation
5652 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5654 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* Not a COND_EXPR: treat COND as a boolean test and synthesize the
   constant true/false arms ourselves.  */
5659 tree testtype = TREE_TYPE (cond);
5661 true_value = constant_boolean_node (true, testtype);
5662 false_value = constant_boolean_node (false, testtype);
5665 arg = fold_convert (arg_type, arg);
/* Build each arm; the two orderings reflect COND_FIRST_P (the
   selecting `if'/`else' lines are not visible here).  */
5668 true_value = fold_convert (cond_type, true_value);
5670 lhs = fold_build2 (code, type, true_value, arg);
5672 lhs = fold_build2 (code, type, arg, true_value);
5676 false_value = fold_convert (cond_type, false_value);
5678 rhs = fold_build2 (code, type, false_value, arg);
5680 rhs = fold_build2 (code, type, arg, false_value);
5683 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5684 return fold_convert (type, test);
5688 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5690 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5691 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5692 ADDEND is the same as X.
5694 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5695 and finite. The problematic cases are when X is zero, and its mode
5696 has signed zeros. In the case of rounding towards -infinity,
5697 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5698 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): lossy excerpt -- the return-type line, braces and the
   `return' statements of the first four guards are on dropped lines;
   only the conditions and the final return survive.  */
5701 fold_real_zero_addition_p (tree type, tree addend, int negate)
5703 if (!real_zerop (addend))
5706 /* Don't allow the fold with -fsignaling-nans. */
5707 if (HONOR_SNANS (TYPE_MODE (type)))
5710 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5711 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5714 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5715 if (TREE_CODE (addend) == REAL_CST
5716 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5719 /* The mode has signed zeros, and we have to honor their sign.
5720 In this situation, there is only one case we can return true for.
5721 X - 0 is the same as X unless rounding towards -infinity is
5723 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5726 /* Subroutine of fold() that checks comparisons of built-in math
5727 functions against real constants.
5729 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5730 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5731 is the type of the result and ARG0 and ARG1 are the operands of the
5732 comparison. ARG1 must be a TREE_REAL_CST.
5734 The function returns the constant folded tree if a simplification
5735 can be made, and NULL_TREE otherwise. */
/* NOTE(review): lossy excerpt -- the return-type line, braces, the
   declarations of the REAL_VALUE_TYPE locals `c' and `c2', and several
   `return NULL_TREE;' fall-through lines are missing from view.  */
5738 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5739 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled in the visible lines.  */
5743 if (BUILTIN_SQRT_P (fcode))
5745 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5746 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5748 c = TREE_REAL_CST (arg1);
5749 if (REAL_VALUE_NEGATIVE (c))
5751 /* sqrt(x) < y is always false, if y is negative. */
5752 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5753 return omit_one_operand (type, integer_zero_node, arg);
5755 /* sqrt(x) > y is always true, if y is negative and we
5756 don't care about NaNs, i.e. negative values of x. */
5757 if (code == NE_EXPR || !HONOR_NANS (mode))
5758 return omit_one_operand (type, integer_one_node, arg);
5760 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5761 return fold_build2 (GE_EXPR, type, arg,
5762 build_real (TREE_TYPE (arg), dconst0));
5764 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound (c2 = c*c) in the comparison's mode.  */
5768 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5769 real_convert (&c2, mode, &c2);
5771 if (REAL_VALUE_ISINF (c2))
5773 /* sqrt(x) > y is x == +Inf, when y is very large. */
5774 if (HONOR_INFINITIES (mode))
5775 return fold_build2 (EQ_EXPR, type, arg,
5776 build_real (TREE_TYPE (arg), c2));
5778 /* sqrt(x) > y is always false, when y is very large
5779 and we don't care about infinities. */
5780 return omit_one_operand (type, integer_zero_node, arg);
5783 /* sqrt(x) > c is the same as x > c*c. */
5784 return fold_build2 (code, type, arg,
5785 build_real (TREE_TYPE (arg), c2));
5787 else if (code == LT_EXPR || code == LE_EXPR)
5791 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5792 real_convert (&c2, mode, &c2);
5794 if (REAL_VALUE_ISINF (c2))
5796 /* sqrt(x) < y is always true, when y is a very large
5797 value and we don't care about NaNs or Infinities. */
5798 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5799 return omit_one_operand (type, integer_one_node, arg);
5801 /* sqrt(x) < y is x != +Inf when y is very large and we
5802 don't care about NaNs. */
5803 if (! HONOR_NANS (mode))
5804 return fold_build2 (NE_EXPR, type, arg,
5805 build_real (TREE_TYPE (arg), c2));
5807 /* sqrt(x) < y is x >= 0 when y is very large and we
5808 don't care about Infinities. */
5809 if (! HONOR_INFINITIES (mode))
5810 return fold_build2 (GE_EXPR, type, arg,
5811 build_real (TREE_TYPE (arg), dconst0));
5813 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5814 if (lang_hooks.decls.global_bindings_p () != 0
5815 || CONTAINS_PLACEHOLDER_P (arg))
/* ARG is evaluated twice below, so it must be wrapped in a
   SAVE_EXPR (the guarded bail-out above is on a dropped line).  */
5818 arg = save_expr (arg);
5819 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5820 fold_build2 (GE_EXPR, type, arg,
5821 build_real (TREE_TYPE (arg),
5823 fold_build2 (NE_EXPR, type, arg,
5824 build_real (TREE_TYPE (arg),
5828 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5829 if (! HONOR_NANS (mode))
5830 return fold_build2 (code, type, arg,
5831 build_real (TREE_TYPE (arg), c2));
5833 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5834 if (lang_hooks.decls.global_bindings_p () == 0
5835 && ! CONTAINS_PLACEHOLDER_P (arg))
5837 arg = save_expr (arg);
5838 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5839 fold_build2 (GE_EXPR, type, arg,
5840 build_real (TREE_TYPE (arg),
5842 fold_build2 (code, type, arg,
5843 build_real (TREE_TYPE (arg),
5852 /* Subroutine of fold() that optimizes comparisons against Infinities,
5853 either +Inf or -Inf.
5855 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5856 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5857 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5859 The function returns the constant folded tree if a simplification
5860 can be made, and NULL_TREE otherwise. */
5863 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5865 enum machine_mode mode;
5866 REAL_VALUE_TYPE max;
/* The floating-point mode of ARG0 drives the HONOR_NANS/HONOR_INFINITIES
   queries below.  */
5870 mode = TYPE_MODE (TREE_TYPE (arg0));
5872 /* For negative infinity swap the sense of the comparison. */
5873 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5875 code = swap_tree_comparison (code);
/* NOTE(review): the dispatch on CODE (switch statement, case labels and
   braces) is elided in this excerpt; each group below handles one
   comparison code, as described by its leading comment.  */
5880 /* x > +Inf is always false, if we ignore sNaNs. */
5881 if (HONOR_SNANS (mode))
5883 return omit_one_operand (type, integer_zero_node, arg0);
5886 /* x <= +Inf is always true, if we don't care about NaNs. */
5887 if (! HONOR_NANS (mode))
5888 return omit_one_operand (type, integer_one_node, arg0);
5890 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5891 if (lang_hooks.decls.global_bindings_p () == 0
5892 && ! CONTAINS_PLACEHOLDER_P (arg0))
/* ARG0 appears twice in the rewritten tree, so evaluate it only once.  */
5894 arg0 = save_expr (arg0);
5895 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5901 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5902 real_maxval (&max, neg, mode);
5903 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5904 arg0, build_real (TREE_TYPE (arg0), max));
5907 /* x < +Inf is always equal to x <= DBL_MAX. */
5908 real_maxval (&max, neg, mode);
5909 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5910 arg0, build_real (TREE_TYPE (arg0), max));
5913 /* x != +Inf is always equal to !(x > DBL_MAX). */
5914 real_maxval (&max, neg, mode);
5915 if (! HONOR_NANS (mode))
5916 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5917 arg0, build_real (TREE_TYPE (arg0), max));
5919 /* The transformation below creates non-gimple code and thus is
5920 not appropriate if we are in gimple form. */
5924 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5925 arg0, build_real (TREE_TYPE (arg0), max));
5926 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5935 /* Subroutine of fold() that optimizes comparisons of a division by
5936 a nonzero integer constant against an integer constant, i.e.
5939 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5940 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5941 are the operands of the comparison. ARG1 must be an INTEGER_CST (the
body below applies TREE_INT_CST_LOW/HIGH to it; the previous
"TREE_REAL_CST" wording was copied from fold_inf_compare by mistake).
5943 The function returns the constant folded tree if a simplification
5944 can be made, and NULL_TREE otherwise. */
5947 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5949 tree prod, tmp, hi, lo;
5950 tree arg00 = TREE_OPERAND (arg0, 0);
5951 tree arg01 = TREE_OPERAND (arg0, 1);
5952 unsigned HOST_WIDE_INT lpart;
5953 HOST_WIDE_INT hpart;
5956 /* We have to do this the hard way to detect unsigned overflow.
5957 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5958 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5959 TREE_INT_CST_HIGH (arg01),
5960 TREE_INT_CST_LOW (arg1),
5961 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5962 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5963 prod = force_fit_type (prod, -1, overflow, false);
/* Compute [LO, HI], the range of dividend values for which
   arg00 / arg01 compares equal to arg1.  The bounds depend on the
   signedness of the type and on the signs of divisor and constant.
   NOTE(review): several structural lines (braces, case labels) of the
   original are elided in this excerpt.  */
5965 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5967 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5970 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5971 overflow = add_double (TREE_INT_CST_LOW (prod),
5972 TREE_INT_CST_HIGH (prod),
5973 TREE_INT_CST_LOW (tmp),
5974 TREE_INT_CST_HIGH (tmp),
5976 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5977 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5978 TREE_CONSTANT_OVERFLOW (prod));
5980 else if (tree_int_cst_sgn (arg01) >= 0)
5982 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5983 switch (tree_int_cst_sgn (arg1))
5986 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5991 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5996 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6006 /* A negative divisor reverses the relational operators. */
6007 code = swap_tree_comparison (code);
6009 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6010 switch (tree_int_cst_sgn (arg1))
6013 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6018 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6023 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Rewrite the comparison of the quotient as a range check on the
   dividend; an overflowed bound trivializes one side of the range.
   First group: equality folds to an in-range check; second group:
   inequality folds to an out-of-range check (case labels elided).  */
6035 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6036 return omit_one_operand (type, integer_zero_node, arg00);
6037 if (TREE_OVERFLOW (hi))
6038 return fold_build2 (GE_EXPR, type, arg00, lo);
6039 if (TREE_OVERFLOW (lo))
6040 return fold_build2 (LE_EXPR, type, arg00, hi);
6041 return build_range_check (type, arg00, 1, lo, hi);
6044 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6045 return omit_one_operand (type, integer_one_node, arg00);
6046 if (TREE_OVERFLOW (hi))
6047 return fold_build2 (LT_EXPR, type, arg00, lo);
6048 if (TREE_OVERFLOW (lo))
6049 return fold_build2 (GT_EXPR, type, arg00, hi);
6050 return build_range_check (type, arg00, 0, lo, hi);
/* Remaining groups fold the ordered comparisons against the
   matching range bound (case labels elided).  */
6053 if (TREE_OVERFLOW (lo))
6054 return omit_one_operand (type, integer_zero_node, arg00);
6055 return fold_build2 (LT_EXPR, type, arg00, lo);
6058 if (TREE_OVERFLOW (hi))
6059 return omit_one_operand (type, integer_one_node, arg00);
6060 return fold_build2 (LE_EXPR, type, arg00, hi);
6063 if (TREE_OVERFLOW (hi))
6064 return omit_one_operand (type, integer_zero_node, arg00);
6065 return fold_build2 (GT_EXPR, type, arg00, hi);
6068 if (TREE_OVERFLOW (lo))
6069 return omit_one_operand (type, integer_one_node, arg00);
6070 return fold_build2 (GE_EXPR, type, arg00, lo);
6080 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6081 equality/inequality test, then return a simplified form of the test
6082 using a sign testing. Otherwise return NULL. TYPE is the desired
6086 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6089 /* If this is testing a single bit, we can optimize the test. */
6090 if ((code == NE_EXPR || code == EQ_EXPR)
6091 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6092 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6094 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6095 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
/* sign_bit_p returns the operand being masked iff the mask is exactly
   its sign bit, otherwise NULL_TREE.  */
6096 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6098 if (arg00 != NULL_TREE
6099 /* This is only a win if casting to a signed type is cheap,
6100 i.e. when arg00's type is not a partial mode. */
6101 && TYPE_PRECISION (TREE_TYPE (arg00))
6102 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
/* (A & signbit) == 0  ->  (signed) A >= 0
   (A & signbit) != 0  ->  (signed) A < 0  */
6104 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6105 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6106 result_type, fold_convert (stype, arg00),
6107 fold_convert (stype, integer_zero_node));
6114 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6115 equality/inequality test, then return a simplified form of
6116 the test using shifts and logical operations. Otherwise return
6117 NULL. TYPE is the desired result type. */
6120 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6123 /* If this is testing a single bit, we can optimize the test. */
6124 if ((code == NE_EXPR || code == EQ_EXPR)
6125 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6126 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6128 tree inner = TREE_OPERAND (arg0, 0);
6129 tree type = TREE_TYPE (arg0);
/* BITNUM is log2 of the single-bit mask, i.e. the bit position tested.  */
6130 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6131 enum machine_mode operand_mode = TYPE_MODE (type);
6133 tree signed_type, unsigned_type, intermediate_type;
6136 /* First, see if we can fold the single bit test into a sign-bit
6138 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6143 /* Otherwise we have (A & C) != 0 where C is a single bit,
6144 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6145 Similarly for (A & C) == 0. */
6147 /* If INNER is a right shift of a constant and it plus BITNUM does
6148 not overflow, adjust BITNUM and INNER. */
6149 if (TREE_CODE (inner) == RSHIFT_EXPR
6150 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6151 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6152 && bitnum < TYPE_PRECISION (type)
6153 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6154 bitnum - TYPE_PRECISION (type)))
/* Fold the outer bit test into the inner shift: test bit
   (bitnum + shift-count) of the shift's operand instead.  */
6156 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6157 inner = TREE_OPERAND (inner, 0);
6160 /* If we are going to be able to omit the AND below, we must do our
6161 operations as unsigned. If we must use the AND, we have a choice.
6162 Normally unsigned is faster, but for some machines signed is. */
6163 #ifdef LOAD_EXTEND_OP
6164 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6165 && !flag_syntax_only) ? 0 : 1;
6170 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6171 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6172 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6173 inner = fold_convert (intermediate_type, inner);
/* Build ((inner >> bitnum) & 1), xor-ing with 1 first for the
   EQ_EXPR (bit clear) form.  */
6176 inner = build2 (RSHIFT_EXPR, intermediate_type,
6177 inner, size_int (bitnum));
6179 if (code == EQ_EXPR)
6180 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6181 inner, integer_one_node);
6183 /* Put the AND last so it can combine with more things. */
6184 inner = build2 (BIT_AND_EXPR, intermediate_type,
6185 inner, integer_one_node);
6187 /* Make sure to return the proper type. */
6188 inner = fold_convert (result_type, inner);
6195 /* Check whether we are allowed to reorder operands arg0 and arg1,
6196 such that the evaluation of arg1 occurs before arg0. */
6199 reorder_operands_p (tree arg0, tree arg1)
/* Reordering is always allowed unless -ffloat-store style strict
   evaluation ordering is requested; a constant operand also makes
   order irrelevant.  (The then-branches of both tests are elided in
   this excerpt — presumably "return 1;" — TODO confirm.)  */
6201 if (! flag_evaluation_order)
6203 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise, reordering is safe only if neither operand has side
   effects.  */
6205 return ! TREE_SIDE_EFFECTS (arg0)
6206 && ! TREE_SIDE_EFFECTS (arg1);
6209 /* Test whether it is preferable to swap two operands, ARG0 and
6210 ARG1, for example because ARG0 is an integer constant and ARG1
6211 isn't. If REORDER is true, only recommend swapping if we can
6212 evaluate the operands in reverse order. */
6215 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Strip conversions that do not affect signedness before classifying.  */
6217 STRIP_SIGN_NOPS (arg0);
6218 STRIP_SIGN_NOPS (arg1);
/* Canonical order puts constants second: for each constant kind, a
   constant ARG1 means "don't swap" and a constant ARG0 means "swap".
   (The single-statement return bodies are elided in this excerpt.)  */
6220 if (TREE_CODE (arg1) == INTEGER_CST)
6222 if (TREE_CODE (arg0) == INTEGER_CST)
6225 if (TREE_CODE (arg1) == REAL_CST)
6227 if (TREE_CODE (arg0) == REAL_CST)
6230 if (TREE_CODE (arg1) == COMPLEX_CST)
6232 if (TREE_CODE (arg0) == COMPLEX_CST)
6235 if (TREE_CONSTANT (arg1))
6237 if (TREE_CONSTANT (arg0))
/* With REORDER set, refuse to swap when strict evaluation order is in
   force and either operand has side effects.  */
6243 if (reorder && flag_evaluation_order
6244 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6252 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6253 for commutative and comparison operators. Ensuring a canonical
6254 form allows the optimizers to find additional redundancies without
6255 having to explicitly check for both orderings. */
6256 if (TREE_CODE (arg0) == SSA_NAME
6257 && TREE_CODE (arg1) == SSA_NAME
6258 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6264 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6265 ARG0 is extended to a wider type. */
6268 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6270 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6272 tree shorter_type, outer_type;
/* If ARG0 was not actually a widening, there is nothing to do.  */
6276 if (arg0_unw == arg0)
6278 shorter_type = TREE_TYPE (arg0_unw);
6280 #ifdef HAVE_canonicalize_funcptr_for_compare
6281 /* Disable this optimization if we're casting a function pointer
6282 type on targets that require function pointer canonicalization. */
6283 if (HAVE_canonicalize_funcptr_for_compare
6284 && TREE_CODE (shorter_type) == POINTER_TYPE
6285 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6289 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6292 arg1_unw = get_unwidened (arg1, shorter_type);
6294 /* If possible, express the comparison in the shorter mode. */
6295 if ((code == EQ_EXPR || code == NE_EXPR
6296 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6297 && (TREE_TYPE (arg1_unw) == shorter_type
6298 || (TREE_CODE (arg1_unw) == INTEGER_CST
6299 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6300 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6301 && int_fits_type_p (arg1_unw, shorter_type))))
6302 return fold_build2 (code, type, arg0_unw,
6303 fold_convert (shorter_type, arg1_unw));
6305 if (TREE_CODE (arg1_unw) != INTEGER_CST
6306 || TREE_CODE (shorter_type) != INTEGER_TYPE
6307 || !int_fits_type_p (arg1_unw, shorter_type))
6310 /* If we are comparing with the integer that does not fit into the range
6311 of the shorter type, the result is known. */
6312 outer_type = TREE_TYPE (arg1_unw);
6313 min = lower_bound_in_type (outer_type, shorter_type);
6314 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE/BELOW classify the constant against the shorter type's range
   (the remaining call arguments are elided in this excerpt).  */
6316 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6318 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* Each pair below resolves one comparison code to a known constant,
   depending on whether the constant lies above or below the range
   (switch/case structure elided in this excerpt).  */
6325 return omit_one_operand (type, integer_zero_node, arg0);
6330 return omit_one_operand (type, integer_one_node, arg0);
6336 return omit_one_operand (type, integer_one_node, arg0);
6338 return omit_one_operand (type, integer_zero_node, arg0);
6343 return omit_one_operand (type, integer_zero_node, arg0);
6345 return omit_one_operand (type, integer_one_node, arg0);
6354 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6355 ARG0 just the signedness is changed. */
6358 fold_sign_changed_comparison (enum tree_code code, tree type,
6359 tree arg0, tree arg1)
6361 tree arg0_inner, tmp;
6362 tree inner_type, outer_type;
/* Only applies when ARG0 is a conversion.  */
6364 if (TREE_CODE (arg0) != NOP_EXPR
6365 && TREE_CODE (arg0) != CONVERT_EXPR)
6368 outer_type = TREE_TYPE (arg0);
6369 arg0_inner = TREE_OPERAND (arg0, 0);
6370 inner_type = TREE_TYPE (arg0_inner);
6372 #ifdef HAVE_canonicalize_funcptr_for_compare
6373 /* Disable this optimization if we're casting a function pointer
6374 type on targets that require function pointer canonicalization. */
6375 if (HAVE_canonicalize_funcptr_for_compare
6376 && TREE_CODE (inner_type) == POINTER_TYPE
6377 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* A sign-change conversion keeps the precision; anything else is a
   widening/narrowing and is not handled here.  */
6381 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
/* ARG1 must be a constant, or a conversion from the same inner type.  */
6384 if (TREE_CODE (arg1) != INTEGER_CST
6385 && !((TREE_CODE (arg1) == NOP_EXPR
6386 || TREE_CODE (arg1) == CONVERT_EXPR)
6387 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6390 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant in the inner type, preserving the
   overflow flags of the original constant.  */
6395 if (TREE_CODE (arg1) == INTEGER_CST)
6397 tmp = build_int_cst_wide (inner_type,
6398 TREE_INT_CST_LOW (arg1),
6399 TREE_INT_CST_HIGH (arg1));
6400 arg1 = force_fit_type (tmp, 0,
6401 TREE_OVERFLOW (arg1),
6402 TREE_CONSTANT_OVERFLOW (arg1));
6405 arg1 = fold_convert (inner_type, arg1);
/* Compare the unconverted operand directly.  */
6407 return fold_build2 (code, type, arg0_inner, arg1);
6410 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6411 step of the array. Reconstructs s and delta in the case of s * delta
6412 being an integer constant (and thus already folded).
6413 ADDR is the address. MULT is the multiplicative expression.
6414 If the function succeeds, the new address expression is returned. Otherwise
6415 NULL_TREE is returned. */
6418 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6420 tree s, delta, step;
6421 tree ref = TREE_OPERAND (addr, 0), pref;
6425 /* Canonicalize op1 into a possibly non-constant delta
6426 and an INTEGER_CST s. */
6427 if (TREE_CODE (op1) == MULT_EXPR)
6429 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
/* Whichever MULT operand is constant becomes S, the other DELTA
   (assignments elided in this excerpt).  */
6434 if (TREE_CODE (arg0) == INTEGER_CST)
6439 else if (TREE_CODE (arg1) == INTEGER_CST)
6447 else if (TREE_CODE (op1) == INTEGER_CST)
6454 /* Simulate we are delta * 1. */
6456 s = integer_one_node;
/* Walk down the handled components of REF looking for an ARRAY_REF
   whose element size matches S (or divides DELTA evenly).  */
6459 for (;; ref = TREE_OPERAND (ref, 0))
6461 if (TREE_CODE (ref) == ARRAY_REF)
6463 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6467 step = array_ref_element_size (ref);
6468 if (TREE_CODE (step) != INTEGER_CST)
6473 if (! tree_int_cst_equal (step, s))
6478 /* Try if delta is a multiple of step. */
6479 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6488 if (!handled_component_p (ref))
6492 /* We found the suitable array reference. So copy everything up to it,
6493 and replace the index. */
6495 pref = TREE_OPERAND (addr, 0);
6496 ret = copy_node (pref);
/* Duplicate the chain of component refs down to the ARRAY_REF so
   the original tree is left unmodified.  */
6501 pref = TREE_OPERAND (pref, 0);
6502 TREE_OPERAND (pos, 0) = copy_node (pref);
6503 pos = TREE_OPERAND (pos, 0);
/* Apply CODE (PLUS/MINUS) to the array index in the index type.  */
6506 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6507 fold_convert (itype,
6508 TREE_OPERAND (pos, 1)),
6509 fold_convert (itype, delta));
6511 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6515 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6516 means A >= Y && A != MAX, but in this case we know that
6517 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6520 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6522 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, which must be A < X or X > A.  */
6524 if (TREE_CODE (bound) == LT_EXPR)
6525 a = TREE_OPERAND (bound, 0);
6526 else if (TREE_CODE (bound) == GT_EXPR)
6527 a = TREE_OPERAND (bound, 1);
6531 typea = TREE_TYPE (a);
/* The transformation relies on discrete ordering, so A must be of
   integral or pointer type.  */
6532 if (!INTEGRAL_TYPE_P (typea)
6533 && !POINTER_TYPE_P (typea))
/* Extract A1 (expected to be A + 1) and Y from the inequality, which
   must be Y < A1 or A1 > Y.  */
6536 if (TREE_CODE (ineq) == LT_EXPR)
6538 a1 = TREE_OPERAND (ineq, 1);
6539 y = TREE_OPERAND (ineq, 0);
6541 else if (TREE_CODE (ineq) == GT_EXPR)
6543 a1 = TREE_OPERAND (ineq, 0);
6544 y = TREE_OPERAND (ineq, 1);
6549 if (TREE_TYPE (a1) != typea)
/* Verify that A1 - A folds to exactly one, i.e. A1 is A + 1.  */
6552 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6553 if (!integer_onep (diff))
/* A + 1 > Y becomes the non-sharp A >= Y.  */
6556 return fold_build2 (GE_EXPR, type, a, y);
6559 /* Fold a sum or difference of at least one multiplication.
6560 Returns the folded tree or NULL if no simplification could be made. */
6563 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6565 tree arg00, arg01, arg10, arg11;
6566 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6568 /* (A * C) +- (B * C) -> (A+-B) * C.
6569 (A * C) +- A -> A * (C+-1).
6570 We are most concerned about the case where C is a constant,
6571 but other combinations show up during loop reduction. Since
6572 it is not difficult, try all four possibilities. */
/* Decompose ARG0 as arg00 * arg01; a non-MULT operand is treated as
   itself times one (else-branch partially elided in this excerpt).  */
6574 if (TREE_CODE (arg0) == MULT_EXPR)
6576 arg00 = TREE_OPERAND (arg0, 0);
6577 arg01 = TREE_OPERAND (arg0, 1);
6582 if (!FLOAT_TYPE_P (type))
6583 arg01 = build_int_cst (type, 1);
6585 arg01 = build_real (type, dconst1);
/* Likewise decompose ARG1 as arg10 * arg11.  */
6587 if (TREE_CODE (arg1) == MULT_EXPR)
6589 arg10 = TREE_OPERAND (arg1, 0);
6590 arg11 = TREE_OPERAND (arg1, 1);
6595 if (!FLOAT_TYPE_P (type))
6596 arg11 = build_int_cst (type, 1);
6598 arg11 = build_real (type, dconst1);
/* Look for a factor common to both products; SAME is the shared
   factor, ALT0/ALT1 the remaining ones.  */
6602 if (operand_equal_p (arg01, arg11, 0))
6603 same = arg01, alt0 = arg00, alt1 = arg10;
6604 else if (operand_equal_p (arg00, arg10, 0))
6605 same = arg00, alt0 = arg01, alt1 = arg11;
6606 else if (operand_equal_p (arg00, arg11, 0))
6607 same = arg00, alt0 = arg01, alt1 = arg10;
6608 else if (operand_equal_p (arg01, arg10, 0))
6609 same = arg01, alt0 = arg00, alt1 = arg11;
6611 /* No identical multiplicands; see if we can find a common
6612 power-of-two factor in non-power-of-two multiplies. This
6613 can help in multi-dimensional array access. */
6614 else if (host_integerp (arg01, 0)
6615 && host_integerp (arg11, 0))
6617 HOST_WIDE_INT int01, int11, tmp;
6620 int01 = TREE_INT_CST_LOW (arg01);
6621 int11 = TREE_INT_CST_LOW (arg11);
6623 /* Move min of absolute values to int11. */
6624 if ((int01 >= 0 ? int01 : -int01)
6625 < (int11 >= 0 ? int11 : -int11))
6627 tmp = int01, int01 = int11, int11 = tmp;
6628 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* If the smaller constant is a power of two dividing the larger,
   fold its quotient into one multiplicand and share the power of two
   (some lines of this branch are elided in this excerpt).  */
6635 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6637 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6638 build_int_cst (TREE_TYPE (arg00),
6643 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* (alt0 * same) +- (alt1 * same)  ->  (alt0 +- alt1) * same.  */
6648 return fold_build2 (MULT_EXPR, type,
6649 fold_build2 (code, type,
6650 fold_convert (type, alt0),
6651 fold_convert (type, alt1)),
6652 fold_convert (type, same));
6657 /* Fold a unary expression of code CODE and type TYPE with operand
6658 OP0. Return the folded expression if folding is successful.
6659 Otherwise, return NULL_TREE. */
6662 fold_unary (enum tree_code code, tree type, tree op0)
6666 enum tree_code_class kind = TREE_CODE_CLASS (code);
6668 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6669 && TREE_CODE_LENGTH (code) == 1);
6674 if (code == NOP_EXPR || code == CONVERT_EXPR
6675 || code == FLOAT_EXPR || code == ABS_EXPR)
6677 /* Don't use STRIP_NOPS, because signedness of argument type
6679 STRIP_SIGN_NOPS (arg0);
6683 /* Strip any conversions that don't change the mode. This
6684 is safe for every expression, except for a comparison
6685 expression because its signedness is derived from its
6688 Note that this is done as an internal manipulation within
6689 the constant folder, in order to find the simplest
6690 representation of the arguments so that their form can be
6691 studied. In any cases, the appropriate type conversions
6692 should be put back in the tree that will get out of the
6698 if (TREE_CODE_CLASS (code) == tcc_unary)
6700 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6701 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6702 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6703 else if (TREE_CODE (arg0) == COND_EXPR)
6705 tree arg01 = TREE_OPERAND (arg0, 1);
6706 tree arg02 = TREE_OPERAND (arg0, 2);
6707 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6708 arg01 = fold_build1 (code, type, arg01);
6709 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6710 arg02 = fold_build1 (code, type, arg02);
6711 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6714 /* If this was a conversion, and all we did was to move into
6715 inside the COND_EXPR, bring it back out. But leave it if
6716 it is a conversion from integer to integer and the
6717 result precision is no wider than a word since such a
6718 conversion is cheap and may be optimized away by combine,
6719 while it couldn't if it were outside the COND_EXPR. Then return
6720 so we don't get into an infinite recursion loop taking the
6721 conversion out and then back in. */
6723 if ((code == NOP_EXPR || code == CONVERT_EXPR
6724 || code == NON_LVALUE_EXPR)
6725 && TREE_CODE (tem) == COND_EXPR
6726 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6727 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6728 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6729 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6730 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6731 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6732 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6734 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6735 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6736 || flag_syntax_only))
6737 tem = build1 (code, type,
6739 TREE_TYPE (TREE_OPERAND
6740 (TREE_OPERAND (tem, 1), 0)),
6741 TREE_OPERAND (tem, 0),
6742 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6743 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6746 else if (COMPARISON_CLASS_P (arg0))
6748 if (TREE_CODE (type) == BOOLEAN_TYPE)
6750 arg0 = copy_node (arg0);
6751 TREE_TYPE (arg0) = type;
6754 else if (TREE_CODE (type) != INTEGER_TYPE)
6755 return fold_build3 (COND_EXPR, type, arg0,
6756 fold_build1 (code, type,
6758 fold_build1 (code, type,
6759 integer_zero_node));
6768 case FIX_TRUNC_EXPR:
6770 case FIX_FLOOR_EXPR:
6771 case FIX_ROUND_EXPR:
6772 if (TREE_TYPE (op0) == type)
6775 /* Handle cases of two conversions in a row. */
6776 if (TREE_CODE (op0) == NOP_EXPR
6777 || TREE_CODE (op0) == CONVERT_EXPR)
6779 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6780 tree inter_type = TREE_TYPE (op0);
6781 int inside_int = INTEGRAL_TYPE_P (inside_type);
6782 int inside_ptr = POINTER_TYPE_P (inside_type);
6783 int inside_float = FLOAT_TYPE_P (inside_type);
6784 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6785 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6786 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6787 int inter_int = INTEGRAL_TYPE_P (inter_type);
6788 int inter_ptr = POINTER_TYPE_P (inter_type);
6789 int inter_float = FLOAT_TYPE_P (inter_type);
6790 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6791 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6792 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6793 int final_int = INTEGRAL_TYPE_P (type);
6794 int final_ptr = POINTER_TYPE_P (type);
6795 int final_float = FLOAT_TYPE_P (type);
6796 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6797 unsigned int final_prec = TYPE_PRECISION (type);
6798 int final_unsignedp = TYPE_UNSIGNED (type);
6800 /* In addition to the cases of two conversions in a row
6801 handled below, if we are converting something to its own
6802 type via an object of identical or wider precision, neither
6803 conversion is needed. */
6804 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6805 && ((inter_int && final_int) || (inter_float && final_float))
6806 && inter_prec >= final_prec)
6807 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6809 /* Likewise, if the intermediate and final types are either both
6810 float or both integer, we don't need the middle conversion if
6811 it is wider than the final type and doesn't change the signedness
6812 (for integers). Avoid this if the final type is a pointer
6813 since then we sometimes need the inner conversion. Likewise if
6814 the outer has a precision not equal to the size of its mode. */
6815 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6816 || (inter_float && inside_float)
6817 || (inter_vec && inside_vec))
6818 && inter_prec >= inside_prec
6819 && (inter_float || inter_vec
6820 || inter_unsignedp == inside_unsignedp)
6821 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6822 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6824 && (! final_vec || inter_prec == inside_prec))
6825 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6827 /* If we have a sign-extension of a zero-extended value, we can
6828 replace that by a single zero-extension. */
6829 if (inside_int && inter_int && final_int
6830 && inside_prec < inter_prec && inter_prec < final_prec
6831 && inside_unsignedp && !inter_unsignedp)
6832 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6834 /* Two conversions in a row are not needed unless:
6835 - some conversion is floating-point (overstrict for now), or
6836 - some conversion is a vector (overstrict for now), or
6837 - the intermediate type is narrower than both initial and
6839 - the intermediate type and innermost type differ in signedness,
6840 and the outermost type is wider than the intermediate, or
6841 - the initial type is a pointer type and the precisions of the
6842 intermediate and final types differ, or
6843 - the final type is a pointer type and the precisions of the
6844 initial and intermediate types differ. */
6845 if (! inside_float && ! inter_float && ! final_float
6846 && ! inside_vec && ! inter_vec && ! final_vec
6847 && (inter_prec > inside_prec || inter_prec > final_prec)
6848 && ! (inside_int && inter_int
6849 && inter_unsignedp != inside_unsignedp
6850 && inter_prec < final_prec)
6851 && ((inter_unsignedp && inter_prec > inside_prec)
6852 == (final_unsignedp && final_prec > inter_prec))
6853 && ! (inside_ptr && inter_prec != final_prec)
6854 && ! (final_ptr && inside_prec != inter_prec)
6855 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6856 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6858 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6861 /* Handle (T *)&A.B.C for A being of type T and B and C
6862 living at offset zero. This occurs frequently in
6863 C++ upcasting and then accessing the base. */
6864 if (TREE_CODE (op0) == ADDR_EXPR
6865 && POINTER_TYPE_P (type)
6866 && handled_component_p (TREE_OPERAND (op0, 0)))
6868 HOST_WIDE_INT bitsize, bitpos;
6870 enum machine_mode mode;
6871 int unsignedp, volatilep;
6872 tree base = TREE_OPERAND (op0, 0);
6873 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6874 &mode, &unsignedp, &volatilep, false);
6875 /* If the reference was to a (constant) zero offset, we can use
6876 the address of the base if it has the same base type
6877 as the result type. */
6878 if (! offset && bitpos == 0
6879 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6880 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6881 return fold_convert (type, build_fold_addr_expr (base));
6884 if (TREE_CODE (op0) == MODIFY_EXPR
6885 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6886 /* Detect assigning a bitfield. */
6887 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6888 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6890 /* Don't leave an assignment inside a conversion
6891 unless assigning a bitfield. */
6892 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6893 /* First do the assignment, then return converted constant. */
6894 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6895 TREE_NO_WARNING (tem) = 1;
6896 TREE_USED (tem) = 1;
6900 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6901 constants (if x has signed type, the sign bit cannot be set
6902 in c). This folds extension into the BIT_AND_EXPR. */
6903 if (INTEGRAL_TYPE_P (type)
6904 && TREE_CODE (type) != BOOLEAN_TYPE
6905 && TREE_CODE (op0) == BIT_AND_EXPR
6906 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6909 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6912 if (TYPE_UNSIGNED (TREE_TYPE (and))
6913 || (TYPE_PRECISION (type)
6914 <= TYPE_PRECISION (TREE_TYPE (and))))
6916 else if (TYPE_PRECISION (TREE_TYPE (and1))
6917 <= HOST_BITS_PER_WIDE_INT
6918 && host_integerp (and1, 1))
6920 unsigned HOST_WIDE_INT cst;
6922 cst = tree_low_cst (and1, 1);
6923 cst &= (HOST_WIDE_INT) -1
6924 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6925 change = (cst == 0);
6926 #ifdef LOAD_EXTEND_OP
6928 && !flag_syntax_only
6929 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6932 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6933 and0 = fold_convert (uns, and0);
6934 and1 = fold_convert (uns, and1);
6940 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6941 TREE_INT_CST_HIGH (and1));
6942 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6943 TREE_CONSTANT_OVERFLOW (and1));
6944 return fold_build2 (BIT_AND_EXPR, type,
6945 fold_convert (type, and0), tem);
6949 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6950 T2 being pointers to types of the same size. */
6951 if (POINTER_TYPE_P (type)
6952 && BINARY_CLASS_P (arg0)
6953 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6954 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6956 tree arg00 = TREE_OPERAND (arg0, 0);
6958 tree t1 = TREE_TYPE (arg00);
6959 tree tt0 = TREE_TYPE (t0);
6960 tree tt1 = TREE_TYPE (t1);
6961 tree s0 = TYPE_SIZE (tt0);
6962 tree s1 = TYPE_SIZE (tt1);
6964 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6965 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6966 TREE_OPERAND (arg0, 1));
6969 tem = fold_convert_const (code, type, arg0);
6970 return tem ? tem : NULL_TREE;
6972 case VIEW_CONVERT_EXPR:
6973 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6974 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6978 if (negate_expr_p (arg0))
6979 return fold_convert (type, negate_expr (arg0));
6980 /* Convert - (~A) to A + 1. */
6981 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6982 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6983 build_int_cst (type, 1));
6987 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6988 return fold_abs_const (arg0, type);
6989 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6990 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6991 /* Convert fabs((double)float) into (double)fabsf(float). */
6992 else if (TREE_CODE (arg0) == NOP_EXPR
6993 && TREE_CODE (type) == REAL_TYPE)
6995 tree targ0 = strip_float_extensions (arg0);
6997 return fold_convert (type, fold_build1 (ABS_EXPR,
7001 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7002 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7005 /* Strip sign ops from argument. */
7006 if (TREE_CODE (type) == REAL_TYPE)
7008 tem = fold_strip_sign_ops (arg0);
7010 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7015 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7016 return fold_convert (type, arg0);
7017 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7018 return build2 (COMPLEX_EXPR, type,
7019 TREE_OPERAND (arg0, 0),
7020 negate_expr (TREE_OPERAND (arg0, 1)));
7021 else if (TREE_CODE (arg0) == COMPLEX_CST)
7022 return build_complex (type, TREE_REALPART (arg0),
7023 negate_expr (TREE_IMAGPART (arg0)));
7024 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7025 return fold_build2 (TREE_CODE (arg0), type,
7026 fold_build1 (CONJ_EXPR, type,
7027 TREE_OPERAND (arg0, 0)),
7028 fold_build1 (CONJ_EXPR, type,
7029 TREE_OPERAND (arg0, 1)));
7030 else if (TREE_CODE (arg0) == CONJ_EXPR)
7031 return TREE_OPERAND (arg0, 0);
7035 if (TREE_CODE (arg0) == INTEGER_CST)
7036 return fold_not_const (arg0, type);
7037 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7038 return TREE_OPERAND (arg0, 0);
7039 /* Convert ~ (-A) to A - 1. */
7040 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7041 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7042 build_int_cst (type, 1));
7043 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7044 else if (INTEGRAL_TYPE_P (type)
7045 && ((TREE_CODE (arg0) == MINUS_EXPR
7046 && integer_onep (TREE_OPERAND (arg0, 1)))
7047 || (TREE_CODE (arg0) == PLUS_EXPR
7048 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7049 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7050 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7051 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7052 && (tem = fold_unary (BIT_NOT_EXPR, type,
7054 TREE_OPERAND (arg0, 0)))))
7055 return fold_build2 (BIT_XOR_EXPR, type, tem,
7056 fold_convert (type, TREE_OPERAND (arg0, 1)));
7057 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7058 && (tem = fold_unary (BIT_NOT_EXPR, type,
7060 TREE_OPERAND (arg0, 1)))))
7061 return fold_build2 (BIT_XOR_EXPR, type,
7062 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7066 case TRUTH_NOT_EXPR:
7067 /* The argument to invert_truthvalue must have Boolean type. */
7068 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7069 arg0 = fold_convert (boolean_type_node, arg0);
7071 /* Note that the operand of this must be an int
7072 and its values must be 0 or 1.
7073 ("true" is a fixed value perhaps depending on the language,
7074 but we don't handle values other than 1 correctly yet.) */
7075 tem = invert_truthvalue (arg0);
7076 /* Avoid infinite recursion. */
7077 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7079 return fold_convert (type, tem);
7082 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7084 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7085 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7086 TREE_OPERAND (arg0, 1));
7087 else if (TREE_CODE (arg0) == COMPLEX_CST)
7088 return TREE_REALPART (arg0);
7089 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7090 return fold_build2 (TREE_CODE (arg0), type,
7091 fold_build1 (REALPART_EXPR, type,
7092 TREE_OPERAND (arg0, 0)),
7093 fold_build1 (REALPART_EXPR, type,
7094 TREE_OPERAND (arg0, 1)));
7098 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7099 return fold_convert (type, integer_zero_node);
7100 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7101 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7102 TREE_OPERAND (arg0, 0));
7103 else if (TREE_CODE (arg0) == COMPLEX_CST)
7104 return TREE_IMAGPART (arg0);
7105 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7106 return fold_build2 (TREE_CODE (arg0), type,
7107 fold_build1 (IMAGPART_EXPR, type,
7108 TREE_OPERAND (arg0, 0)),
7109 fold_build1 (IMAGPART_EXPR, type,
7110 TREE_OPERAND (arg0, 1)));
7115 } /* switch (code) */
7118 /* Fold a binary expression of code CODE and type TYPE with operands
7119 OP0 and OP1. Return the folded expression if folding is
7120 successful. Otherwise, return NULL_TREE. */
7123 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7125 tree t1 = NULL_TREE;
7127 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7128 enum tree_code_class kind = TREE_CODE_CLASS (code);
7130 /* WINS will be nonzero when the switch is done
7131 if all operands are constant. */
7134 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7135 && TREE_CODE_LENGTH (code) == 2);
7144 /* Strip any conversions that don't change the mode. This is
7145 safe for every expression, except for a comparison expression
7146 because its signedness is derived from its operands. So, in
7147 the latter case, only strip conversions that don't change the
7150 Note that this is done as an internal manipulation within the
7151 constant folder, in order to find the simplest representation
7152 of the arguments so that their form can be studied. In any
7153 cases, the appropriate type conversions should be put back in
7154 the tree that will get out of the constant folder. */
7155 if (kind == tcc_comparison)
7156 STRIP_SIGN_NOPS (arg0);
7160 if (TREE_CODE (arg0) == COMPLEX_CST)
7161 subop = TREE_REALPART (arg0);
7165 if (TREE_CODE (subop) != INTEGER_CST
7166 && TREE_CODE (subop) != REAL_CST)
7167 /* Note that TREE_CONSTANT isn't enough:
7168 static var addresses are constant but we can't
7169 do arithmetic on them. */
7177 /* Strip any conversions that don't change the mode. This is
7178 safe for every expression, except for a comparison expression
7179 because its signedness is derived from its operands. So, in
7180 the latter case, only strip conversions that don't change the
7183 Note that this is done as an internal manipulation within the
7184 constant folder, in order to find the simplest representation
7185 of the arguments so that their form can be studied. In any
7186 cases, the appropriate type conversions should be put back in
7187 the tree that will get out of the constant folder. */
7188 if (kind == tcc_comparison)
7189 STRIP_SIGN_NOPS (arg1);
7193 if (TREE_CODE (arg1) == COMPLEX_CST)
7194 subop = TREE_REALPART (arg1);
7198 if (TREE_CODE (subop) != INTEGER_CST
7199 && TREE_CODE (subop) != REAL_CST)
7200 /* Note that TREE_CONSTANT isn't enough:
7201 static var addresses are constant but we can't
7202 do arithmetic on them. */
7206 /* If this is a commutative operation, and ARG0 is a constant, move it
7207 to ARG1 to reduce the number of tests below. */
7208 if (commutative_tree_code (code)
7209 && tree_swap_operands_p (arg0, arg1, true))
7210 return fold_build2 (code, type, op1, op0);
7212 /* Now WINS is set as described above,
7213 ARG0 is the first operand of EXPR,
7214 and ARG1 is the second operand (if it has more than one operand).
7216 First check for cases where an arithmetic operation is applied to a
7217 compound, conditional, or comparison operation. Push the arithmetic
7218 operation inside the compound or conditional to see if any folding
7219 can then be done. Convert comparison to conditional for this purpose.
7220 This also optimizes non-constant cases that used to be done in
7223 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7224 one of the operands is a comparison and the other is a comparison, a
7225 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7226 code below would make the expression more complex. Change it to a
7227 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7228 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7230 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7231 || code == EQ_EXPR || code == NE_EXPR)
7232 && ((truth_value_p (TREE_CODE (arg0))
7233 && (truth_value_p (TREE_CODE (arg1))
7234 || (TREE_CODE (arg1) == BIT_AND_EXPR
7235 && integer_onep (TREE_OPERAND (arg1, 1)))))
7236 || (truth_value_p (TREE_CODE (arg1))
7237 && (truth_value_p (TREE_CODE (arg0))
7238 || (TREE_CODE (arg0) == BIT_AND_EXPR
7239 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7241 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7242 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7245 fold_convert (boolean_type_node, arg0),
7246 fold_convert (boolean_type_node, arg1));
7248 if (code == EQ_EXPR)
7249 tem = invert_truthvalue (tem);
7251 return fold_convert (type, tem);
7254 if (TREE_CODE_CLASS (code) == tcc_binary
7255 || TREE_CODE_CLASS (code) == tcc_comparison)
7257 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7258 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7259 fold_build2 (code, type,
7260 TREE_OPERAND (arg0, 1), op1));
7261 if (TREE_CODE (arg1) == COMPOUND_EXPR
7262 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7263 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7264 fold_build2 (code, type,
7265 op0, TREE_OPERAND (arg1, 1)));
7267 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7269 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7271 /*cond_first_p=*/1);
7272 if (tem != NULL_TREE)
7276 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7278 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7280 /*cond_first_p=*/0);
7281 if (tem != NULL_TREE)
7289 /* A + (-B) -> A - B */
7290 if (TREE_CODE (arg1) == NEGATE_EXPR)
7291 return fold_build2 (MINUS_EXPR, type,
7292 fold_convert (type, arg0),
7293 fold_convert (type, TREE_OPERAND (arg1, 0)));
7294 /* (-A) + B -> B - A */
7295 if (TREE_CODE (arg0) == NEGATE_EXPR
7296 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7297 return fold_build2 (MINUS_EXPR, type,
7298 fold_convert (type, arg1),
7299 fold_convert (type, TREE_OPERAND (arg0, 0)));
7300 /* Convert ~A + 1 to -A. */
7301 if (INTEGRAL_TYPE_P (type)
7302 && TREE_CODE (arg0) == BIT_NOT_EXPR
7303 && integer_onep (arg1))
7304 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7306 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
7308 if ((TREE_CODE (arg0) == MULT_EXPR
7309 || TREE_CODE (arg1) == MULT_EXPR)
7310 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7312 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7317 if (! FLOAT_TYPE_P (type))
7319 if (integer_zerop (arg1))
7320 return non_lvalue (fold_convert (type, arg0));
7322 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7323 with a constant, and the two constants have no bits in common,
7324 we should treat this as a BIT_IOR_EXPR since this may produce more
7326 if (TREE_CODE (arg0) == BIT_AND_EXPR
7327 && TREE_CODE (arg1) == BIT_AND_EXPR
7328 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7329 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7330 && integer_zerop (const_binop (BIT_AND_EXPR,
7331 TREE_OPERAND (arg0, 1),
7332 TREE_OPERAND (arg1, 1), 0)))
7334 code = BIT_IOR_EXPR;
7338 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7339 (plus (plus (mult) (mult)) (foo)) so that we can
7340 take advantage of the factoring cases below. */
7341 if (((TREE_CODE (arg0) == PLUS_EXPR
7342 || TREE_CODE (arg0) == MINUS_EXPR)
7343 && TREE_CODE (arg1) == MULT_EXPR)
7344 || ((TREE_CODE (arg1) == PLUS_EXPR
7345 || TREE_CODE (arg1) == MINUS_EXPR)
7346 && TREE_CODE (arg0) == MULT_EXPR))
7348 tree parg0, parg1, parg, marg;
7349 enum tree_code pcode;
7351 if (TREE_CODE (arg1) == MULT_EXPR)
7352 parg = arg0, marg = arg1;
7354 parg = arg1, marg = arg0;
7355 pcode = TREE_CODE (parg);
7356 parg0 = TREE_OPERAND (parg, 0);
7357 parg1 = TREE_OPERAND (parg, 1);
7361 if (TREE_CODE (parg0) == MULT_EXPR
7362 && TREE_CODE (parg1) != MULT_EXPR)
7363 return fold_build2 (pcode, type,
7364 fold_build2 (PLUS_EXPR, type,
7365 fold_convert (type, parg0),
7366 fold_convert (type, marg)),
7367 fold_convert (type, parg1));
7368 if (TREE_CODE (parg0) != MULT_EXPR
7369 && TREE_CODE (parg1) == MULT_EXPR)
7370 return fold_build2 (PLUS_EXPR, type,
7371 fold_convert (type, parg0),
7372 fold_build2 (pcode, type,
7373 fold_convert (type, marg),
7378 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7379 of the array. The loop optimizer sometimes produces this type of
7381 if (TREE_CODE (arg0) == ADDR_EXPR)
7383 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7385 return fold_convert (type, tem);
7387 else if (TREE_CODE (arg1) == ADDR_EXPR)
7389 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7391 return fold_convert (type, tem);
7396 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7397 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7398 return non_lvalue (fold_convert (type, arg0));
7400 /* Likewise if the operands are reversed. */
7401 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7402 return non_lvalue (fold_convert (type, arg1));
7404 /* Convert X + -C into X - C. */
7405 if (TREE_CODE (arg1) == REAL_CST
7406 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7408 tem = fold_negate_const (arg1, type);
7409 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7410 return fold_build2 (MINUS_EXPR, type,
7411 fold_convert (type, arg0),
7412 fold_convert (type, tem));
7415 if (flag_unsafe_math_optimizations
7416 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7417 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7418 && (tem = distribute_real_division (code, type, arg0, arg1)))
7421 /* Convert x+x into x*2.0. */
7422 if (operand_equal_p (arg0, arg1, 0)
7423 && SCALAR_FLOAT_TYPE_P (type))
7424 return fold_build2 (MULT_EXPR, type, arg0,
7425 build_real (type, dconst2));
7427 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7428 if (flag_unsafe_math_optimizations
7429 && TREE_CODE (arg1) == PLUS_EXPR
7430 && TREE_CODE (arg0) != MULT_EXPR)
7432 tree tree10 = TREE_OPERAND (arg1, 0);
7433 tree tree11 = TREE_OPERAND (arg1, 1);
7434 if (TREE_CODE (tree11) == MULT_EXPR
7435 && TREE_CODE (tree10) == MULT_EXPR)
7438 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7439 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7442 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7443 if (flag_unsafe_math_optimizations
7444 && TREE_CODE (arg0) == PLUS_EXPR
7445 && TREE_CODE (arg1) != MULT_EXPR)
7447 tree tree00 = TREE_OPERAND (arg0, 0);
7448 tree tree01 = TREE_OPERAND (arg0, 1);
7449 if (TREE_CODE (tree01) == MULT_EXPR
7450 && TREE_CODE (tree00) == MULT_EXPR)
7453 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7454 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7460 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7461 is a rotate of A by C1 bits. */
7462 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7463 is a rotate of A by B bits. */
7465 enum tree_code code0, code1;
7466 code0 = TREE_CODE (arg0);
7467 code1 = TREE_CODE (arg1);
7468 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7469 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7470 && operand_equal_p (TREE_OPERAND (arg0, 0),
7471 TREE_OPERAND (arg1, 0), 0)
7472 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7474 tree tree01, tree11;
7475 enum tree_code code01, code11;
7477 tree01 = TREE_OPERAND (arg0, 1);
7478 tree11 = TREE_OPERAND (arg1, 1);
7479 STRIP_NOPS (tree01);
7480 STRIP_NOPS (tree11);
7481 code01 = TREE_CODE (tree01);
7482 code11 = TREE_CODE (tree11);
7483 if (code01 == INTEGER_CST
7484 && code11 == INTEGER_CST
7485 && TREE_INT_CST_HIGH (tree01) == 0
7486 && TREE_INT_CST_HIGH (tree11) == 0
7487 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7488 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7489 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7490 code0 == LSHIFT_EXPR ? tree01 : tree11);
7491 else if (code11 == MINUS_EXPR)
7493 tree tree110, tree111;
7494 tree110 = TREE_OPERAND (tree11, 0);
7495 tree111 = TREE_OPERAND (tree11, 1);
7496 STRIP_NOPS (tree110);
7497 STRIP_NOPS (tree111);
7498 if (TREE_CODE (tree110) == INTEGER_CST
7499 && 0 == compare_tree_int (tree110,
7501 (TREE_TYPE (TREE_OPERAND
7503 && operand_equal_p (tree01, tree111, 0))
7504 return build2 ((code0 == LSHIFT_EXPR
7507 type, TREE_OPERAND (arg0, 0), tree01);
7509 else if (code01 == MINUS_EXPR)
7511 tree tree010, tree011;
7512 tree010 = TREE_OPERAND (tree01, 0);
7513 tree011 = TREE_OPERAND (tree01, 1);
7514 STRIP_NOPS (tree010);
7515 STRIP_NOPS (tree011);
7516 if (TREE_CODE (tree010) == INTEGER_CST
7517 && 0 == compare_tree_int (tree010,
7519 (TREE_TYPE (TREE_OPERAND
7521 && operand_equal_p (tree11, tree011, 0))
7522 return build2 ((code0 != LSHIFT_EXPR
7525 type, TREE_OPERAND (arg0, 0), tree11);
7531 /* In most languages, can't associate operations on floats through
7532 parentheses. Rather than remember where the parentheses were, we
7533 don't associate floats at all, unless the user has specified
7534 -funsafe-math-optimizations. */
7537 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7539 tree var0, con0, lit0, minus_lit0;
7540 tree var1, con1, lit1, minus_lit1;
7542 /* Split both trees into variables, constants, and literals. Then
7543 associate each group together, the constants with literals,
7544 then the result with variables. This increases the chances of
7545 literals being recombined later and of generating relocatable
7546 expressions for the sum of a constant and literal. */
7547 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7548 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7549 code == MINUS_EXPR);
7551 /* Only do something if we found more than two objects. Otherwise,
7552 nothing has changed and we risk infinite recursion. */
7553 if (2 < ((var0 != 0) + (var1 != 0)
7554 + (con0 != 0) + (con1 != 0)
7555 + (lit0 != 0) + (lit1 != 0)
7556 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7558 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7559 if (code == MINUS_EXPR)
7562 var0 = associate_trees (var0, var1, code, type);
7563 con0 = associate_trees (con0, con1, code, type);
7564 lit0 = associate_trees (lit0, lit1, code, type);
7565 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7567 /* Preserve the MINUS_EXPR if the negative part of the literal is
7568 greater than the positive part. Otherwise, the multiplicative
7569 folding code (i.e extract_muldiv) may be fooled in case
7570 unsigned constants are subtracted, like in the following
7571 example: ((X*2 + 4) - 8U)/2. */
7572 if (minus_lit0 && lit0)
7574 if (TREE_CODE (lit0) == INTEGER_CST
7575 && TREE_CODE (minus_lit0) == INTEGER_CST
7576 && tree_int_cst_lt (lit0, minus_lit0))
7578 minus_lit0 = associate_trees (minus_lit0, lit0,
7584 lit0 = associate_trees (lit0, minus_lit0,
7592 return fold_convert (type,
7593 associate_trees (var0, minus_lit0,
7597 con0 = associate_trees (con0, minus_lit0,
7599 return fold_convert (type,
7600 associate_trees (var0, con0,
7605 con0 = associate_trees (con0, lit0, code, type);
7606 return fold_convert (type, associate_trees (var0, con0,
7613 t1 = const_binop (code, arg0, arg1, 0);
7614 if (t1 != NULL_TREE)
7616 /* The return value should always have
7617 the same type as the original expression. */
7618 if (TREE_TYPE (t1) != type)
7619 t1 = fold_convert (type, t1);
7626 /* A - (-B) -> A + B */
7627 if (TREE_CODE (arg1) == NEGATE_EXPR)
7628 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7629 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7630 if (TREE_CODE (arg0) == NEGATE_EXPR
7631 && (FLOAT_TYPE_P (type)
7632 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7633 && negate_expr_p (arg1)
7634 && reorder_operands_p (arg0, arg1))
7635 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7636 TREE_OPERAND (arg0, 0));
7637 /* Convert -A - 1 to ~A. */
7638 if (INTEGRAL_TYPE_P (type)
7639 && TREE_CODE (arg0) == NEGATE_EXPR
7640 && integer_onep (arg1))
7641 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7643 /* Convert -1 - A to ~A. */
7644 if (INTEGRAL_TYPE_P (type)
7645 && integer_all_onesp (arg0))
7646 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7648 if (! FLOAT_TYPE_P (type))
7650 if (! wins && integer_zerop (arg0))
7651 return negate_expr (fold_convert (type, arg1));
7652 if (integer_zerop (arg1))
7653 return non_lvalue (fold_convert (type, arg0));
7655 /* Fold A - (A & B) into ~B & A. */
7656 if (!TREE_SIDE_EFFECTS (arg0)
7657 && TREE_CODE (arg1) == BIT_AND_EXPR)
7659 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7660 return fold_build2 (BIT_AND_EXPR, type,
7661 fold_build1 (BIT_NOT_EXPR, type,
7662 TREE_OPERAND (arg1, 0)),
7664 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7665 return fold_build2 (BIT_AND_EXPR, type,
7666 fold_build1 (BIT_NOT_EXPR, type,
7667 TREE_OPERAND (arg1, 1)),
7671 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7672 any power of 2 minus 1. */
7673 if (TREE_CODE (arg0) == BIT_AND_EXPR
7674 && TREE_CODE (arg1) == BIT_AND_EXPR
7675 && operand_equal_p (TREE_OPERAND (arg0, 0),
7676 TREE_OPERAND (arg1, 0), 0))
7678 tree mask0 = TREE_OPERAND (arg0, 1);
7679 tree mask1 = TREE_OPERAND (arg1, 1);
7680 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7682 if (operand_equal_p (tem, mask1, 0))
7684 tem = fold_build2 (BIT_XOR_EXPR, type,
7685 TREE_OPERAND (arg0, 0), mask1);
7686 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7691 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7692 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7693 return non_lvalue (fold_convert (type, arg0));
7695 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7696 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7697 (-ARG1 + ARG0) reduces to -ARG1. */
7698 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7699 return negate_expr (fold_convert (type, arg1));
7701 /* Fold &x - &x. This can happen from &x.foo - &x.
7702 This is unsafe for certain floats even in non-IEEE formats.
7703 In IEEE, it is unsafe because it does wrong for NaNs.
7704 Also note that operand_equal_p is always false if an operand
7707 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7708 && operand_equal_p (arg0, arg1, 0))
7709 return fold_convert (type, integer_zero_node);
7711 /* A - B -> A + (-B) if B is easily negatable. */
7712 if (!wins && negate_expr_p (arg1)
7713 && ((FLOAT_TYPE_P (type)
7714 /* Avoid this transformation if B is a positive REAL_CST. */
7715 && (TREE_CODE (arg1) != REAL_CST
7716 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7717 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7718 return fold_build2 (PLUS_EXPR, type,
7719 fold_convert (type, arg0),
7720 fold_convert (type, negate_expr (arg1)));
7722 /* Try folding difference of addresses. */
7726 if ((TREE_CODE (arg0) == ADDR_EXPR
7727 || TREE_CODE (arg1) == ADDR_EXPR)
7728 && ptr_difference_const (arg0, arg1, &diff))
7729 return build_int_cst_type (type, diff);
7732 /* Fold &a[i] - &a[j] to i-j. */
7733 if (TREE_CODE (arg0) == ADDR_EXPR
7734 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7735 && TREE_CODE (arg1) == ADDR_EXPR
7736 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7738 tree aref0 = TREE_OPERAND (arg0, 0);
7739 tree aref1 = TREE_OPERAND (arg1, 0);
7740 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7741 TREE_OPERAND (aref1, 0), 0))
7743 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7744 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7745 tree esz = array_ref_element_size (aref0);
7746 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7747 return fold_build2 (MULT_EXPR, type, diff,
7748 fold_convert (type, esz));
7753 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7754 of the array. The loop optimizer sometimes produces this type of
7756 if (TREE_CODE (arg0) == ADDR_EXPR)
7758 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7760 return fold_convert (type, tem);
7763 if (flag_unsafe_math_optimizations
7764 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7765 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7766 && (tem = distribute_real_division (code, type, arg0, arg1)))
7769 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
7771 if ((TREE_CODE (arg0) == MULT_EXPR
7772 || TREE_CODE (arg1) == MULT_EXPR)
7773 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7775 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7783 /* (-A) * (-B) -> A * B */
7784 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7785 return fold_build2 (MULT_EXPR, type,
7786 TREE_OPERAND (arg0, 0),
7787 negate_expr (arg1));
7788 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7789 return fold_build2 (MULT_EXPR, type,
7791 TREE_OPERAND (arg1, 0));
7793 if (! FLOAT_TYPE_P (type))
7795 if (integer_zerop (arg1))
7796 return omit_one_operand (type, arg1, arg0);
7797 if (integer_onep (arg1))
7798 return non_lvalue (fold_convert (type, arg0));
7799 /* Transform x * -1 into -x. */
7800 if (integer_all_onesp (arg1))
7801 return fold_convert (type, negate_expr (arg0));
7803 /* (a * (1 << b)) is (a << b) */
7804 if (TREE_CODE (arg1) == LSHIFT_EXPR
7805 && integer_onep (TREE_OPERAND (arg1, 0)))
7806 return fold_build2 (LSHIFT_EXPR, type, arg0,
7807 TREE_OPERAND (arg1, 1));
7808 if (TREE_CODE (arg0) == LSHIFT_EXPR
7809 && integer_onep (TREE_OPERAND (arg0, 0)))
7810 return fold_build2 (LSHIFT_EXPR, type, arg1,
7811 TREE_OPERAND (arg0, 1));
7813 if (TREE_CODE (arg1) == INTEGER_CST
7814 && 0 != (tem = extract_muldiv (op0,
7815 fold_convert (type, arg1),
7817 return fold_convert (type, tem);
7822 /* Maybe fold x * 0 to 0. The expressions aren't the same
7823 when x is NaN, since x * 0 is also NaN. Nor are they the
7824 same in modes with signed zeros, since multiplying a
7825 negative value by 0 gives -0, not +0. */
7826 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7827 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7828 && real_zerop (arg1))
7829 return omit_one_operand (type, arg1, arg0);
7830 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7831 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7832 && real_onep (arg1))
7833 return non_lvalue (fold_convert (type, arg0));
7835 /* Transform x * -1.0 into -x. */
7836 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7837 && real_minus_onep (arg1))
7838 return fold_convert (type, negate_expr (arg0));
7840 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7841 if (flag_unsafe_math_optimizations
7842 && TREE_CODE (arg0) == RDIV_EXPR
7843 && TREE_CODE (arg1) == REAL_CST
7844 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7846 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7849 return fold_build2 (RDIV_EXPR, type, tem,
7850 TREE_OPERAND (arg0, 1));
7853 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7854 if (operand_equal_p (arg0, arg1, 0))
7856 tree tem = fold_strip_sign_ops (arg0);
7857 if (tem != NULL_TREE)
7859 tem = fold_convert (type, tem);
7860 return fold_build2 (MULT_EXPR, type, tem, tem);
7864 if (flag_unsafe_math_optimizations)
7866 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7867 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7869 /* Optimizations of root(...)*root(...). */
7870 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7872 tree rootfn, arg, arglist;
7873 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7874 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7876 /* Optimize sqrt(x)*sqrt(x) as x. */
7877 if (BUILTIN_SQRT_P (fcode0)
7878 && operand_equal_p (arg00, arg10, 0)
7879 && ! HONOR_SNANS (TYPE_MODE (type)))
7882 /* Optimize root(x)*root(y) as root(x*y). */
7883 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7884 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7885 arglist = build_tree_list (NULL_TREE, arg);
7886 return build_function_call_expr (rootfn, arglist);
7889 /* Optimize expN(x)*expN(y) as expN(x+y). */
7890 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7892 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7893 tree arg = fold_build2 (PLUS_EXPR, type,
7894 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7895 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7896 tree arglist = build_tree_list (NULL_TREE, arg);
7897 return build_function_call_expr (expfn, arglist);
7900 /* Optimizations of pow(...)*pow(...). */
7901 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7902 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7903 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7905 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7906 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7908 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7909 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7912 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7913 if (operand_equal_p (arg01, arg11, 0))
7915 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7916 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7917 tree arglist = tree_cons (NULL_TREE, arg,
7918 build_tree_list (NULL_TREE,
7920 return build_function_call_expr (powfn, arglist);
7923 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7924 if (operand_equal_p (arg00, arg10, 0))
7926 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7927 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7928 tree arglist = tree_cons (NULL_TREE, arg00,
7929 build_tree_list (NULL_TREE,
7931 return build_function_call_expr (powfn, arglist);
7935 /* Optimize tan(x)*cos(x) as sin(x). */
7936 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7937 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7938 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7939 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7940 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7941 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7942 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7943 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7945 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7947 if (sinfn != NULL_TREE)
7948 return build_function_call_expr (sinfn,
7949 TREE_OPERAND (arg0, 1));
7952 /* Optimize x*pow(x,c) as pow(x,c+1). */
7953 if (fcode1 == BUILT_IN_POW
7954 || fcode1 == BUILT_IN_POWF
7955 || fcode1 == BUILT_IN_POWL)
7957 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7958 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7960 if (TREE_CODE (arg11) == REAL_CST
7961 && ! TREE_CONSTANT_OVERFLOW (arg11)
7962 && operand_equal_p (arg0, arg10, 0))
7964 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7968 c = TREE_REAL_CST (arg11);
7969 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7970 arg = build_real (type, c);
7971 arglist = build_tree_list (NULL_TREE, arg);
7972 arglist = tree_cons (NULL_TREE, arg0, arglist);
7973 return build_function_call_expr (powfn, arglist);
7977 /* Optimize pow(x,c)*x as pow(x,c+1). */
7978 if (fcode0 == BUILT_IN_POW
7979 || fcode0 == BUILT_IN_POWF
7980 || fcode0 == BUILT_IN_POWL)
7982 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7983 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7985 if (TREE_CODE (arg01) == REAL_CST
7986 && ! TREE_CONSTANT_OVERFLOW (arg01)
7987 && operand_equal_p (arg1, arg00, 0))
7989 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7993 c = TREE_REAL_CST (arg01);
7994 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7995 arg = build_real (type, c);
7996 arglist = build_tree_list (NULL_TREE, arg);
7997 arglist = tree_cons (NULL_TREE, arg1, arglist);
7998 return build_function_call_expr (powfn, arglist);
8002 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8004 && operand_equal_p (arg0, arg1, 0))
8006 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8010 tree arg = build_real (type, dconst2);
8011 tree arglist = build_tree_list (NULL_TREE, arg);
8012 arglist = tree_cons (NULL_TREE, arg0, arglist);
8013 return build_function_call_expr (powfn, arglist);
8022 if (integer_all_onesp (arg1))
8023 return omit_one_operand (type, arg1, arg0);
8024 if (integer_zerop (arg1))
8025 return non_lvalue (fold_convert (type, arg0));
8026 if (operand_equal_p (arg0, arg1, 0))
8027 return non_lvalue (fold_convert (type, arg0));
8030 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8031 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8033 t1 = build_int_cst (type, -1);
8034 t1 = force_fit_type (t1, 0, false, false);
8035 return omit_one_operand (type, t1, arg1);
8039 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8040 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8042 t1 = build_int_cst (type, -1);
8043 t1 = force_fit_type (t1, 0, false, false);
8044 return omit_one_operand (type, t1, arg0);
8047 t1 = distribute_bit_expr (code, type, arg0, arg1);
8048 if (t1 != NULL_TREE)
8051 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8053 This results in more efficient code for machines without a NAND
8054 instruction. Combine will canonicalize to the first form
8055 which will allow use of NAND instructions provided by the
8056 backend if they exist. */
8057 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8058 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8060 return fold_build1 (BIT_NOT_EXPR, type,
8061 build2 (BIT_AND_EXPR, type,
8062 TREE_OPERAND (arg0, 0),
8063 TREE_OPERAND (arg1, 0)));
8066 /* See if this can be simplified into a rotate first. If that
8067 is unsuccessful continue in the association code. */
8071 if (integer_zerop (arg1))
8072 return non_lvalue (fold_convert (type, arg0));
8073 if (integer_all_onesp (arg1))
8074 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8075 if (operand_equal_p (arg0, arg1, 0))
8076 return omit_one_operand (type, integer_zero_node, arg0);
8079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8082 t1 = build_int_cst (type, -1);
8083 t1 = force_fit_type (t1, 0, false, false);
8084 return omit_one_operand (type, t1, arg1);
8088 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8089 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8091 t1 = build_int_cst (type, -1);
8092 t1 = force_fit_type (t1, 0, false, false);
8093 return omit_one_operand (type, t1, arg0);
8096 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8097 with a constant, and the two constants have no bits in common,
8098 we should treat this as a BIT_IOR_EXPR since this may produce more
8100 if (TREE_CODE (arg0) == BIT_AND_EXPR
8101 && TREE_CODE (arg1) == BIT_AND_EXPR
8102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8103 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8104 && integer_zerop (const_binop (BIT_AND_EXPR,
8105 TREE_OPERAND (arg0, 1),
8106 TREE_OPERAND (arg1, 1), 0)))
8108 code = BIT_IOR_EXPR;
8112 /* (X | Y) ^ X -> Y & ~ X*/
8113 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8114 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8116 tree t2 = TREE_OPERAND (arg0, 1);
8117 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8119 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8120 fold_convert (type, t1));
8124 /* (Y | X) ^ X -> Y & ~ X*/
8125 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8126 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8128 tree t2 = TREE_OPERAND (arg0, 0);
8129 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8131 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8132 fold_convert (type, t1));
8136 /* X ^ (X | Y) -> Y & ~ X*/
8137 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8138 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8140 tree t2 = TREE_OPERAND (arg1, 1);
8141 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8143 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8144 fold_convert (type, t1));
8148 /* X ^ (Y | X) -> Y & ~ X*/
8149 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8150 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8152 tree t2 = TREE_OPERAND (arg1, 0);
8153 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8155 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8156 fold_convert (type, t1));
8160 /* Convert ~X ^ ~Y to X ^ Y. */
8161 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8162 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8163 return fold_build2 (code, type,
8164 fold_convert (type, TREE_OPERAND (arg0, 0)),
8165 fold_convert (type, TREE_OPERAND (arg1, 0)));
8167 /* See if this can be simplified into a rotate first. If that
8168 is unsuccessful continue in the association code. */
8172 if (integer_all_onesp (arg1))
8173 return non_lvalue (fold_convert (type, arg0));
8174 if (integer_zerop (arg1))
8175 return omit_one_operand (type, arg1, arg0);
8176 if (operand_equal_p (arg0, arg1, 0))
8177 return non_lvalue (fold_convert (type, arg0));
8179 /* ~X & X is always zero. */
8180 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8181 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8182 return omit_one_operand (type, integer_zero_node, arg1);
8184 /* X & ~X is always zero. */
8185 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8186 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8187 return omit_one_operand (type, integer_zero_node, arg0);
8189 t1 = distribute_bit_expr (code, type, arg0, arg1);
8190 if (t1 != NULL_TREE)
8192 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8193 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8194 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8197 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8199 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8200 && (~TREE_INT_CST_LOW (arg1)
8201 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8202 return fold_convert (type, TREE_OPERAND (arg0, 0));
8205 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8207 This results in more efficient code for machines without a NOR
8208 instruction. Combine will canonicalize to the first form
8209 which will allow use of NOR instructions provided by the
8210 backend if they exist. */
8211 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8212 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8214 return fold_build1 (BIT_NOT_EXPR, type,
8215 build2 (BIT_IOR_EXPR, type,
8216 TREE_OPERAND (arg0, 0),
8217 TREE_OPERAND (arg1, 0)));
8223 /* Don't touch a floating-point divide by zero unless the mode
8224 of the constant can represent infinity. */
8225 if (TREE_CODE (arg1) == REAL_CST
8226 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8227 && real_zerop (arg1))
8230 /* Optimize A / A to 1.0 if we don't care about
8231 NaNs or Infinities. */
8232 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8233 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8234 && operand_equal_p (arg0, arg1, 0))
8236 tree r = build_real (TREE_TYPE (arg0), dconst1);
8238 return omit_two_operands (type, r, arg0, arg1);
8241 /* (-A) / (-B) -> A / B */
8242 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8243 return fold_build2 (RDIV_EXPR, type,
8244 TREE_OPERAND (arg0, 0),
8245 negate_expr (arg1));
8246 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8247 return fold_build2 (RDIV_EXPR, type,
8249 TREE_OPERAND (arg1, 0));
8251 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8252 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8253 && real_onep (arg1))
8254 return non_lvalue (fold_convert (type, arg0));
8256 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8257 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8258 && real_minus_onep (arg1))
8259 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8261 /* If ARG1 is a constant, we can convert this to a multiply by the
8262 reciprocal. This does not have the same rounding properties,
8263 so only do this if -funsafe-math-optimizations. We can actually
8264 always safely do it if ARG1 is a power of two, but it's hard to
8265 tell if it is or not in a portable manner. */
8266 if (TREE_CODE (arg1) == REAL_CST)
8268 if (flag_unsafe_math_optimizations
8269 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8271 return fold_build2 (MULT_EXPR, type, arg0, tem);
8272 /* Find the reciprocal if optimizing and the result is exact. */
8276 r = TREE_REAL_CST (arg1);
8277 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8279 tem = build_real (type, r);
8280 return fold_build2 (MULT_EXPR, type,
8281 fold_convert (type, arg0), tem);
8285 /* Convert A/B/C to A/(B*C). */
8286 if (flag_unsafe_math_optimizations
8287 && TREE_CODE (arg0) == RDIV_EXPR)
8288 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8289 fold_build2 (MULT_EXPR, type,
8290 TREE_OPERAND (arg0, 1), arg1));
8292 /* Convert A/(B/C) to (A/B)*C. */
8293 if (flag_unsafe_math_optimizations
8294 && TREE_CODE (arg1) == RDIV_EXPR)
8295 return fold_build2 (MULT_EXPR, type,
8296 fold_build2 (RDIV_EXPR, type, arg0,
8297 TREE_OPERAND (arg1, 0)),
8298 TREE_OPERAND (arg1, 1));
8300 /* Convert C1/(X*C2) into (C1/C2)/X. */
8301 if (flag_unsafe_math_optimizations
8302 && TREE_CODE (arg1) == MULT_EXPR
8303 && TREE_CODE (arg0) == REAL_CST
8304 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8306 tree tem = const_binop (RDIV_EXPR, arg0,
8307 TREE_OPERAND (arg1, 1), 0);
8309 return fold_build2 (RDIV_EXPR, type, tem,
8310 TREE_OPERAND (arg1, 0));
8313 if (flag_unsafe_math_optimizations)
8315 enum built_in_function fcode = builtin_mathfn_code (arg1);
8316 /* Optimize x/expN(y) into x*expN(-y). */
8317 if (BUILTIN_EXPONENT_P (fcode))
8319 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8320 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8321 tree arglist = build_tree_list (NULL_TREE,
8322 fold_convert (type, arg));
8323 arg1 = build_function_call_expr (expfn, arglist);
8324 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8327 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8328 if (fcode == BUILT_IN_POW
8329 || fcode == BUILT_IN_POWF
8330 || fcode == BUILT_IN_POWL)
8332 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8333 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8334 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8335 tree neg11 = fold_convert (type, negate_expr (arg11));
8336 tree arglist = tree_cons(NULL_TREE, arg10,
8337 build_tree_list (NULL_TREE, neg11));
8338 arg1 = build_function_call_expr (powfn, arglist);
8339 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8343 if (flag_unsafe_math_optimizations)
8345 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8346 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8348 /* Optimize sin(x)/cos(x) as tan(x). */
8349 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8350 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8351 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8352 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8353 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8355 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8357 if (tanfn != NULL_TREE)
8358 return build_function_call_expr (tanfn,
8359 TREE_OPERAND (arg0, 1));
8362 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8363 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8364 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8365 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8366 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8367 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8369 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8371 if (tanfn != NULL_TREE)
8373 tree tmp = TREE_OPERAND (arg0, 1);
8374 tmp = build_function_call_expr (tanfn, tmp);
8375 return fold_build2 (RDIV_EXPR, type,
8376 build_real (type, dconst1), tmp);
8380 /* Optimize pow(x,c)/x as pow(x,c-1). */
8381 if (fcode0 == BUILT_IN_POW
8382 || fcode0 == BUILT_IN_POWF
8383 || fcode0 == BUILT_IN_POWL)
8385 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8386 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8387 if (TREE_CODE (arg01) == REAL_CST
8388 && ! TREE_CONSTANT_OVERFLOW (arg01)
8389 && operand_equal_p (arg1, arg00, 0))
8391 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8395 c = TREE_REAL_CST (arg01);
8396 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8397 arg = build_real (type, c);
8398 arglist = build_tree_list (NULL_TREE, arg);
8399 arglist = tree_cons (NULL_TREE, arg1, arglist);
8400 return build_function_call_expr (powfn, arglist);
8406 case TRUNC_DIV_EXPR:
8407 case ROUND_DIV_EXPR:
8408 case FLOOR_DIV_EXPR:
8410 case EXACT_DIV_EXPR:
8411 if (integer_onep (arg1))
8412 return non_lvalue (fold_convert (type, arg0));
8413 if (integer_zerop (arg1))
8416 if (!TYPE_UNSIGNED (type)
8417 && TREE_CODE (arg1) == INTEGER_CST
8418 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8419 && TREE_INT_CST_HIGH (arg1) == -1)
8420 return fold_convert (type, negate_expr (arg0));
8422 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8423 operation, EXACT_DIV_EXPR.
8425 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8426 At one time others generated faster code, it's not clear if they do
8427 after the last round to changes to the DIV code in expmed.c. */
8428 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8429 && multiple_of_p (type, arg0, arg1))
8430 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8432 if (TREE_CODE (arg1) == INTEGER_CST
8433 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8434 return fold_convert (type, tem);
8439 case FLOOR_MOD_EXPR:
8440 case ROUND_MOD_EXPR:
8441 case TRUNC_MOD_EXPR:
8442 /* X % 1 is always zero, but be sure to preserve any side
8444 if (integer_onep (arg1))
8445 return omit_one_operand (type, integer_zero_node, arg0);
8447 /* X % 0, return X % 0 unchanged so that we can get the
8448 proper warnings and errors. */
8449 if (integer_zerop (arg1))
8452 /* 0 % X is always zero, but be sure to preserve any side
8453 effects in X. Place this after checking for X == 0. */
8454 if (integer_zerop (arg0))
8455 return omit_one_operand (type, integer_zero_node, arg1);
8457 /* X % -1 is zero. */
8458 if (!TYPE_UNSIGNED (type)
8459 && TREE_CODE (arg1) == INTEGER_CST
8460 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8461 && TREE_INT_CST_HIGH (arg1) == -1)
8462 return omit_one_operand (type, integer_zero_node, arg0);
8464 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8465 i.e. "X % C" into "X & C2", if X and C are positive. */
8466 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8467 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8468 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8470 unsigned HOST_WIDE_INT high, low;
8474 l = tree_log2 (arg1);
8475 if (l >= HOST_BITS_PER_WIDE_INT)
8477 high = ((unsigned HOST_WIDE_INT) 1
8478 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8484 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8487 mask = build_int_cst_wide (type, low, high);
8488 return fold_build2 (BIT_AND_EXPR, type,
8489 fold_convert (type, arg0), mask);
8492 /* X % -C is the same as X % C. */
8493 if (code == TRUNC_MOD_EXPR
8494 && !TYPE_UNSIGNED (type)
8495 && TREE_CODE (arg1) == INTEGER_CST
8496 && !TREE_CONSTANT_OVERFLOW (arg1)
8497 && TREE_INT_CST_HIGH (arg1) < 0
8499 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8500 && !sign_bit_p (arg1, arg1))
8501 return fold_build2 (code, type, fold_convert (type, arg0),
8502 fold_convert (type, negate_expr (arg1)));
8504 /* X % -Y is the same as X % Y. */
8505 if (code == TRUNC_MOD_EXPR
8506 && !TYPE_UNSIGNED (type)
8507 && TREE_CODE (arg1) == NEGATE_EXPR
8509 return fold_build2 (code, type, fold_convert (type, arg0),
8510 fold_convert (type, TREE_OPERAND (arg1, 0)));
8512 if (TREE_CODE (arg1) == INTEGER_CST
8513 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8514 return fold_convert (type, tem);
8520 if (integer_all_onesp (arg0))
8521 return omit_one_operand (type, arg0, arg1);
8525 /* Optimize -1 >> x for arithmetic right shifts. */
8526 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8527 return omit_one_operand (type, arg0, arg1);
8528 /* ... fall through ... */
8532 if (integer_zerop (arg1))
8533 return non_lvalue (fold_convert (type, arg0));
8534 if (integer_zerop (arg0))
8535 return omit_one_operand (type, arg0, arg1);
8537 /* Since negative shift count is not well-defined,
8538 don't try to compute it in the compiler. */
8539 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8542 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8543 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8544 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8545 && host_integerp (TREE_OPERAND (arg0, 1), false)
8546 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8548 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8549 + TREE_INT_CST_LOW (arg1));
8551 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8552 being well defined. */
8553 if (low >= TYPE_PRECISION (type))
8555 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8556 low = low % TYPE_PRECISION (type);
8557 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8558 return build_int_cst (type, 0);
8560 low = TYPE_PRECISION (type) - 1;
8563 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8564 build_int_cst (type, low));
8567 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8568 into x & ((unsigned)-1 >> c) for unsigned types. */
8569 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8570 || (TYPE_UNSIGNED (type)
8571 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8572 && host_integerp (arg1, false)
8573 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8574 && host_integerp (TREE_OPERAND (arg0, 1), false)
8575 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8577 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8578 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8584 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8586 lshift = build_int_cst (type, -1);
8587 lshift = int_const_binop (code, lshift, arg1, 0);
8589 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8593 /* Rewrite an LROTATE_EXPR by a constant into an
8594 RROTATE_EXPR by a new constant. */
8595 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8597 tree tem = build_int_cst (NULL_TREE,
8598 GET_MODE_BITSIZE (TYPE_MODE (type)));
8599 tem = fold_convert (TREE_TYPE (arg1), tem);
8600 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8601 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8604 /* If we have a rotate of a bit operation with the rotate count and
8605 the second operand of the bit operation both constant,
8606 permute the two operations. */
8607 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8608 && (TREE_CODE (arg0) == BIT_AND_EXPR
8609 || TREE_CODE (arg0) == BIT_IOR_EXPR
8610 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8611 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8612 return fold_build2 (TREE_CODE (arg0), type,
8613 fold_build2 (code, type,
8614 TREE_OPERAND (arg0, 0), arg1),
8615 fold_build2 (code, type,
8616 TREE_OPERAND (arg0, 1), arg1));
8618 /* Two consecutive rotates adding up to the width of the mode can
8620 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8621 && TREE_CODE (arg0) == RROTATE_EXPR
8622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8623 && TREE_INT_CST_HIGH (arg1) == 0
8624 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8625 && ((TREE_INT_CST_LOW (arg1)
8626 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8627 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8628 return TREE_OPERAND (arg0, 0);
8633 if (operand_equal_p (arg0, arg1, 0))
8634 return omit_one_operand (type, arg0, arg1);
8635 if (INTEGRAL_TYPE_P (type)
8636 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8637 return omit_one_operand (type, arg1, arg0);
8641 if (operand_equal_p (arg0, arg1, 0))
8642 return omit_one_operand (type, arg0, arg1);
8643 if (INTEGRAL_TYPE_P (type)
8644 && TYPE_MAX_VALUE (type)
8645 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8646 return omit_one_operand (type, arg1, arg0);
8649 case TRUTH_ANDIF_EXPR:
8650 /* Note that the operands of this must be ints
8651 and their values must be 0 or 1.
8652 ("true" is a fixed value perhaps depending on the language.) */
8653 /* If first arg is constant zero, return it. */
8654 if (integer_zerop (arg0))
8655 return fold_convert (type, arg0);
8656 case TRUTH_AND_EXPR:
8657 /* If either arg is constant true, drop it. */
8658 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8659 return non_lvalue (fold_convert (type, arg1));
8660 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8661 /* Preserve sequence points. */
8662 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8663 return non_lvalue (fold_convert (type, arg0));
8664 /* If second arg is constant zero, result is zero, but first arg
8665 must be evaluated. */
8666 if (integer_zerop (arg1))
8667 return omit_one_operand (type, arg1, arg0);
8668 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8669 case will be handled here. */
8670 if (integer_zerop (arg0))
8671 return omit_one_operand (type, arg0, arg1);
8673 /* !X && X is always false. */
8674 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8675 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8676 return omit_one_operand (type, integer_zero_node, arg1);
8677 /* X && !X is always false. */
8678 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8680 return omit_one_operand (type, integer_zero_node, arg0);
8682 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8683 means A >= Y && A != MAX, but in this case we know that
8686 if (!TREE_SIDE_EFFECTS (arg0)
8687 && !TREE_SIDE_EFFECTS (arg1))
8689 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8690 if (tem && !operand_equal_p (tem, arg0, 0))
8691 return fold_build2 (code, type, tem, arg1);
8693 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8694 if (tem && !operand_equal_p (tem, arg1, 0))
8695 return fold_build2 (code, type, arg0, tem);
8699 /* We only do these simplifications if we are optimizing. */
8703 /* Check for things like (A || B) && (A || C). We can convert this
8704 to A || (B && C). Note that either operator can be any of the four
8705 truth and/or operations and the transformation will still be
8706 valid. Also note that we only care about order for the
8707 ANDIF and ORIF operators. If B contains side effects, this
8708 might change the truth-value of A. */
8709 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8710 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8711 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8712 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8713 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8714 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8716 tree a00 = TREE_OPERAND (arg0, 0);
8717 tree a01 = TREE_OPERAND (arg0, 1);
8718 tree a10 = TREE_OPERAND (arg1, 0);
8719 tree a11 = TREE_OPERAND (arg1, 1);
8720 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8721 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8722 && (code == TRUTH_AND_EXPR
8723 || code == TRUTH_OR_EXPR));
8725 if (operand_equal_p (a00, a10, 0))
8726 return fold_build2 (TREE_CODE (arg0), type, a00,
8727 fold_build2 (code, type, a01, a11));
8728 else if (commutative && operand_equal_p (a00, a11, 0))
8729 return fold_build2 (TREE_CODE (arg0), type, a00,
8730 fold_build2 (code, type, a01, a10));
8731 else if (commutative && operand_equal_p (a01, a10, 0))
8732 return fold_build2 (TREE_CODE (arg0), type, a01,
8733 fold_build2 (code, type, a00, a11));
8735 /* This case if tricky because we must either have commutative
8736 operators or else A10 must not have side-effects. */
8738 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8739 && operand_equal_p (a01, a11, 0))
8740 return fold_build2 (TREE_CODE (arg0), type,
8741 fold_build2 (code, type, a00, a10),
8745 /* See if we can build a range comparison. */
8746 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8749 /* Check for the possibility of merging component references. If our
8750 lhs is another similar operation, try to merge its rhs with our
8751 rhs. Then try to merge our lhs and rhs. */
8752 if (TREE_CODE (arg0) == code
8753 && 0 != (tem = fold_truthop (code, type,
8754 TREE_OPERAND (arg0, 1), arg1)))
8755 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8757 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8762 case TRUTH_ORIF_EXPR:
8763 /* Note that the operands of this must be ints
8764 and their values must be 0 or true.
8765 ("true" is a fixed value perhaps depending on the language.) */
8766 /* If first arg is constant true, return it. */
8767 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8768 return fold_convert (type, arg0);
8770 /* If either arg is constant zero, drop it. */
8771 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8772 return non_lvalue (fold_convert (type, arg1));
8773 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8774 /* Preserve sequence points. */
8775 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8776 return non_lvalue (fold_convert (type, arg0));
8777 /* If second arg is constant true, result is true, but we must
8778 evaluate first arg. */
8779 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8780 return omit_one_operand (type, arg1, arg0);
8781 /* Likewise for first arg, but note this only occurs here for
8783 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8784 return omit_one_operand (type, arg0, arg1);
8786 /* !X || X is always true. */
8787 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8788 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8789 return omit_one_operand (type, integer_one_node, arg1);
8790 /* X || !X is always true. */
8791 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8792 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8793 return omit_one_operand (type, integer_one_node, arg0);
8797 case TRUTH_XOR_EXPR:
8798 /* If the second arg is constant zero, drop it. */
8799 if (integer_zerop (arg1))
8800 return non_lvalue (fold_convert (type, arg0));
8801 /* If the second arg is constant true, this is a logical inversion. */
8802 if (integer_onep (arg1))
8804 /* Only call invert_truthvalue if operand is a truth value. */
8805 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8806 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8808 tem = invert_truthvalue (arg0);
8809 return non_lvalue (fold_convert (type, tem));
8811 /* Identical arguments cancel to zero. */
8812 if (operand_equal_p (arg0, arg1, 0))
8813 return omit_one_operand (type, integer_zero_node, arg0);
8815 /* !X ^ X is always true. */
8816 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8817 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8818 return omit_one_operand (type, integer_one_node, arg1);
8820 /* X ^ !X is always true. */
8821 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8822 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8823 return omit_one_operand (type, integer_one_node, arg0);
8833 /* If one arg is a real or integer constant, put it last. */
8834 if (tree_swap_operands_p (arg0, arg1, true))
8835 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8837 /* bool_var != 0 becomes bool_var. */
8838 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8840 return non_lvalue (fold_convert (type, arg0));
8842 /* bool_var == 1 becomes bool_var. */
8843 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8845 return non_lvalue (fold_convert (type, arg0));
8847 /* If this is an equality comparison of the address of a non-weak
8848 object against zero, then we know the result. */
8849 if ((code == EQ_EXPR || code == NE_EXPR)
8850 && TREE_CODE (arg0) == ADDR_EXPR
8851 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8852 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8853 && integer_zerop (arg1))
8854 return constant_boolean_node (code != EQ_EXPR, type);
8856 /* If this is an equality comparison of the address of two non-weak,
8857 unaliased symbols neither of which are extern (since we do not
8858 have access to attributes for externs), then we know the result. */
8859 if ((code == EQ_EXPR || code == NE_EXPR)
8860 && TREE_CODE (arg0) == ADDR_EXPR
8861 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8862 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8863 && ! lookup_attribute ("alias",
8864 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8865 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8866 && TREE_CODE (arg1) == ADDR_EXPR
8867 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8868 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8869 && ! lookup_attribute ("alias",
8870 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8871 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8873 /* We know that we're looking at the address of two
8874 non-weak, unaliased, static _DECL nodes.
8876 It is both wasteful and incorrect to call operand_equal_p
8877 to compare the two ADDR_EXPR nodes. It is wasteful in that
8878 all we need to do is test pointer equality for the arguments
8879 to the two ADDR_EXPR nodes. It is incorrect to use
8880 operand_equal_p as that function is NOT equivalent to a
8881 C equality test. It can in fact return false for two
8882 objects which would test as equal using the C equality
8884 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8885 return constant_boolean_node (equal
8886 ? code == EQ_EXPR : code != EQ_EXPR,
8890 /* If this is a comparison of two exprs that look like an
8891 ARRAY_REF of the same object, then we can fold this to a
8892 comparison of the two offsets. */
8893 if (TREE_CODE_CLASS (code) == tcc_comparison)
8895 tree base0, offset0, base1, offset1;
8897 if (extract_array_ref (arg0, &base0, &offset0)
8898 && extract_array_ref (arg1, &base1, &offset1)
8899 && operand_equal_p (base0, base1, 0))
8901 /* Handle no offsets on both sides specially. */
8902 if (offset0 == NULL_TREE
8903 && offset1 == NULL_TREE)
8904 return fold_build2 (code, type, integer_zero_node,
8907 if (!offset0 || !offset1
8908 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8910 if (offset0 == NULL_TREE)
8911 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8912 if (offset1 == NULL_TREE)
8913 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8914 return fold_build2 (code, type, offset0, offset1);
8919 /* Transform comparisons of the form X +- C CMP X. */
8920 if ((code != EQ_EXPR && code != NE_EXPR)
8921 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8922 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8923 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8924 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8925 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8926 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8927 && !(flag_wrapv || flag_trapv))))
8929 tree arg01 = TREE_OPERAND (arg0, 1);
8930 enum tree_code code0 = TREE_CODE (arg0);
8933 if (TREE_CODE (arg01) == REAL_CST)
8934 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8936 is_positive = tree_int_cst_sgn (arg01);
8938 /* (X - c) > X becomes false. */
8940 && ((code0 == MINUS_EXPR && is_positive >= 0)
8941 || (code0 == PLUS_EXPR && is_positive <= 0)))
8942 return constant_boolean_node (0, type);
8944 /* Likewise (X + c) < X becomes false. */
8946 && ((code0 == PLUS_EXPR && is_positive >= 0)
8947 || (code0 == MINUS_EXPR && is_positive <= 0)))
8948 return constant_boolean_node (0, type);
8950 /* Convert (X - c) <= X to true. */
8951 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8953 && ((code0 == MINUS_EXPR && is_positive >= 0)
8954 || (code0 == PLUS_EXPR && is_positive <= 0)))
8955 return constant_boolean_node (1, type);
8957 /* Convert (X + c) >= X to true. */
8958 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8960 && ((code0 == PLUS_EXPR && is_positive >= 0)
8961 || (code0 == MINUS_EXPR && is_positive <= 0)))
8962 return constant_boolean_node (1, type);
8964 if (TREE_CODE (arg01) == INTEGER_CST)
8966 /* Convert X + c > X and X - c < X to true for integers. */
8968 && ((code0 == PLUS_EXPR && is_positive > 0)
8969 || (code0 == MINUS_EXPR && is_positive < 0)))
8970 return constant_boolean_node (1, type);
8973 && ((code0 == MINUS_EXPR && is_positive > 0)
8974 || (code0 == PLUS_EXPR && is_positive < 0)))
8975 return constant_boolean_node (1, type);
8977 /* Convert X + c <= X and X - c >= X to false for integers. */
8979 && ((code0 == PLUS_EXPR && is_positive > 0)
8980 || (code0 == MINUS_EXPR && is_positive < 0)))
8981 return constant_boolean_node (0, type);
8984 && ((code0 == MINUS_EXPR && is_positive > 0)
8985 || (code0 == PLUS_EXPR && is_positive < 0)))
8986 return constant_boolean_node (0, type);
8990 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8991 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8992 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8993 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8994 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8995 && !(flag_wrapv || flag_trapv))
8996 && (TREE_CODE (arg1) == INTEGER_CST
8997 && !TREE_OVERFLOW (arg1)))
8999 tree const1 = TREE_OPERAND (arg0, 1);
9001 tree variable = TREE_OPERAND (arg0, 0);
9004 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9006 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9007 TREE_TYPE (arg1), const2, const1);
9008 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9009 && (TREE_CODE (lhs) != INTEGER_CST
9010 || !TREE_OVERFLOW (lhs)))
9011 return fold_build2 (code, type, variable, lhs);
9014 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9016 tree targ0 = strip_float_extensions (arg0);
9017 tree targ1 = strip_float_extensions (arg1);
9018 tree newtype = TREE_TYPE (targ0);
9020 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9021 newtype = TREE_TYPE (targ1);
9023 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9024 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9025 return fold_build2 (code, type, fold_convert (newtype, targ0),
9026 fold_convert (newtype, targ1));
9028 /* (-a) CMP (-b) -> b CMP a */
9029 if (TREE_CODE (arg0) == NEGATE_EXPR
9030 && TREE_CODE (arg1) == NEGATE_EXPR)
9031 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9032 TREE_OPERAND (arg0, 0));
9034 if (TREE_CODE (arg1) == REAL_CST)
9036 REAL_VALUE_TYPE cst;
9037 cst = TREE_REAL_CST (arg1);
9039 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9040 if (TREE_CODE (arg0) == NEGATE_EXPR)
9042 fold_build2 (swap_tree_comparison (code), type,
9043 TREE_OPERAND (arg0, 0),
9044 build_real (TREE_TYPE (arg1),
9045 REAL_VALUE_NEGATE (cst)));
9047 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9048 /* a CMP (-0) -> a CMP 0 */
9049 if (REAL_VALUE_MINUS_ZERO (cst))
9050 return fold_build2 (code, type, arg0,
9051 build_real (TREE_TYPE (arg1), dconst0));
9053 /* x != NaN is always true, other ops are always false. */
9054 if (REAL_VALUE_ISNAN (cst)
9055 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9057 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9058 return omit_one_operand (type, tem, arg0);
9061 /* Fold comparisons against infinity. */
9062 if (REAL_VALUE_ISINF (cst))
9064 tem = fold_inf_compare (code, type, arg0, arg1);
9065 if (tem != NULL_TREE)
9070 /* If this is a comparison of a real constant with a PLUS_EXPR
9071 or a MINUS_EXPR of a real constant, we can convert it into a
9072 comparison with a revised real constant as long as no overflow
9073 occurs when unsafe_math_optimizations are enabled. */
9074 if (flag_unsafe_math_optimizations
9075 && TREE_CODE (arg1) == REAL_CST
9076 && (TREE_CODE (arg0) == PLUS_EXPR
9077 || TREE_CODE (arg0) == MINUS_EXPR)
9078 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9079 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9080 ? MINUS_EXPR : PLUS_EXPR,
9081 arg1, TREE_OPERAND (arg0, 1), 0))
9082 && ! TREE_CONSTANT_OVERFLOW (tem))
9083 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9085 /* Likewise, we can simplify a comparison of a real constant with
9086 a MINUS_EXPR whose first operand is also a real constant, i.e.
9087 (c1 - x) < c2 becomes x > c1-c2. */
9088 if (flag_unsafe_math_optimizations
9089 && TREE_CODE (arg1) == REAL_CST
9090 && TREE_CODE (arg0) == MINUS_EXPR
9091 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9092 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9094 && ! TREE_CONSTANT_OVERFLOW (tem))
9095 return fold_build2 (swap_tree_comparison (code), type,
9096 TREE_OPERAND (arg0, 1), tem);
9098 /* Fold comparisons against built-in math functions. */
9099 if (TREE_CODE (arg1) == REAL_CST
9100 && flag_unsafe_math_optimizations
9101 && ! flag_errno_math)
9103 enum built_in_function fcode = builtin_mathfn_code (arg0);
9105 if (fcode != END_BUILTINS)
9107 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9108 if (tem != NULL_TREE)
9114 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9115 if (TREE_CONSTANT (arg1)
9116 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9117 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9118 /* This optimization is invalid for ordered comparisons
9119 if CONST+INCR overflows or if foo+incr might overflow.
9120 This optimization is invalid for floating point due to rounding.
9121 For pointer types we assume overflow doesn't happen. */
9122 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9123 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9124 && (code == EQ_EXPR || code == NE_EXPR))))
9126 tree varop, newconst;
9128 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9130 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9131 arg1, TREE_OPERAND (arg0, 1));
9132 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9133 TREE_OPERAND (arg0, 0),
9134 TREE_OPERAND (arg0, 1));
9138 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9139 arg1, TREE_OPERAND (arg0, 1));
9140 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9141 TREE_OPERAND (arg0, 0),
9142 TREE_OPERAND (arg0, 1));
9146 /* If VAROP is a reference to a bitfield, we must mask
9147 the constant by the width of the field. */
9148 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9149 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9150 && host_integerp (DECL_SIZE (TREE_OPERAND
9151 (TREE_OPERAND (varop, 0), 1)), 1))
9153 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9154 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9155 tree folded_compare, shift;
9157 /* First check whether the comparison would come out
9158 always the same. If we don't do that we would
9159 change the meaning with the masking. */
9160 folded_compare = fold_build2 (code, type,
9161 TREE_OPERAND (varop, 0), arg1);
9162 if (integer_zerop (folded_compare)
9163 || integer_onep (folded_compare))
9164 return omit_one_operand (type, folded_compare, varop);
9166 shift = build_int_cst (NULL_TREE,
9167 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9168 shift = fold_convert (TREE_TYPE (varop), shift);
9169 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9171 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9175 return fold_build2 (code, type, varop, newconst);
9178 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9179 This transformation affects the cases which are handled in later
9180 optimizations involving comparisons with non-negative constants. */
9181 if (TREE_CODE (arg1) == INTEGER_CST
9182 && TREE_CODE (arg0) != INTEGER_CST
9183 && tree_int_cst_sgn (arg1) > 0)
9188 arg1 = const_binop (MINUS_EXPR, arg1,
9189 build_int_cst (TREE_TYPE (arg1), 1), 0);
9190 return fold_build2 (GT_EXPR, type, arg0,
9191 fold_convert (TREE_TYPE (arg0), arg1));
9194 arg1 = const_binop (MINUS_EXPR, arg1,
9195 build_int_cst (TREE_TYPE (arg1), 1), 0);
9196 return fold_build2 (LE_EXPR, type, arg0,
9197 fold_convert (TREE_TYPE (arg0), arg1));
9204 /* Comparisons with the highest or lowest possible integer of
9205 the specified size will have known values. */
9207 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9209 if (TREE_CODE (arg1) == INTEGER_CST
9210 && ! TREE_CONSTANT_OVERFLOW (arg1)
9211 && width <= 2 * HOST_BITS_PER_WIDE_INT
9212 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9213 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9215 HOST_WIDE_INT signed_max_hi;
9216 unsigned HOST_WIDE_INT signed_max_lo;
9217 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9219 if (width <= HOST_BITS_PER_WIDE_INT)
9221 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9226 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9228 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9234 max_lo = signed_max_lo;
9235 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9241 width -= HOST_BITS_PER_WIDE_INT;
9243 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9248 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9250 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9255 max_hi = signed_max_hi;
9256 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9260 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9261 && TREE_INT_CST_LOW (arg1) == max_lo)
9265 return omit_one_operand (type, integer_zero_node, arg0);
9268 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9271 return omit_one_operand (type, integer_one_node, arg0);
9274 return fold_build2 (NE_EXPR, type, arg0, arg1);
9276 /* The GE_EXPR and LT_EXPR cases above are not normally
9277 reached because of previous transformations. */
9282 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9284 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9288 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9289 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9291 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9292 return fold_build2 (NE_EXPR, type, arg0, arg1);
9296 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9298 && TREE_INT_CST_LOW (arg1) == min_lo)
9302 return omit_one_operand (type, integer_zero_node, arg0);
9305 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9308 return omit_one_operand (type, integer_one_node, arg0);
9311 return fold_build2 (NE_EXPR, type, op0, op1);
9316 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9318 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9322 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9323 return fold_build2 (NE_EXPR, type, arg0, arg1);
9325 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9326 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9331 else if (!in_gimple_form
9332 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9333 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9334 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9335 /* signed_type does not work on pointer types. */
9336 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9338 /* The following case also applies to X < signed_max+1
9339 and X >= signed_max+1 because previous transformations. */
9340 if (code == LE_EXPR || code == GT_EXPR)
9343 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9344 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9345 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9346 type, fold_convert (st0, arg0),
9347 build_int_cst (st1, 0));
9353 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9354 a MINUS_EXPR of a constant, we can convert it into a comparison with
9355 a revised constant as long as no overflow occurs. */
9356 if ((code == EQ_EXPR || code == NE_EXPR)
9357 && TREE_CODE (arg1) == INTEGER_CST
9358 && (TREE_CODE (arg0) == PLUS_EXPR
9359 || TREE_CODE (arg0) == MINUS_EXPR)
9360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9361 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9362 ? MINUS_EXPR : PLUS_EXPR,
9363 arg1, TREE_OPERAND (arg0, 1), 0))
9364 && ! TREE_CONSTANT_OVERFLOW (tem))
9365 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9367 /* Similarly for a NEGATE_EXPR. */
9368 else if ((code == EQ_EXPR || code == NE_EXPR)
9369 && TREE_CODE (arg0) == NEGATE_EXPR
9370 && TREE_CODE (arg1) == INTEGER_CST
9371 && 0 != (tem = negate_expr (arg1))
9372 && TREE_CODE (tem) == INTEGER_CST
9373 && ! TREE_CONSTANT_OVERFLOW (tem))
9374 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9376 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9377 for !=. Don't do this for ordered comparisons due to overflow. */
9378 else if ((code == NE_EXPR || code == EQ_EXPR)
9379 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9380 return fold_build2 (code, type,
9381 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9383 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9384 && (TREE_CODE (arg0) == NOP_EXPR
9385 || TREE_CODE (arg0) == CONVERT_EXPR))
9387 /* If we are widening one operand of an integer comparison,
9388 see if the other operand is similarly being widened. Perhaps we
9389 can do the comparison in the narrower type. */
9390 tem = fold_widened_comparison (code, type, arg0, arg1);
9394 /* Or if we are changing signedness. */
9395 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9400 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9401 constant, we can simplify it. */
9402 else if (TREE_CODE (arg1) == INTEGER_CST
9403 && (TREE_CODE (arg0) == MIN_EXPR
9404 || TREE_CODE (arg0) == MAX_EXPR)
9405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9407 tem = optimize_minmax_comparison (code, type, op0, op1);
9414 /* If we are comparing an ABS_EXPR with a constant, we can
9415 convert all the cases into explicit comparisons, but they may
9416 well not be faster than doing the ABS and one comparison.
9417 But ABS (X) <= C is a range comparison, which becomes a subtraction
9418 and a comparison, and is probably faster. */
9419 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9420 && TREE_CODE (arg0) == ABS_EXPR
9421 && ! TREE_SIDE_EFFECTS (arg0)
9422 && (0 != (tem = negate_expr (arg1)))
9423 && TREE_CODE (tem) == INTEGER_CST
9424 && ! TREE_CONSTANT_OVERFLOW (tem))
9425 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9426 build2 (GE_EXPR, type,
9427 TREE_OPERAND (arg0, 0), tem),
9428 build2 (LE_EXPR, type,
9429 TREE_OPERAND (arg0, 0), arg1));
9431 /* Convert ABS_EXPR<x> >= 0 to true. */
9432 else if (code == GE_EXPR
9433 && tree_expr_nonnegative_p (arg0)
9434 && (integer_zerop (arg1)
9435 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9436 && real_zerop (arg1))))
9437 return omit_one_operand (type, integer_one_node, arg0);
9439 /* Convert ABS_EXPR<x> < 0 to false. */
9440 else if (code == LT_EXPR
9441 && tree_expr_nonnegative_p (arg0)
9442 && (integer_zerop (arg1) || real_zerop (arg1)))
9443 return omit_one_operand (type, integer_zero_node, arg0);
9445 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9446 else if ((code == EQ_EXPR || code == NE_EXPR)
9447 && TREE_CODE (arg0) == ABS_EXPR
9448 && (integer_zerop (arg1) || real_zerop (arg1)))
9449 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9451 /* If this is an EQ or NE comparison with zero and ARG0 is
9452 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9453 two operations, but the latter can be done in one less insn
9454 on machines that have only two-operand insns or on which a
9455 constant cannot be the first operand. */
9456 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9457 && TREE_CODE (arg0) == BIT_AND_EXPR)
9459 tree arg00 = TREE_OPERAND (arg0, 0);
9460 tree arg01 = TREE_OPERAND (arg0, 1);
9461 if (TREE_CODE (arg00) == LSHIFT_EXPR
9462 && integer_onep (TREE_OPERAND (arg00, 0)))
9464 fold_build2 (code, type,
9465 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9466 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9467 arg01, TREE_OPERAND (arg00, 1)),
9468 fold_convert (TREE_TYPE (arg0),
9471 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9472 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9474 fold_build2 (code, type,
9475 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9476 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9477 arg00, TREE_OPERAND (arg01, 1)),
9478 fold_convert (TREE_TYPE (arg0),
9483 /* If this is an NE or EQ comparison of zero against the result of a
9484 signed MOD operation whose second operand is a power of 2, make
9485 the MOD operation unsigned since it is simpler and equivalent. */
9486 if ((code == NE_EXPR || code == EQ_EXPR)
9487 && integer_zerop (arg1)
9488 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9489 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9490 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9491 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9492 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9493 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9495 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9496 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9497 fold_convert (newtype,
9498 TREE_OPERAND (arg0, 0)),
9499 fold_convert (newtype,
9500 TREE_OPERAND (arg0, 1)));
9502 return fold_build2 (code, type, newmod,
9503 fold_convert (newtype, arg1));
9506 /* If this is an NE comparison of zero with an AND of one, remove the
9507 comparison since the AND will give the correct value. */
9508 if (code == NE_EXPR && integer_zerop (arg1)
9509 && TREE_CODE (arg0) == BIT_AND_EXPR
9510 && integer_onep (TREE_OPERAND (arg0, 1)))
9511 return fold_convert (type, arg0);
9513 /* If we have (A & C) == C where C is a power of 2, convert this into
9514 (A & C) != 0. Similarly for NE_EXPR. */
9515 if ((code == EQ_EXPR || code == NE_EXPR)
9516 && TREE_CODE (arg0) == BIT_AND_EXPR
9517 && integer_pow2p (TREE_OPERAND (arg0, 1))
9518 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9519 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9520 arg0, fold_convert (TREE_TYPE (arg0),
9521 integer_zero_node));
9523 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9524 bit, then fold the expression into A < 0 or A >= 0. */
9525 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9529 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9530 Similarly for NE_EXPR. */
9531 if ((code == EQ_EXPR || code == NE_EXPR)
9532 && TREE_CODE (arg0) == BIT_AND_EXPR
9533 && TREE_CODE (arg1) == INTEGER_CST
9534 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9536 tree notc = fold_build1 (BIT_NOT_EXPR,
9537 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9538 TREE_OPERAND (arg0, 1));
9539 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9541 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9542 if (integer_nonzerop (dandnotc))
9543 return omit_one_operand (type, rslt, arg0);
9546 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9547 Similarly for NE_EXPR. */
9548 if ((code == EQ_EXPR || code == NE_EXPR)
9549 && TREE_CODE (arg0) == BIT_IOR_EXPR
9550 && TREE_CODE (arg1) == INTEGER_CST
9551 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9553 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9554 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9555 TREE_OPERAND (arg0, 1), notd);
9556 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9557 if (integer_nonzerop (candnotd))
9558 return omit_one_operand (type, rslt, arg0);
9561 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9562 and similarly for >= into !=. */
9563 if ((code == LT_EXPR || code == GE_EXPR)
9564 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9565 && TREE_CODE (arg1) == LSHIFT_EXPR
9566 && integer_onep (TREE_OPERAND (arg1, 0)))
9567 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9568 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9569 TREE_OPERAND (arg1, 1)),
9570 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9572 else if ((code == LT_EXPR || code == GE_EXPR)
9573 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9574 && (TREE_CODE (arg1) == NOP_EXPR
9575 || TREE_CODE (arg1) == CONVERT_EXPR)
9576 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9577 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9579 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9580 fold_convert (TREE_TYPE (arg0),
9581 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9582 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9584 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9586 /* Simplify comparison of something with itself. (For IEEE
9587 floating-point, we can only do some of these simplifications.) */
9588 if (operand_equal_p (arg0, arg1, 0))
9593 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9594 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9595 return constant_boolean_node (1, type);
9600 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9601 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9602 return constant_boolean_node (1, type);
9603 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9606 /* For NE, we can only do this simplification if integer
9607 or we don't honor IEEE floating point NaNs. */
9608 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9609 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9611 /* ... fall through ... */
9614 return constant_boolean_node (0, type);
9620 /* If we are comparing an expression that just has comparisons
9621 of two integer values, arithmetic expressions of those comparisons,
9622 and constants, we can simplify it. There are only three cases
9623 to check: the two values can either be equal, the first can be
9624 greater, or the second can be greater. Fold the expression for
9625 those three values. Since each value must be 0 or 1, we have
9626 eight possibilities, each of which corresponds to the constant 0
9627 or 1 or one of the six possible comparisons.
9629 This handles common cases like (a > b) == 0 but also handles
9630 expressions like ((x > y) - (y > x)) > 0, which supposedly
9631 occur in macroized code. */
9633 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9635 tree cval1 = 0, cval2 = 0;
9638 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9639 /* Don't handle degenerate cases here; they should already
9640 have been handled anyway. */
9641 && cval1 != 0 && cval2 != 0
9642 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9643 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9644 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9645 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9646 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9647 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9648 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9650 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9651 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9653 /* We can't just pass T to eval_subst in case cval1 or cval2
9654 was the same as ARG1. */
9657 = fold_build2 (code, type,
9658 eval_subst (arg0, cval1, maxval,
9662 = fold_build2 (code, type,
9663 eval_subst (arg0, cval1, maxval,
9667 = fold_build2 (code, type,
9668 eval_subst (arg0, cval1, minval,
9672 /* All three of these results should be 0 or 1. Confirm they
9673 are. Then use those values to select the proper code
9676 if ((integer_zerop (high_result)
9677 || integer_onep (high_result))
9678 && (integer_zerop (equal_result)
9679 || integer_onep (equal_result))
9680 && (integer_zerop (low_result)
9681 || integer_onep (low_result)))
9683 /* Make a 3-bit mask with the high-order bit being the
9684 value for `>', the next for '=', and the low for '<'. */
9685 switch ((integer_onep (high_result) * 4)
9686 + (integer_onep (equal_result) * 2)
9687 + integer_onep (low_result))
9691 return omit_one_operand (type, integer_zero_node, arg0);
9712 return omit_one_operand (type, integer_one_node, arg0);
9716 return save_expr (build2 (code, type, cval1, cval2));
9718 return fold_build2 (code, type, cval1, cval2);
9723 /* If this is a comparison of a field, we may be able to simplify it. */
9724 if (((TREE_CODE (arg0) == COMPONENT_REF
9725 && lang_hooks.can_use_bit_fields_p ())
9726 || TREE_CODE (arg0) == BIT_FIELD_REF)
9727 && (code == EQ_EXPR || code == NE_EXPR)
9728 /* Handle the constant case even without -O
9729 to make sure the warnings are given. */
9730 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9732 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9737 /* Fold a comparison of the address of COMPONENT_REFs with the same
9738 type and component to a comparison of the address of the base
9739 object. In short, &x->a OP &y->a to x OP y and
9740 &x->a OP &y.a to x OP &y */
9741 if (TREE_CODE (arg0) == ADDR_EXPR
9742 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9743 && TREE_CODE (arg1) == ADDR_EXPR
9744 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9746 tree cref0 = TREE_OPERAND (arg0, 0);
9747 tree cref1 = TREE_OPERAND (arg1, 0);
9748 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9750 tree op0 = TREE_OPERAND (cref0, 0);
9751 tree op1 = TREE_OPERAND (cref1, 0);
9752 return fold_build2 (code, type,
9753 build_fold_addr_expr (op0),
9754 build_fold_addr_expr (op1));
9758 /* Optimize comparisons of strlen vs zero to a compare of the
9759 first character of the string vs zero. To wit,
9760 strlen(ptr) == 0 => *ptr == 0
9761 strlen(ptr) != 0 => *ptr != 0
9762 Other cases should reduce to one of these two (or a constant)
9763 due to the return value of strlen being unsigned. */
9764 if ((code == EQ_EXPR || code == NE_EXPR)
9765 && integer_zerop (arg1)
9766 && TREE_CODE (arg0) == CALL_EXPR)
9768 tree fndecl = get_callee_fndecl (arg0);
9772 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9773 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9774 && (arglist = TREE_OPERAND (arg0, 1))
9775 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9776 && ! TREE_CHAIN (arglist))
9778 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9779 return fold_build2 (code, type, iref,
9780 build_int_cst (TREE_TYPE (iref), 0));
9784 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9785 into a single range test. */
9786 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9787 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9788 && TREE_CODE (arg1) == INTEGER_CST
9789 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9790 && !integer_zerop (TREE_OPERAND (arg0, 1))
9791 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9792 && !TREE_OVERFLOW (arg1))
9794 t1 = fold_div_compare (code, type, arg0, arg1);
9795 if (t1 != NULL_TREE)
9799 if ((code == EQ_EXPR || code == NE_EXPR)
9800 && integer_zerop (arg1)
9801 && tree_expr_nonzero_p (arg0))
9803 tree res = constant_boolean_node (code==NE_EXPR, type);
9804 return omit_one_operand (type, res, arg0);
9807 t1 = fold_relational_const (code, type, arg0, arg1);
9808 return t1 == NULL_TREE ? NULL_TREE : t1;
9810 case UNORDERED_EXPR:
9818 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9820 t1 = fold_relational_const (code, type, arg0, arg1);
9821 if (t1 != NULL_TREE)
9825 /* If the first operand is NaN, the result is constant. */
9826 if (TREE_CODE (arg0) == REAL_CST
9827 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9828 && (code != LTGT_EXPR || ! flag_trapping_math))
9830 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9833 return omit_one_operand (type, t1, arg1);
9836 /* If the second operand is NaN, the result is constant. */
9837 if (TREE_CODE (arg1) == REAL_CST
9838 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9839 && (code != LTGT_EXPR || ! flag_trapping_math))
9841 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9844 return omit_one_operand (type, t1, arg0);
9847 /* Simplify unordered comparison of something with itself. */
9848 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9849 && operand_equal_p (arg0, arg1, 0))
9850 return constant_boolean_node (1, type);
9852 if (code == LTGT_EXPR
9853 && !flag_trapping_math
9854 && operand_equal_p (arg0, arg1, 0))
9855 return constant_boolean_node (0, type);
9857 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9859 tree targ0 = strip_float_extensions (arg0);
9860 tree targ1 = strip_float_extensions (arg1);
9861 tree newtype = TREE_TYPE (targ0);
9863 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9864 newtype = TREE_TYPE (targ1);
9866 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9867 return fold_build2 (code, type, fold_convert (newtype, targ0),
9868 fold_convert (newtype, targ1));
9874 /* When pedantic, a compound expression can be neither an lvalue
9875 nor an integer constant expression. */
9876 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9878 /* Don't let (0, 0) be null pointer constant. */
9879 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9880 : fold_convert (type, arg1);
9881 return pedantic_non_lvalue (tem);
9885 return build_complex (type, arg0, arg1);
9889 /* An ASSERT_EXPR should never be passed to fold_binary. */
9894 } /* switch (code) */
9897 /* Callback for walk_tree, looking for LABEL_EXPR.
9898    Returns *TP if it is a LABEL_EXPR; otherwise it returns NULL_TREE.
9899    Do not check the sub-tree of GOTO_EXPR.  */
9902 contains_label_1 (tree *tp,
9904 void *data ATTRIBUTE_UNUSED)
9906 switch (TREE_CODE (*tp))
9918 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
9919    accessible from outside the sub-tree.  Returns nonzero (true) if such
9920    a label is found, and zero (false) otherwise.  */
9923 contains_label_p (tree st)
9925 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9928 /* Fold a ternary expression of code CODE and type TYPE with operands
9929 OP0, OP1, and OP2. Return the folded expression if folding is
9930 successful. Otherwise, return NULL_TREE. */
9933 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9936 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9937 enum tree_code_class kind = TREE_CODE_CLASS (code);
9939 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9940 && TREE_CODE_LENGTH (code) == 3);
9942 /* Strip any conversions that don't change the mode. This is safe
9943 for every expression, except for a comparison expression because
9944 its signedness is derived from its operands. So, in the latter
9945 case, only strip conversions that don't change the signedness.
9947 Note that this is done as an internal manipulation within the
9948 constant folder, in order to find the simplest representation of
9949 the arguments so that their form can be studied. In any cases,
9950 the appropriate type conversions should be put back in the tree
9951 that will get out of the constant folder. */
9967 if (TREE_CODE (arg0) == CONSTRUCTOR
9968 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9970 unsigned HOST_WIDE_INT idx;
9972 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9979 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9980 so all simple results must be passed through pedantic_non_lvalue. */
9981 if (TREE_CODE (arg0) == INTEGER_CST)
9983 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9984 tem = integer_zerop (arg0) ? op2 : op1;
9985 /* Only optimize constant conditions when the selected branch
9986 has the same type as the COND_EXPR. This avoids optimizing
9987 away "c ? x : throw", where the throw has a void type.
9988 Avoid throwing away that operand which contains label. */
9989 if ((!TREE_SIDE_EFFECTS (unused_op)
9990 || !contains_label_p (unused_op))
9991 && (! VOID_TYPE_P (TREE_TYPE (tem))
9992 || VOID_TYPE_P (type)))
9993 return pedantic_non_lvalue (tem);
9996 if (operand_equal_p (arg1, op2, 0))
9997 return pedantic_omit_one_operand (type, arg1, arg0);
9999 /* If we have A op B ? A : C, we may be able to convert this to a
10000 simpler expression, depending on the operation and the values
10001 of B and C. Signed zeros prevent all of these transformations,
10002 for reasons given above each one.
10004 Also try swapping the arguments and inverting the conditional. */
10005 if (COMPARISON_CLASS_P (arg0)
10006 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10007 arg1, TREE_OPERAND (arg0, 1))
10008 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10010 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10015 if (COMPARISON_CLASS_P (arg0)
10016 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10018 TREE_OPERAND (arg0, 1))
10019 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10021 tem = invert_truthvalue (arg0);
10022 if (COMPARISON_CLASS_P (tem))
10024 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10030 /* If the second operand is simpler than the third, swap them
10031 since that produces better jump optimization results. */
10032 if (truth_value_p (TREE_CODE (arg0))
10033 && tree_swap_operands_p (op1, op2, false))
10035 /* See if this can be inverted. If it can't, possibly because
10036 it was a floating-point inequality comparison, don't do
10038 tem = invert_truthvalue (arg0);
10040 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10041 return fold_build3 (code, type, tem, op2, op1);
10044 /* Convert A ? 1 : 0 to simply A. */
10045 if (integer_onep (op1)
10046 && integer_zerop (op2)
10047 /* If we try to convert OP0 to our type, the
10048 call to fold will try to move the conversion inside
10049 a COND, which will recurse. In that case, the COND_EXPR
10050 is probably the best choice, so leave it alone. */
10051 && type == TREE_TYPE (arg0))
10052 return pedantic_non_lvalue (arg0);
10054 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10055 over COND_EXPR in cases such as floating point comparisons. */
10056 if (integer_zerop (op1)
10057 && integer_onep (op2)
10058 && truth_value_p (TREE_CODE (arg0)))
10059 return pedantic_non_lvalue (fold_convert (type,
10060 invert_truthvalue (arg0)));
10062 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10063 if (TREE_CODE (arg0) == LT_EXPR
10064 && integer_zerop (TREE_OPERAND (arg0, 1))
10065 && integer_zerop (op2)
10066 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10067 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10068 TREE_TYPE (tem), tem, arg1));
10070 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10071 already handled above. */
10072 if (TREE_CODE (arg0) == BIT_AND_EXPR
10073 && integer_onep (TREE_OPERAND (arg0, 1))
10074 && integer_zerop (op2)
10075 && integer_pow2p (arg1))
10077 tree tem = TREE_OPERAND (arg0, 0);
10079 if (TREE_CODE (tem) == RSHIFT_EXPR
10080 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10081 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10082 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10083 return fold_build2 (BIT_AND_EXPR, type,
10084 TREE_OPERAND (tem, 0), arg1);
10087 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10088 is probably obsolete because the first operand should be a
10089 truth value (that's why we have the two cases above), but let's
10090 leave it in until we can confirm this for all front-ends. */
10091 if (integer_zerop (op2)
10092 && TREE_CODE (arg0) == NE_EXPR
10093 && integer_zerop (TREE_OPERAND (arg0, 1))
10094 && integer_pow2p (arg1)
10095 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10096 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10097 arg1, OEP_ONLY_CONST))
10098 return pedantic_non_lvalue (fold_convert (type,
10099 TREE_OPERAND (arg0, 0)));
10101 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10102 if (integer_zerop (op2)
10103 && truth_value_p (TREE_CODE (arg0))
10104 && truth_value_p (TREE_CODE (arg1)))
10105 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10107 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10108 if (integer_onep (op2)
10109 && truth_value_p (TREE_CODE (arg0))
10110 && truth_value_p (TREE_CODE (arg1)))
10112 /* Only perform transformation if ARG0 is easily inverted. */
10113 tem = invert_truthvalue (arg0);
10114 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10115 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10118 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10119 if (integer_zerop (arg1)
10120 && truth_value_p (TREE_CODE (arg0))
10121 && truth_value_p (TREE_CODE (op2)))
10123 /* Only perform transformation if ARG0 is easily inverted. */
10124 tem = invert_truthvalue (arg0);
10125 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10126 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10129 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10130 if (integer_onep (arg1)
10131 && truth_value_p (TREE_CODE (arg0))
10132 && truth_value_p (TREE_CODE (op2)))
10133 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10138 /* Check for a built-in function. */
10139 if (TREE_CODE (op0) == ADDR_EXPR
10140 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10141 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10142 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10145 case BIT_FIELD_REF:
10146 if (TREE_CODE (arg0) == VECTOR_CST
10147 && type == TREE_TYPE (TREE_TYPE (arg0))
10148 && host_integerp (arg1, 1)
10149 && host_integerp (op2, 1))
10151 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10152 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10155 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10156 && (idx % width) == 0
10157 && (idx = idx / width)
10158 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10160 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10161 while (idx-- > 0 && elements)
10162 elements = TREE_CHAIN (elements);
10164 return TREE_VALUE (elements);
10166 return fold_convert (type, integer_zero_node);
10173 } /* switch (code) */
10176 /* Perform constant folding and related simplification of EXPR.
10177 The related simplifications include x*1 => x, x*0 => 0, etc.,
10178 and application of the associative law.
10179 NOP_EXPR conversions may be removed freely (as long as we
10180 are careful not to change the type of the overall expression).
10181 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10182 but we can constant-fold them if they have constant operands. */
/* Under --enable-checking=fold the real worker is renamed fold_1 and
   wrapped by a checksumming fold () defined further below.  */
10184 #ifdef ENABLE_FOLD_CHECKING
10185 # define fold(x) fold_1 (x)
10186 static tree fold_1 (tree);
/* NOTE(review): this file is a sampled excerpt -- the function header,
   braces and some case labels between the visible lines are not shown.  */
10192 const tree t = expr;
10193 enum tree_code code = TREE_CODE (t);
10194 enum tree_code_class kind = TREE_CODE_CLASS (code);
10197 /* Return right away if a constant. */
10198 if (kind == tcc_constant)
10201 if (IS_EXPR_CODE_CLASS (kind))
10203 tree type = TREE_TYPE (t);
10204 tree op0, op1, op2;
/* Dispatch on the operand count of CODE: arity 1 goes to fold_unary,
   arity 2 to fold_binary, arity 3 to fold_ternary.  Each returns
   NULL_TREE when no simplification applied, in which case the original
   EXPR is handed back unchanged.  */
10206 switch (TREE_CODE_LENGTH (code))
10209 op0 = TREE_OPERAND (t, 0);
10210 tem = fold_unary (code, type, op0);
10211 return tem ? tem : expr;
10213 op0 = TREE_OPERAND (t, 0);
10214 op1 = TREE_OPERAND (t, 1);
10215 tem = fold_binary (code, type, op0, op1);
10216 return tem ? tem : expr;
10218 op0 = TREE_OPERAND (t, 0);
10219 op1 = TREE_OPERAND (t, 1);
10220 op2 = TREE_OPERAND (t, 2);
10221 tem = fold_ternary (code, type, op0, op1, op2);
10222 return tem ? tem : expr;
/* Presumably the CONST_DECL case: fold the declaration's initial
   value -- TODO confirm, the case label is not visible here.  */
10231 return fold (DECL_INITIAL (t));
10235 } /* switch (code) */
10238 #ifdef ENABLE_FOLD_CHECKING
10241 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10242 static void fold_check_failed (tree, tree);
10243 void print_fold_checksum (tree);
10245 /* When --enable-checking=fold, compute a digest of expr before
10246 and after actual fold call to see if fold did not accidentally
10247 change original expr. */
/* Checking wrapper for fold: MD5-checksum EXPR, run the real worker
   fold_1, checksum EXPR again, and abort via fold_check_failed if the
   two 16-byte digests differ (i.e. fold mutated its input in place).
   The pointer-hashed table HT guards against revisiting shared
   subtrees during the checksum walk.  */
10253 struct md5_ctx ctx;
10254 unsigned char checksum_before[16], checksum_after[16];
10257 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10258 md5_init_ctx (&ctx);
10259 fold_checksum_tree (expr, &ctx, ht);
10260 md5_finish_ctx (&ctx, checksum_before);
10263 ret = fold_1 (expr);
10265 md5_init_ctx (&ctx);
10266 fold_checksum_tree (expr, &ctx, ht);
10267 md5_finish_ctx (&ctx, checksum_after);
10270 if (memcmp (checksum_before, checksum_after, 16))
10271 fold_check_failed (expr, ret);
/* Debugging aid: print the MD5 checksum of EXPR (as computed by
   fold_checksum_tree) to stderr as 32 hex digits plus a newline.  */
10277 print_fold_checksum (tree expr)
10279 struct md5_ctx ctx;
10280 unsigned char checksum[16], cnt;
10283 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10284 md5_init_ctx (&ctx);
10285 fold_checksum_tree (expr, &ctx, ht);
10286 md5_finish_ctx (&ctx, checksum);
10288 for (cnt = 0; cnt < 16; ++cnt)
10289 fprintf (stderr, "%02x", checksum[cnt]);
10290 putc ('\n', stderr);
/* Report a fold self-check failure: fold modified its input tree.
   Both arguments are currently unused; the call never returns.  */
10294 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10296 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of tree EXPR (and the trees it
   references) into MD5 context CTX.  HT is a pointer-identity hash
   table used to visit each node at most once, so shared subtrees and
   cycles do not recurse forever.  Fields that fold is allowed to
   modify (DECL_ASSEMBLER_NAME, TYPE_POINTER_TO, TYPE_REFERENCE_TO,
   TYPE_CACHED_VALUES, the placeholder flag) are masked out by hashing
   a scrubbed stack copy of the node instead of the node itself.  */
10300 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10303 enum tree_code code;
10304 char buf[sizeof (struct tree_function_decl)];
/* BUF must be able to hold a copy of any node we scrub; assert that
   tree_function_decl is the largest relevant node layout.  */
10309 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10310 <= sizeof (struct tree_function_decl))
10311 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10314 slot = htab_find_slot (ht, expr, INSERT);
10318 code = TREE_CODE (expr);
10319 if (TREE_CODE_CLASS (code) == tcc_declaration
10320 && DECL_ASSEMBLER_NAME_SET_P (expr))
10322 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10323 memcpy (buf, expr, tree_size (expr));
10325 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10327 else if (TREE_CODE_CLASS (code) == tcc_type
10328 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10329 || TYPE_CACHED_VALUES_P (expr)
10330 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10332 /* Allow these fields to be modified. */
10333 memcpy (buf, expr, tree_size (expr))
10335 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10336 TYPE_POINTER_TO (expr) = NULL;
10337 TYPE_REFERENCE_TO (expr) = NULL;
10338 if (TYPE_CACHED_VALUES_P (expr))
10340 TYPE_CACHED_VALUES_P (expr) = 0;
10341 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the (possibly scrubbed) node body, then recurse into the
   trees it points at: its type, its chain (except for types, decls
   and TREE_LIST, whose chains are handled specially), and then its
   class-specific fields.  */
10344 md5_process_bytes (expr, tree_size (expr), ctx);
10345 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10346 if (TREE_CODE_CLASS (code) != tcc_type
10347 && TREE_CODE_CLASS (code) != tcc_declaration
10348 && code != TREE_LIST)
10349 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10350 switch (TREE_CODE_CLASS (code))
10356 md5_process_bytes (TREE_STRING_POINTER (expr),
10357 TREE_STRING_LENGTH (expr), ctx);
10360 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10361 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10364 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10370 case tcc_exceptional:
/* TREE_LIST (presumably): hash purpose/value, then iterate down the
   chain via goto rather than recursing, to bound stack depth.  */
10374 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10375 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10376 expr = TREE_CHAIN (expr);
10377 goto recursive_label;
10380 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10381 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10387 case tcc_expression:
10388 case tcc_reference:
10389 case tcc_comparison:
10392 case tcc_statement:
10393 len = TREE_CODE_LENGTH (code);
10394 for (i = 0; i < len; ++i)
10395 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10397 case tcc_declaration:
10398 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10399 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10400 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10401 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10402 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10403 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10404 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10405 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10406 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10408 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10410 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10411 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10412 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type case (label not visible in this excerpt).  */
10416 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10417 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10418 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10419 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10420 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10421 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10422 if (INTEGRAL_TYPE_P (expr)
10423 || SCALAR_FLOAT_TYPE_P (expr))
10425 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10426 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10428 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10429 if (TREE_CODE (expr) == RECORD_TYPE
10430 || TREE_CODE (expr) == UNION_TYPE
10431 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10432 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10433 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10442 /* Fold a unary tree expression with code CODE of type TYPE with an
10443 operand OP0. Return a folded expression if successful. Otherwise,
10444 return a tree expression with code CODE of type TYPE with an
10448 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
/* With fold checking enabled, checksum OP0 before and after the fold
   to verify fold_unary did not modify its operand in place.  */
10451 #ifdef ENABLE_FOLD_CHECKING
10452 unsigned char checksum_before[16], checksum_after[16];
10453 struct md5_ctx ctx;
10456 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10457 md5_init_ctx (&ctx);
10458 fold_checksum_tree (op0, &ctx, ht);
10459 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if fold_unary returns NULL_TREE, build the plain
   expression node instead.  */
10463 tem = fold_unary (code, type, op0);
10465 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10467 #ifdef ENABLE_FOLD_CHECKING
10468 md5_init_ctx (&ctx);
10469 fold_checksum_tree (op0, &ctx, ht);
10470 md5_finish_ctx (&ctx, checksum_after);
10473 if (memcmp (checksum_before, checksum_after, 16))
10474 fold_check_failed (op0, tem);
10479 /* Fold a binary tree expression with code CODE of type TYPE with
10480 operands OP0 and OP1. Return a folded expression if successful.
10481 Otherwise, return a tree expression with code CODE of type TYPE
10482 with operands OP0 and OP1. */
10485 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
/* Same checking pattern as fold_build1_stat, applied independently
   to each of the two operands.  */
10489 #ifdef ENABLE_FOLD_CHECKING
10490 unsigned char checksum_before_op0[16],
10491 checksum_before_op1[16],
10492 checksum_after_op0[16],
10493 checksum_after_op1[16];
10494 struct md5_ctx ctx;
10497 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10498 md5_init_ctx (&ctx);
10499 fold_checksum_tree (op0, &ctx, ht);
10500 md5_finish_ctx (&ctx, checksum_before_op0);
10503 md5_init_ctx (&ctx);
10504 fold_checksum_tree (op1, &ctx, ht);
10505 md5_finish_ctx (&ctx, checksum_before_op1);
/* Fold, or fall back to building the raw binary node.  */
10509 tem = fold_binary (code, type, op0, op1);
10511 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10513 #ifdef ENABLE_FOLD_CHECKING
10514 md5_init_ctx (&ctx);
10515 fold_checksum_tree (op0, &ctx, ht);
10516 md5_finish_ctx (&ctx, checksum_after_op0);
10519 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10520 fold_check_failed (op0, tem);
10522 md5_init_ctx (&ctx);
10523 fold_checksum_tree (op1, &ctx, ht);
10524 md5_finish_ctx (&ctx, checksum_after_op1);
10527 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10528 fold_check_failed (op1, tem);
10533 /* Fold a ternary tree expression with code CODE of type TYPE with
10534 operands OP0, OP1, and OP2. Return a folded expression if
10535 successful. Otherwise, return a tree expression with code CODE of
10536 type TYPE with operands OP0, OP1, and OP2. */
10539 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
/* Same checking pattern as fold_build1_stat/fold_build2_stat,
   applied independently to all three operands.  */
10543 #ifdef ENABLE_FOLD_CHECKING
10544 unsigned char checksum_before_op0[16],
10545 checksum_before_op1[16],
10546 checksum_before_op2[16],
10547 checksum_after_op0[16],
10548 checksum_after_op1[16],
10549 checksum_after_op2[16];
10550 struct md5_ctx ctx;
10553 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10554 md5_init_ctx (&ctx);
10555 fold_checksum_tree (op0, &ctx, ht);
10556 md5_finish_ctx (&ctx, checksum_before_op0);
10559 md5_init_ctx (&ctx);
10560 fold_checksum_tree (op1, &ctx, ht);
10561 md5_finish_ctx (&ctx, checksum_before_op1);
10564 md5_init_ctx (&ctx);
10565 fold_checksum_tree (op2, &ctx, ht);
10566 md5_finish_ctx (&ctx, checksum_before_op2);
/* Fold, or fall back to building the raw ternary node.  */
10570 tem = fold_ternary (code, type, op0, op1, op2);
10572 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10574 #ifdef ENABLE_FOLD_CHECKING
10575 md5_init_ctx (&ctx);
10576 fold_checksum_tree (op0, &ctx, ht);
10577 md5_finish_ctx (&ctx, checksum_after_op0);
10580 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10581 fold_check_failed (op0, tem);
10583 md5_init_ctx (&ctx);
10584 fold_checksum_tree (op1, &ctx, ht);
10585 md5_finish_ctx (&ctx, checksum_after_op1);
10588 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10589 fold_check_failed (op1, tem);
10591 md5_init_ctx (&ctx);
10592 fold_checksum_tree (op2, &ctx, ht);
10593 md5_finish_ctx (&ctx, checksum_after_op2);
10596 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10597 fold_check_failed (op2, tem);
10602 /* Perform constant folding and related simplification of initializer
10603 expression EXPR. These behave identically to "fold_buildN" but ignore
10604 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves and clears the flags that make fold preserve
   run-time traps (signaling NaNs, trapping math, rounding modes,
   -ftrapv); END_FOLD_INIT restores them.  Initializers are evaluated
   at translation time, so those traps can never fire.
   NOTE(review): the line clearing flag_trapv inside START_FOLD_INIT is
   not visible in this sampled excerpt.  */
10606 #define START_FOLD_INIT \
10607 int saved_signaling_nans = flag_signaling_nans;\
10608 int saved_trapping_math = flag_trapping_math;\
10609 int saved_rounding_math = flag_rounding_math;\
10610 int saved_trapv = flag_trapv;\
10611 flag_signaling_nans = 0;\
10612 flag_trapping_math = 0;\
10613 flag_rounding_math = 0;\
10616 #define END_FOLD_INIT \
10617 flag_signaling_nans = saved_signaling_nans;\
10618 flag_trapping_math = saved_trapping_math;\
10619 flag_rounding_math = saved_rounding_math;\
10620 flag_trapv = saved_trapv
/* fold_buildN_initializer: like fold_buildN, but with trap-preserving
   flags suppressed (START_FOLD_INIT / END_FOLD_INIT around the call)
   since initializer expressions cannot trap at run time.  */
10623 fold_build1_initializer (enum tree_code code, tree type, tree op)
10628 result = fold_build1 (code, type, op);
10635 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10640 result = fold_build2 (code, type, op0, op1);
10647 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10653 result = fold_build3 (code, type, op0, op1, op2);
10659 #undef START_FOLD_INIT
10660 #undef END_FOLD_INIT
10662 /* Determine if first argument is a multiple of second argument. Return 0 if
10663 it is not, or we cannot easily determined it to be.
10665 An example of the sort of thing we care about (at this point; this routine
10666 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10667 fold cases do now) is discovering that
10669 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10675 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10677 This code also handles discovering that
10679 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10681 is a multiple of 8 so we don't have to worry about dealing with a
10682 possible remainder.
10684 Note that we *look* inside a SAVE_EXPR only to determine how it was
10685 calculated; it is not safe for fold to do much of anything else with the
10686 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10687 at run time. For example, the latter example above *cannot* be implemented
10688 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10689 evaluation time of the original SAVE_EXPR is not necessarily the same at
10690 the time the new expression is evaluated. The only optimization of this
10691 sort that would be valid is changing
10693 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10697 SAVE_EXPR (I) * SAVE_EXPR (J)
10699 (where the same SAVE_EXPR (J) is used in the original and the
10700 transformed version). */
10703 multiple_of_p (tree type, tree top, tree bottom)
/* Trivially, anything is a multiple of itself.  */
10705 if (operand_equal_p (top, bottom, 0))
/* Only reason about plain integer types.  */
10708 if (TREE_CODE (type) != INTEGER_TYPE)
10711 switch (TREE_CODE (top))
10714 /* Bitwise and provides a power of two multiple. If the mask is
10715 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10716 if (!integer_pow2p (bottom))
/* MULT_EXPR (presumably): it suffices for either factor to be a
   multiple of BOTTOM.  */
10721 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10722 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* PLUS/MINUS (presumably): both operands must be multiples.  */
10726 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10727 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* LSHIFT_EXPR by a constant: rewrite as a multiplication by the
   corresponding power of two and recurse.  */
10730 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10734 op1 = TREE_OPERAND (top, 1);
10735 /* const_binop may not detect overflow correctly,
10736 so check for it explicitly here. */
10737 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10738 > TREE_INT_CST_LOW (op1)
10739 && TREE_INT_CST_HIGH (op1) == 0
10740 && 0 != (t1 = fold_convert (type,
10741 const_binop (LSHIFT_EXPR,
10744 && ! TREE_OVERFLOW (t1))
10745 return multiple_of_p (type, t1, bottom);
10750 /* Can't handle conversions from non-integral or wider integral type. */
10751 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10752 || (TYPE_PRECISION (type)
10753 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10756 /* .. fall through ... */
10759 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST (presumably): decide by computing TOP % BOTTOM, but
   refuse sign-crossing cases in unsigned types where the modulus
   would be computed incorrectly.  */
10762 if (TREE_CODE (bottom) != INTEGER_CST
10763 || (TYPE_UNSIGNED (type)
10764 && (tree_int_cst_sgn (top) < 0
10765 || tree_int_cst_sgn (bottom) < 0)))
10767 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10775 /* Return true if `t' is known to be non-negative. */
10778 tree_expr_nonnegative_p (tree t)
/* Unsigned values are non-negative by construction.  */
10780 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10783 switch (TREE_CODE (t))
10786 /* We can't return 1 if flag_wrapv is set because
10787 ABS_EXPR<INT_MIN> = INT_MIN. */
10788 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10793 return tree_int_cst_sgn (t) >= 0;
10796 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* PLUS_EXPR (presumably): for floats, nonneg + nonneg is nonneg; for
   integers that could wrap, require the zero-extension argument
   below instead.  */
10799 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10800 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10801 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10803 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10804 both unsigned and at least 2 bits shorter than the result. */
10805 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10806 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10807 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10809 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10810 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10811 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10812 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10814 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10815 TYPE_PRECISION (inner2)) + 1;
10816 return prec < TYPE_PRECISION (TREE_TYPE (t));
10822 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10824 /* x * x for floating point x is always non-negative. */
10825 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10827 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10828 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10831 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10832 both unsigned and their total bits is shorter than the result. */
10833 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10834 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10835 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10837 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10838 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10839 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10840 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10841 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10842 < TYPE_PRECISION (TREE_TYPE (t));
/* MIN/MAX (presumably): MAX needs either operand nonneg; see the
   AND-form a few lines below for the dual.  */
10848 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10849 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10855 case TRUNC_DIV_EXPR:
10856 case CEIL_DIV_EXPR:
10857 case FLOOR_DIV_EXPR:
10858 case ROUND_DIV_EXPR:
10859 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10860 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10862 case TRUNC_MOD_EXPR:
10863 case CEIL_MOD_EXPR:
10864 case FLOOR_MOD_EXPR:
10865 case ROUND_MOD_EXPR:
10867 case NON_LVALUE_EXPR:
10869 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10871 case COMPOUND_EXPR:
/* Only the second operand's value matters for a comma expression.  */
10873 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10876 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
/* COND_EXPR (presumably): both arms must be non-negative.  */
10879 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10880 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* NOP_EXPR/conversion case: reason about the conversion by the
   inner and outer type kinds.  */
10884 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10885 tree outer_type = TREE_TYPE (t);
10887 if (TREE_CODE (outer_type) == REAL_TYPE)
10889 if (TREE_CODE (inner_type) == REAL_TYPE)
10890 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10891 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10893 if (TYPE_UNSIGNED (inner_type))
10895 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10898 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10900 if (TREE_CODE (inner_type) == REAL_TYPE)
10901 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10902 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10903 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10904 && TYPE_UNSIGNED (inner_type);
/* TARGET_EXPR: look at what the initializer stores into the slot.  */
10911 tree temp = TARGET_EXPR_SLOT (t);
10912 t = TARGET_EXPR_INITIAL (t);
10914 /* If the initializer is non-void, then it's a normal expression
10915 that will be assigned to the slot. */
10916 if (!VOID_TYPE_P (t))
10917 return tree_expr_nonnegative_p (t);
10919 /* Otherwise, the initializer sets the slot in some way. One common
10920 way is an assignment statement at the end of the initializer. */
10923 if (TREE_CODE (t) == BIND_EXPR)
10924 t = expr_last (BIND_EXPR_BODY (t));
10925 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10926 || TREE_CODE (t) == TRY_CATCH_EXPR)
10927 t = expr_last (TREE_OPERAND (t, 0));
10928 else if (TREE_CODE (t) == STATEMENT_LIST)
10933 if (TREE_CODE (t) == MODIFY_EXPR
10934 && TREE_OPERAND (t, 0) == temp)
10935 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: recognize the math builtins whose results have a known
   sign, classified by which arguments determine it.  */
10942 tree fndecl = get_callee_fndecl (t);
10943 tree arglist = TREE_OPERAND (t, 1);
10944 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10945 switch (DECL_FUNCTION_CODE (fndecl))
10947 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10948 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10949 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10950 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* These builtins are always non-negative regardless of argument.  */
10952 CASE_BUILTIN_F (BUILT_IN_ACOS)
10953 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10954 CASE_BUILTIN_F (BUILT_IN_CABS)
10955 CASE_BUILTIN_F (BUILT_IN_COSH)
10956 CASE_BUILTIN_F (BUILT_IN_ERFC)
10957 CASE_BUILTIN_F (BUILT_IN_EXP)
10958 CASE_BUILTIN_F (BUILT_IN_EXP10)
10959 CASE_BUILTIN_F (BUILT_IN_EXP2)
10960 CASE_BUILTIN_F (BUILT_IN_FABS)
10961 CASE_BUILTIN_F (BUILT_IN_FDIM)
10962 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10963 CASE_BUILTIN_F (BUILT_IN_POW10)
10964 CASE_BUILTIN_I (BUILT_IN_FFS)
10965 CASE_BUILTIN_I (BUILT_IN_PARITY)
10966 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10970 CASE_BUILTIN_F (BUILT_IN_SQRT)
10971 /* sqrt(-0.0) is -0.0. */
10972 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10974 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
/* These mirror the sign of their first argument.  */
10976 CASE_BUILTIN_F (BUILT_IN_ASINH)
10977 CASE_BUILTIN_F (BUILT_IN_ATAN)
10978 CASE_BUILTIN_F (BUILT_IN_ATANH)
10979 CASE_BUILTIN_F (BUILT_IN_CBRT)
10980 CASE_BUILTIN_F (BUILT_IN_CEIL)
10981 CASE_BUILTIN_F (BUILT_IN_ERF)
10982 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10983 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10984 CASE_BUILTIN_F (BUILT_IN_FMOD)
10985 CASE_BUILTIN_F (BUILT_IN_FREXP)
10986 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10987 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10988 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10989 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10990 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10991 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10992 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10993 CASE_BUILTIN_F (BUILT_IN_LRINT)
10994 CASE_BUILTIN_F (BUILT_IN_LROUND)
10995 CASE_BUILTIN_F (BUILT_IN_MODF)
10996 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10997 CASE_BUILTIN_F (BUILT_IN_POW)
10998 CASE_BUILTIN_F (BUILT_IN_RINT)
10999 CASE_BUILTIN_F (BUILT_IN_ROUND)
11000 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11001 CASE_BUILTIN_F (BUILT_IN_SINH)
11002 CASE_BUILTIN_F (BUILT_IN_TANH)
11003 CASE_BUILTIN_F (BUILT_IN_TRUNC)
11004 /* True if the 1st argument is nonnegative. */
11005 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11007 CASE_BUILTIN_F (BUILT_IN_FMAX)
11008 /* True if the 1st OR 2nd arguments are nonnegative. */
11009 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11010 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11012 CASE_BUILTIN_F (BUILT_IN_FMIN)
11013 /* True if the 1st AND 2nd arguments are nonnegative. */
11014 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11015 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11017 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11018 /* True if the 2nd argument is nonnegative. */
11019 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11023 #undef CASE_BUILTIN_F
11024 #undef CASE_BUILTIN_I
11028 /* ... fall through ... */
11031 if (truth_value_p (TREE_CODE (t)))
11032 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11036 /* We don't know sign of `t', so be conservative and return false. */
11040 /* Return true when T is an address and is known to be nonzero.
11041 For floating point we further ensure that T is not denormal.
11042 Similar logic is present in nonzero_address in rtlanal.h. */
11045 tree_expr_nonzero_p (tree t)
11047 tree type = TREE_TYPE (t);
11049 /* Doing something useful for floating point would need more work. */
11050 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11053 switch (TREE_CODE (t))
/* ABS_EXPR (presumably): |x| is nonzero iff x is.  */
11056 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11059 /* We used to test for !integer_zerop here. This does not work correctly
11060 if TREE_CONSTANT_OVERFLOW (t). */
11061 return (TREE_INT_CST_LOW (t) != 0
11062 || TREE_INT_CST_HIGH (t) != 0);
/* PLUS_EXPR (presumably): sound only when signed overflow is
   undefined (no -fwrapv) and both operands are non-negative.  */
11065 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11067 /* With the presence of negative values it is hard
11068 to say something. */
11069 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11070 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11072 /* One of operands must be positive and the other non-negative. */
11073 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11074 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MULT_EXPR (presumably): nonzero * nonzero is nonzero absent
   wrapping overflow.  */
11079 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11081 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11082 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversion: safe only when it cannot truncate away nonzero bits.  */
11088 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11089 tree outer_type = TREE_TYPE (t);
11091 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11092 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* ADDR_EXPR: the address of a non-weak decl or of a constant is
   never null.  */
11098 tree base = get_base_address (TREE_OPERAND (t, 0));
11103 /* Weak declarations may link to NULL. */
11104 if (VAR_OR_FUNCTION_DECL_P (base))
11105 return !DECL_WEAK (base);
11107 /* Constants are never weak. */
11108 if (CONSTANT_CLASS_P (base))
/* COND_EXPR (presumably): both arms must be nonzero.  */
11115 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11116 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* MIN_EXPR (presumably): both operands nonzero (and non-negative
   context from the callers above).  */
11119 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11120 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11123 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11125 /* When both operands are nonzero, then MAX must be too. */
11126 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11129 /* MAX where operand 0 is positive is positive. */
11130 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11132 /* MAX where operand 1 is positive is positive. */
11133 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11134 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11138 case COMPOUND_EXPR:
11141 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11144 case NON_LVALUE_EXPR:
11145 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* BIT_IOR_EXPR (presumably): either operand nonzero suffices.  */
11148 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11149 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* CALL_EXPR (presumably): alloca never returns a null pointer.  */
11152 return alloca_call_p (t);
11160 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11161 attempt to fold the expression to a constant without modifying TYPE,
11164 If the expression could be simplified to a constant, then return
11165 the constant. If the expression would not be simplified to a
11166 constant, then return NULL_TREE. */
11169 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
/* Delegate to fold_binary and keep the result only if it is a
   TREE_CONSTANT node; otherwise report failure with NULL_TREE.  */
11171 tree tem = fold_binary (code, type, op0, op1);
11172 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11175 /* Given the components of a unary expression CODE, TYPE and OP0,
11176 attempt to fold the expression to a constant without modifying
11179 If the expression could be simplified to a constant, then return
11180 the constant. If the expression would not be simplified to a
11181 constant, then return NULL_TREE. */
11184 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
/* Same contract as fold_binary_to_constant, for unary codes.  */
11186 tree tem = fold_unary (code, type, op0);
11187 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11190 /* If EXP represents referencing an element in a constant string
11191 (either via pointer arithmetic or array indexing), return the
11192 tree representing the value accessed, otherwise return NULL. */
11195 fold_read_from_constant_string (tree exp)
11197 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11199 tree exp1 = TREE_OPERAND (exp, 0);
11203 if (TREE_CODE (exp) == INDIRECT_REF)
11204 string = string_constant (exp1, &index);
11207 tree low_bound = array_ref_low_bound (exp);
11208 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11210 /* Optimize the special-case of a zero lower bound.
11212 We convert the low_bound to sizetype to avoid some problems
11213 with constant folding. (E.g. suppose the lower bound is 1,
11214 and its mode is QI. Without the conversion,l (ARRAY
11215 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11216 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
11217 if (! integer_zerop (low_bound))
11218 index = size_diffop (index, fold_convert (sizetype, low_bound));
11224 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11225 && TREE_CODE (string) == STRING_CST
11226 && TREE_CODE (index) == INTEGER_CST
11227 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11228 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11230 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11231 return fold_convert (TREE_TYPE (exp),
11232 build_int_cst (NULL_TREE,
11233 (TREE_STRING_POINTER (string)
11234 [TREE_INT_CST_LOW (index)])));
11239 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11240 an integer constant or real constant.
11242 TYPE is the type of the result. */
11245 fold_negate_const (tree arg0, tree type)
11247 tree t = NULL_TREE;
11249 switch (TREE_CODE (arg0))
11253 unsigned HOST_WIDE_INT low;
11254 HOST_WIDE_INT high;
11255 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11256 TREE_INT_CST_HIGH (arg0),
11258 t = build_int_cst_wide (type, low, high);
11259 t = force_fit_type (t, 1,
11260 (overflow | TREE_OVERFLOW (arg0))
11261 && !TYPE_UNSIGNED (type),
11262 TREE_CONSTANT_OVERFLOW (arg0));
11267 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11271 gcc_unreachable ();
11277 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11278 an integer constant or real constant.
11280 TYPE is the type of the result. */
11283 fold_abs_const (tree arg0, tree type)
11285 tree t = NULL_TREE;
11287 switch (TREE_CODE (arg0))
11290 /* If the value is unsigned, then the absolute value is
11291 the same as the ordinary value. */
11292 if (TYPE_UNSIGNED (type))
11294 /* Similarly, if the value is non-negative. */
11295 else if (INT_CST_LT (integer_minus_one_node, arg0))
11297 /* If the value is negative, then the absolute value is
11301 unsigned HOST_WIDE_INT low;
11302 HOST_WIDE_INT high;
11303 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11304 TREE_INT_CST_HIGH (arg0),
11306 t = build_int_cst_wide (type, low, high);
11307 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11308 TREE_CONSTANT_OVERFLOW (arg0));
11313 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11314 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11320 gcc_unreachable ();
11326 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11327 constant. TYPE is the type of the result. */
11330 fold_not_const (tree arg0, tree type)
11332 tree t = NULL_TREE;
11334 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11336 t = build_int_cst_wide (type,
11337 ~ TREE_INT_CST_LOW (arg0),
11338 ~ TREE_INT_CST_HIGH (arg0));
11339 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11340 TREE_CONSTANT_OVERFLOW (arg0));
11345 /* Given CODE, a relational operator, the target type, TYPE and two
11346 constant operands OP0 and OP1, return the result of the
11347 relational operation. If the result is not a compile time
11348 constant, then return NULL_TREE. */
11351 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11353 int result, invert;
11355 /* From here on, the only cases we handle are when the result is
11356 known to be a constant. */
11358 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11360 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11361 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11363 /* Handle the cases where either operand is a NaN. */
11364 if (real_isnan (c0) || real_isnan (c1))
11374 case UNORDERED_EXPR:
11388 if (flag_trapping_math)
11394 gcc_unreachable ();
11397 return constant_boolean_node (result, type);
11400 return constant_boolean_node (real_compare (code, c0, c1), type);
11403 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11405 To compute GT, swap the arguments and do LT.
11406 To compute GE, do LT and invert the result.
11407 To compute LE, swap the arguments, do LT and invert the result.
11408 To compute NE, do EQ and invert the result.
11410 Therefore, the code below must handle only EQ and LT. */
11412 if (code == LE_EXPR || code == GT_EXPR)
11417 code = swap_tree_comparison (code);
11420 /* Note that it is safe to invert for real values here because we
11421 have already handled the one case that it matters. */
11424 if (code == NE_EXPR || code == GE_EXPR)
11427 code = invert_tree_comparison (code, false);
11430 /* Compute a result for LT or EQ if args permit;
11431 Otherwise return T. */
11432 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11434 if (code == EQ_EXPR)
11435 result = tree_int_cst_equal (op0, op1);
11436 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11437 result = INT_CST_LT_UNSIGNED (op0, op1);
11439 result = INT_CST_LT (op0, op1);
11446 return constant_boolean_node (result, type);
11449 /* Build an expression for the a clean point containing EXPR with type TYPE.
11450 Don't build a cleanup point expression for EXPR which don't have side
11454 fold_build_cleanup_point_expr (tree type, tree expr)
11456 /* If the expression does not have side effects then we don't have to wrap
11457 it with a cleanup point expression. */
11458 if (!TREE_SIDE_EFFECTS (expr))
11461 /* If the expression is a return, check to see if the expression inside the
11462 return has no side effects or the right hand side of the modify expression
11463 inside the return. If either don't have side effects set we don't need to
11464 wrap the expression in a cleanup point expression. Note we don't check the
11465 left hand side of the modify because it should always be a return decl. */
11466 if (TREE_CODE (expr) == RETURN_EXPR)
11468 tree op = TREE_OPERAND (expr, 0);
11469 if (!op || !TREE_SIDE_EFFECTS (op))
11471 op = TREE_OPERAND (op, 1);
11472 if (!TREE_SIDE_EFFECTS (op))
11476 return build1 (CLEANUP_POINT_EXPR, type, expr);
11479 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11480 avoid confusing the gimplify process. */
11483 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11485 /* The size of the object is not relevant when talking about its address. */
11486 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11487 t = TREE_OPERAND (t, 0);
11489 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11490 if (TREE_CODE (t) == INDIRECT_REF
11491 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11493 t = TREE_OPERAND (t, 0);
11494 if (TREE_TYPE (t) != ptrtype)
11495 t = build1 (NOP_EXPR, ptrtype, t);
11501 while (handled_component_p (base))
11502 base = TREE_OPERAND (base, 0);
11504 TREE_ADDRESSABLE (base) = 1;
11506 t = build1 (ADDR_EXPR, ptrtype, t);
11513 build_fold_addr_expr (tree t)
11515 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11518 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11519 of an indirection through OP0, or NULL_TREE if no simplification is
11523 fold_indirect_ref_1 (tree type, tree op0)
11529 subtype = TREE_TYPE (sub);
11530 if (!POINTER_TYPE_P (subtype))
11533 if (TREE_CODE (sub) == ADDR_EXPR)
11535 tree op = TREE_OPERAND (sub, 0);
11536 tree optype = TREE_TYPE (op);
11537 /* *&p => p; make sure to handle *&"str"[cst] here. */
11538 if (type == optype)
11540 tree fop = fold_read_from_constant_string (op);
11546 /* *(foo *)&fooarray => fooarray[0] */
11547 else if (TREE_CODE (optype) == ARRAY_TYPE
11548 && type == TREE_TYPE (optype))
11550 tree type_domain = TYPE_DOMAIN (optype);
11551 tree min_val = size_zero_node;
11552 if (type_domain && TYPE_MIN_VALUE (type_domain))
11553 min_val = TYPE_MIN_VALUE (type_domain);
11554 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11558 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11559 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11560 && type == TREE_TYPE (TREE_TYPE (subtype)))
11563 tree min_val = size_zero_node;
11564 sub = build_fold_indirect_ref (sub);
11565 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11566 if (type_domain && TYPE_MIN_VALUE (type_domain))
11567 min_val = TYPE_MIN_VALUE (type_domain);
11568 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11574 /* Builds an expression for an indirection through T, simplifying some
11578 build_fold_indirect_ref (tree t)
11580 tree type = TREE_TYPE (TREE_TYPE (t));
11581 tree sub = fold_indirect_ref_1 (type, t);
11586 return build1 (INDIRECT_REF, type, t);
11589 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11592 fold_indirect_ref (tree t)
11594 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11602 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11603 whose result is ignored. The type of the returned tree need not be
11604 the same as the original expression. */
11607 fold_ignored_result (tree t)
11609 if (!TREE_SIDE_EFFECTS (t))
11610 return integer_zero_node;
11613 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11616 t = TREE_OPERAND (t, 0);
11620 case tcc_comparison:
11621 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11622 t = TREE_OPERAND (t, 0);
11623 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11624 t = TREE_OPERAND (t, 1);
11629 case tcc_expression:
11630 switch (TREE_CODE (t))
11632 case COMPOUND_EXPR:
11633 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11635 t = TREE_OPERAND (t, 0);
11639 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11640 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11642 t = TREE_OPERAND (t, 0);
11655 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11656 This can only be applied to objects of a sizetype. */
11659 round_up (tree value, int divisor)
11661 tree div = NULL_TREE;
11663 gcc_assert (divisor > 0);
11667 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11668 have to do anything. Only do this when we are not given a const,
11669 because in that case, this check is more expensive than just
11671 if (TREE_CODE (value) != INTEGER_CST)
11673 div = build_int_cst (TREE_TYPE (value), divisor);
11675 if (multiple_of_p (TREE_TYPE (value), value, div))
11679 /* If divisor is a power of two, simplify this to bit manipulation. */
11680 if (divisor == (divisor & -divisor))
11684 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11685 value = size_binop (PLUS_EXPR, value, t);
11686 t = build_int_cst (TREE_TYPE (value), -divisor);
11687 value = size_binop (BIT_AND_EXPR, value, t);
11692 div = build_int_cst (TREE_TYPE (value), divisor);
11693 value = size_binop (CEIL_DIV_EXPR, value, div);
11694 value = size_binop (MULT_EXPR, value, div);
11700 /* Likewise, but round down. */
11703 round_down (tree value, int divisor)
11705 tree div = NULL_TREE;
11707 gcc_assert (divisor > 0);
11711 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11712 have to do anything. Only do this when we are not given a const,
11713 because in that case, this check is more expensive than just
11715 if (TREE_CODE (value) != INTEGER_CST)
11717 div = build_int_cst (TREE_TYPE (value), divisor);
11719 if (multiple_of_p (TREE_TYPE (value), value, div))
11723 /* If divisor is a power of two, simplify this to bit manipulation. */
11724 if (divisor == (divisor & -divisor))
11728 t = build_int_cst (TREE_TYPE (value), -divisor);
11729 value = size_binop (BIT_AND_EXPR, value, t);
11734 div = build_int_cst (TREE_TYPE (value), divisor);
11735 value = size_binop (FLOOR_DIV_EXPR, value, div);
11736 value = size_binop (MULT_EXPR, value, div);
11742 /* Returns the pointer to the base of the object addressed by EXP and
11743 extracts the information about the offset of the access, storing it
11744 to PBITPOS and POFFSET. */
11747 split_address_to_core_and_offset (tree exp,
11748 HOST_WIDE_INT *pbitpos, tree *poffset)
11751 enum machine_mode mode;
11752 int unsignedp, volatilep;
11753 HOST_WIDE_INT bitsize;
11755 if (TREE_CODE (exp) == ADDR_EXPR)
11757 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11758 poffset, &mode, &unsignedp, &volatilep,
11760 core = build_fold_addr_expr (core);
11766 *poffset = NULL_TREE;
11772 /* Returns true if addresses of E1 and E2 differ by a constant, false
11773 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11776 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11779 HOST_WIDE_INT bitpos1, bitpos2;
11780 tree toffset1, toffset2, tdiff, type;
11782 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11783 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11785 if (bitpos1 % BITS_PER_UNIT != 0
11786 || bitpos2 % BITS_PER_UNIT != 0
11787 || !operand_equal_p (core1, core2, 0))
11790 if (toffset1 && toffset2)
11792 type = TREE_TYPE (toffset1);
11793 if (type != TREE_TYPE (toffset2))
11794 toffset2 = fold_convert (type, toffset2);
11796 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11797 if (!cst_and_fits_in_hwi (tdiff))
11800 *diff = int_cst_value (tdiff);
11802 else if (toffset1 || toffset2)
11804 /* If only one of the offsets is non-constant, the difference cannot
11811 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11815 /* Simplify the floating point expression EXP when the sign of the
11816 result is not significant. Return NULL_TREE if no simplification
11820 fold_strip_sign_ops (tree exp)
11824 switch (TREE_CODE (exp))
11828 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11829 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11833 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11835 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11836 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11837 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11838 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11839 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11840 arg1 ? arg1 : TREE_OPERAND (exp, 1));