/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
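
/* Illustrative note: with the bit assignment used here (bit 0 "less",
   bit 1 "equal", bit 2 "greater", bit 3 "unordered"), combining
   comparison outcomes reduces to bitwise arithmetic; for example
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (3 == 1 | 2), and
   COMPCODE_TRUE has all four bits set.  combine_comparisons below
   relies on this property.  */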
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
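
/* Worked example (with 8-bit values for brevity): a = 0x70 (+112),
   b = 0x20 (+32), sum = 0x90 (-112).  Then ~(a ^ b) = 0xAF has the top
   bit set (a and b agree in sign), a ^ sum = 0xE0 has the top bit set
   (a and sum differ in sign), and 0xAF & 0xE0 = 0xA0 is negative, so
   overflow is reported -- correctly, since 112 + 32 does not fit in a
   signed byte.  The macro applies the same test at the width of
   HOST_WIDE_INT.  */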
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
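
/* Worked example: with a 64-bit HOST_WIDE_INT, BASE is 2^32 and any
   value x satisfies x == LOWPART (x) + HIGHPART (x) * BASE; e.g.
   x = 0x123456789ABCDEF0 gives LOWPART 0x9ABCDEF0 and HIGHPART
   0x12345678.  Working on half-words keeps every partial product in
   mul_double below representable without overflow.  */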
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
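
/* Illustrative sketch: encode and decode are inverses, so a doubleword
   constant round-trips through the 4-word form:

       HOST_WIDE_INT w[4];
       unsigned HOST_WIDE_INT lo2;
       HOST_WIDE_INT hi2;
       encode (w, lo, hi);
       decode (w, &lo2, &hi2);

   after which lo2 == lo and hi2 == hi for any doubleword LO/HI.  All
   the doubleword helpers below operate on arrays in this format.  */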
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
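
/* Illustrative note: forcing the value 0x1FF into an 8-bit unsigned
   type clears everything above bit 7, yielding 0xFF; for a signed
   8-bit type the same bit pattern is then sign extended, yielding -1.
   Callers such as int_const_binop below use OVERFLOWABLE together with
   the OVERFLOWED flags to decide whether the resulting node also gets
   TREE_OVERFLOW set.  */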
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
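
/* Worked example: adding 1 to the doubleword value with l1 == ~0 and
   h1 == 0 (i.e. 2^HOST_BITS_PER_WIDE_INT - 1) wraps the low word and
   carries into the high word:

       unsigned HOST_WIDE_INT lo;
       HOST_WIDE_INT hi;
       add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);

   leaves lo == 0 and hi == 1, and returns 0 since no signed doubleword
   overflow occurred.  */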
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
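
/* Illustrative note: the bounds quoted in the inner loop are stated for
   16-bit half-words.  The largest half-word product is
   0xFFFF * 0xFFFF = 0xFFFE0001; adding the running carry (at most
   0xFFFF) gives at most 0xFFFF0000, and adding prod[k] (at most
   0xFFFF) gives at most 0xFFFFFFFF, so the accumulator never
   overflows.  The same argument scales to whatever width
   HOST_WIDE_INT actually has.  */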
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
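
/* Illustrative note: the expression
   "l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1" computes
   l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever shifting by a
   full word width, which C leaves undefined; in the boundary case
   count == 0 it degenerates to (l1 >> (width - 1)) >> 1 == 0, exactly
   the bits that belong in the high word.  rshift_double below uses the
   mirror-image trick.  */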
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
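
/* Worked example: for num = -7, den = 2 the rounding variants give

       TRUNC_DIV_EXPR	quo = -3, rem = -1  (round toward zero)
       FLOOR_DIV_EXPR	quo = -4, rem =  1  (round toward -infinity)
       CEIL_DIV_EXPR	quo = -3, rem = -1  (round toward +infinity)
       ROUND_DIV_EXPR	quo = -4, rem =  1  (round to closest; the tie
					     at -3.5 rounds away from zero)

   and in every case num == quo * den + rem holds, which is how the
   final remainder is recomputed above.  */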
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
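
/* Illustrative note: these are exactly the odd functions, for which
   f(-x) == -f(x); e.g. sin(-x) == -sin(x), so negate_expr below can
   rewrite -sin(x) as sin(-x) without changing the value.  cos does not
   qualify, since cos(-x) == cos(x).  */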
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
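
/* Illustrative note: given -(x + 5), the PLUS_EXPR case above negates
   the constant operand and rebuilds the tree as (-5) - x instead of
   wrapping the sum in a NEGATE_EXPR, leaving a form that later folding
   can simplify further.  */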
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
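
/* Illustrative note: splitting x + 4 with code == PLUS_EXPR returns
   var == x with *litp == 4 and *conp == 0; splitting x - 4 instead
   stores 4 in *minus_litp.  associate_trees below performs the inverse
   operation, rebuilding an expression from the split parts.  */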
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
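
/* Illustrative note: folding the constant expression 6 * 7 reaches
   this function with code == MULT_EXPR and two INTEGER_CSTs;
   mul_double computes the doubleword product 42, and force_fit_type
   then truncates it to the result type and sets the overflow flags if
   anything was lost.  */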
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          return NULL_TREE;
        }
      return t;
    }
  return NULL_TREE;
}
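
/* Illustrative note: the RDIV_EXPR case above is the textbook identity

       (r1 + i1*i) / (r2 + i2*i)
         = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   evaluated with const_binop on the component constants.  */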
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
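
/* Illustrative note: under these saturating semantics, converting the
   double 1e30 to a 32-bit signed integer type yields the type's
   maximum 2147483647, converting -1e30 yields the minimum, and
   converting a NaN yields 0; in each case the overflow flag is then
   recorded via force_fit_type.  */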
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2350 /* Return a tree for the comparison which is the combination of
2351 doing the AND or OR (depending on CODE) of the two operations LCODE
2352 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2353 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2354 if this makes the transformation invalid. */
2357 combine_comparisons (enum tree_code code, enum tree_code lcode,
2358 enum tree_code rcode, tree truth_type,
2359 tree ll_arg, tree lr_arg)
2361 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2362 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2363 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2364 enum comparison_code compcode;
2368 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2369 compcode = lcompcode & rcompcode;
2372 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2373 compcode = lcompcode | rcompcode;
2382 /* Eliminate unordered comparisons, as well as LTGT and ORD
2383 which are not used unless the mode has NaNs. */
2384 compcode &= ~COMPCODE_UNORD;
2385 if (compcode == COMPCODE_LTGT)
2386 compcode = COMPCODE_NE;
2387 else if (compcode == COMPCODE_ORD)
2388 compcode = COMPCODE_TRUE;
2390 else if (flag_trapping_math)
2392 /* Check that the original operation and the optimized ones will trap
2393 under the same condition. */
2394 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2395 && (lcompcode != COMPCODE_EQ)
2396 && (lcompcode != COMPCODE_ORD);
2397 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2398 && (rcompcode != COMPCODE_EQ)
2399 && (rcompcode != COMPCODE_ORD);
2400 bool trap = (compcode & COMPCODE_UNORD) == 0
2401 && (compcode != COMPCODE_EQ)
2402 && (compcode != COMPCODE_ORD);
2404 /* In a short-circuited boolean expression the LHS might be
2405 such that the RHS, if evaluated, will never trap. For
2406 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2407 if neither x nor y is NaN. (This is a mixed blessing: for
2408 example, the expression above will never trap, hence
2409 optimizing it to x < y would be invalid). */
2410 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2411 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2414 /* If the comparison was short-circuited, and only the RHS
2415 trapped, we may now generate a spurious trap. */
2417 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2420 /* If we changed the conditions that cause a trap, we lose. */
2421 if ((ltrap || rtrap) != trap)
2425 if (compcode == COMPCODE_TRUE)
2426 return constant_boolean_node (true, truth_type);
2427 else if (compcode == COMPCODE_FALSE)
2428 return constant_boolean_node (false, truth_type);
2430 return fold_build2 (compcode_to_comparison (compcode),
2431 truth_type, ll_arg, lr_arg);
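/* Worked example of the combination above: for (a <= b) && (a >= b) on
   an integer type (no NaNs), lcompcode is LT|EQ, rcompcode is EQ|GT,
   and the TRUTH_AND case intersects them to EQ, so the whole test
   folds to a single equality.  In plain C terms:  */
#if 0
static int before (int a, int b) { return a <= b && a >= b; }
static int after  (int a, int b) { return a == b; } /* identical for all a, b */
#endif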
2434 /* Return nonzero if CODE is a tree code that represents a truth value. */
2437 truth_value_p (enum tree_code code)
2439 return (TREE_CODE_CLASS (code) == tcc_comparison
2440 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2441 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2442 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2445 /* Return nonzero if two operands (typically of the same tree node)
2446 are necessarily equal. If either argument has side-effects this
2447 function returns zero. FLAGS modifies behavior as follows:
2449 If OEP_ONLY_CONST is set, only return nonzero for constants.
2450 This function tests whether the operands are indistinguishable;
2451 it does not test whether they are equal using C's == operation.
2452 The distinction is important for IEEE floating point, because
2453 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2454 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2456 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2457 even though it may hold multiple values during a function.
2458 This is because a GCC tree node guarantees that nothing else is
2459 executed between the evaluation of its "operands" (which may often
2460 be evaluated in arbitrary order). Hence if the operands themselves
2461 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2462 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2463 unset means assuming isochronic (or instantaneous) tree equivalence.
2464 Unless comparing arbitrary expression trees, such as from different
2465 statements, this flag can usually be left unset.
2467 If OEP_PURE_SAME is set, then pure functions with identical arguments
2468 are considered the same. It is used when the caller has other ways
2469 to ensure that global memory is unchanged in between. */
2472 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2474 /* If either is ERROR_MARK, they aren't equal. */
2475 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2478 /* If both types don't have the same signedness, then we can't consider
2479 them equal. We must check this before the STRIP_NOPS calls
2480 because they may change the signedness of the arguments. */
2481 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2487 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2488 /* This is needed for conversions and for COMPONENT_REF.
2489 Might as well play it safe and always test this. */
2490 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2491 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2492 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2495 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2496 We don't care about side effects in that case because the SAVE_EXPR
2497 takes care of that for us. In all other cases, two expressions are
2498 equal if they have no side effects. If we have two identical
2499 expressions with side effects that should be treated the same due
2500 to the only side effects being identical SAVE_EXPR's, that will
2501 be detected in the recursive calls below. */
2502 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2503 && (TREE_CODE (arg0) == SAVE_EXPR
2504 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2507 /* Next handle constant cases, those for which we can return 1 even
2508 if ONLY_CONST is set. */
2509 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2510 switch (TREE_CODE (arg0))
2513 return (! TREE_CONSTANT_OVERFLOW (arg0)
2514 && ! TREE_CONSTANT_OVERFLOW (arg1)
2515 && tree_int_cst_equal (arg0, arg1));
2518 return (! TREE_CONSTANT_OVERFLOW (arg0)
2519 && ! TREE_CONSTANT_OVERFLOW (arg1)
2520 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2521 TREE_REAL_CST (arg1)));
2527 if (TREE_CONSTANT_OVERFLOW (arg0)
2528 || TREE_CONSTANT_OVERFLOW (arg1))
2531 v1 = TREE_VECTOR_CST_ELTS (arg0);
2532 v2 = TREE_VECTOR_CST_ELTS (arg1);
2535 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2538 v1 = TREE_CHAIN (v1);
2539 v2 = TREE_CHAIN (v2);
2546 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2548 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2552 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2553 && ! memcmp (TREE_STRING_POINTER (arg0),
2554 TREE_STRING_POINTER (arg1),
2555 TREE_STRING_LENGTH (arg0)));
2558 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2564 if (flags & OEP_ONLY_CONST)
2567 /* Define macros to test an operand from arg0 and arg1 for equality and a
2568 variant that allows null and views null as being different from any
2569 non-null value. In the latter case, if either is null, they both
2570 must be; otherwise, do the normal comparison. */
2571 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2572 TREE_OPERAND (arg1, N), flags)
2574 #define OP_SAME_WITH_NULL(N) \
2575 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2576 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2578 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2581 /* Two conversions are equal only if signedness and modes match. */
2582 switch (TREE_CODE (arg0))
2587 case FIX_TRUNC_EXPR:
2588 case FIX_FLOOR_EXPR:
2589 case FIX_ROUND_EXPR:
2590 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2591 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2601 case tcc_comparison:
2603 if (OP_SAME (0) && OP_SAME (1))
2606 /* For commutative ops, allow the other order. */
2607 return (commutative_tree_code (TREE_CODE (arg0))
2608 && operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2614 /* If either of the pointer (or reference) expressions we are
2615 dereferencing contain a side effect, these cannot be equal. */
2616 if (TREE_SIDE_EFFECTS (arg0)
2617 || TREE_SIDE_EFFECTS (arg1))
2620 switch (TREE_CODE (arg0))
2623 case ALIGN_INDIRECT_REF:
2624 case MISALIGNED_INDIRECT_REF:
2630 case ARRAY_RANGE_REF:
2631 /* Operands 2 and 3 may be null. */
2634 && OP_SAME_WITH_NULL (2)
2635 && OP_SAME_WITH_NULL (3));
2638 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2639 may be NULL when we're called to compare MEM_EXPRs. */
2640 return OP_SAME_WITH_NULL (0)
2642 && OP_SAME_WITH_NULL (2);
2645 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2651 case tcc_expression:
2652 switch (TREE_CODE (arg0))
2655 case TRUTH_NOT_EXPR:
2658 case TRUTH_ANDIF_EXPR:
2659 case TRUTH_ORIF_EXPR:
2660 return OP_SAME (0) && OP_SAME (1);
2662 case TRUTH_AND_EXPR:
2664 case TRUTH_XOR_EXPR:
2665 if (OP_SAME (0) && OP_SAME (1))
2668 /* Otherwise take into account this is a commutative operation. */
2669 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2670 TREE_OPERAND (arg1, 1), flags)
2671 && operand_equal_p (TREE_OPERAND (arg0, 1),
2672 TREE_OPERAND (arg1, 0), flags));
2675 /* If the CALL_EXPRs call different functions, then they
2676 clearly cannot be equal. */
2681 unsigned int cef = call_expr_flags (arg0);
2682 if (flags & OEP_PURE_SAME)
2683 cef &= ECF_CONST | ECF_PURE;
2690 /* Now see if all the arguments are the same. operand_equal_p
2691 does not handle TREE_LIST, so we walk the operands here
2692 feeding them to operand_equal_p. */
2693 arg0 = TREE_OPERAND (arg0, 1);
2694 arg1 = TREE_OPERAND (arg1, 1);
2695 while (arg0 && arg1)
2697 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2701 arg0 = TREE_CHAIN (arg0);
2702 arg1 = TREE_CHAIN (arg1);
2705 /* If we get here and both argument lists are exhausted
2706 then the CALL_EXPRs are equal. */
2707 return ! (arg0 || arg1);
2713 case tcc_declaration:
2714 /* Consider __builtin_sqrt equal to sqrt. */
2715 return (TREE_CODE (arg0) == FUNCTION_DECL
2716 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2717 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2718 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2725 #undef OP_SAME_WITH_NULL
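/* Example of the indistinguishability rule documented above: -0.0 and
   0.0 compare equal with C's ==, yet are distinguishable values, so
   operand_equal_p must not identify the trees for them; conversely two
   NaNs may be the same value even though NaN != NaN.  A sketch:  */
#if 0
static int zeros_differ (void)
{
  double pz = 0.0, nz = -0.0;
  /* pz == nz holds, but the values are distinguishable: */
  return 1.0 / pz != 1.0 / nz; /* +inf vs -inf, so this returns 1 */
}
#endif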
2728 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2729 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2731 When in doubt, return 0. */
2734 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2736 int unsignedp1, unsignedpo;
2737 tree primarg0, primarg1, primother;
2738 unsigned int correct_width;
2740 if (operand_equal_p (arg0, arg1, 0))
2743 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2744 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2747 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2748 and see if the inner values are the same. This removes any
2749 signedness comparison, which doesn't matter here. */
2750 primarg0 = arg0, primarg1 = arg1;
2751 STRIP_NOPS (primarg0);
2752 STRIP_NOPS (primarg1);
2753 if (operand_equal_p (primarg0, primarg1, 0))
2756 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2757 actual comparison operand, ARG0.
2759 First throw away any conversions to wider types
2760 already present in the operands. */
2762 primarg1 = get_narrower (arg1, &unsignedp1);
2763 primother = get_narrower (other, &unsignedpo);
2765 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2766 if (unsignedp1 == unsignedpo
2767 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2768 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2770 tree type = TREE_TYPE (arg0);
2772 /* Make sure shorter operand is extended the right way
2773 to match the longer operand. */
2774 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2775 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2777 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2784 /* See if ARG is an expression that is either a comparison or is performing
2785 arithmetic on comparisons. The comparisons must only be comparing
2786 two different values, which will be stored in *CVAL1 and *CVAL2; if
2787 they are nonzero it means that some operands have already been found.
2788 No variables may be used anywhere else in the expression except in the
2789 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2790 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2792 If this is true, return 1. Otherwise, return zero. */
2795 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2797 enum tree_code code = TREE_CODE (arg);
2798 enum tree_code_class class = TREE_CODE_CLASS (code);
2800 /* We can handle some of the tcc_expression cases here. */
2801 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2803 else if (class == tcc_expression
2804 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2805 || code == COMPOUND_EXPR))
2808 else if (class == tcc_expression && code == SAVE_EXPR
2809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2811 /* If we've already found a CVAL1 or CVAL2, this expression is
2812 too complex to handle. */
2813 if (*cval1 || *cval2)
2823 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2826 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2828 cval1, cval2, save_p));
2833 case tcc_expression:
2834 if (code == COND_EXPR)
2835 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2836 cval1, cval2, save_p)
2837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2838 cval1, cval2, save_p)
2839 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2840 cval1, cval2, save_p));
2843 case tcc_comparison:
2844 /* First see if we can handle the first operand, then the second. For
2845 the second operand, we know *CVAL1 can't be zero. It must be that
2846 one side of the comparison is each of the values; test for the
2847 case where this isn't true by failing if the two operands are the same. */
2850 if (operand_equal_p (TREE_OPERAND (arg, 0),
2851 TREE_OPERAND (arg, 1), 0))
2855 *cval1 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2858 else if (*cval2 == 0)
2859 *cval2 = TREE_OPERAND (arg, 0);
2860 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2865 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2867 else if (*cval2 == 0)
2868 *cval2 = TREE_OPERAND (arg, 1);
2869 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2881 /* ARG is a tree that is known to contain just arithmetic operations and
2882 comparisons. Evaluate the operations in the tree substituting NEW0 for
2883 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2887 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2889 tree type = TREE_TYPE (arg);
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class class = TREE_CODE_CLASS (code);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2896 else if (class == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2903 return fold_build1 (code, type,
2904 eval_subst (TREE_OPERAND (arg, 0),
2905 old0, new0, old1, new1));
2908 return fold_build2 (code, type,
2909 eval_subst (TREE_OPERAND (arg, 0),
2910 old0, new0, old1, new1),
2911 eval_subst (TREE_OPERAND (arg, 1),
2912 old0, new0, old1, new1));
2914 case tcc_expression:
2918 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2921 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2924 return fold_build3 (code, type,
2925 eval_subst (TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1),
2927 eval_subst (TREE_OPERAND (arg, 1),
2928 old0, new0, old1, new1),
2929 eval_subst (TREE_OPERAND (arg, 2),
2930 old0, new0, old1, new1));
2934 /* Fall through - ??? */
2936 case tcc_comparison:
2938 tree arg0 = TREE_OPERAND (arg, 0);
2939 tree arg1 = TREE_OPERAND (arg, 1);
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2945 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2947 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2950 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2952 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2955 return fold_build2 (code, type, arg0, arg1);
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2971 omit_one_operand (tree type, tree result, tree omitted)
2973 tree t = fold_convert (type, result);
2975 if (TREE_SIDE_EFFECTS (omitted))
2976 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2978 return non_lvalue (t);
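/* Example of the case omit_one_operand handles: folding "f () * 0" to
   zero must still evaluate the call for its side effects, so the
   result is a COMPOUND_EXPR, i.e. the C comma expression:  */
#if 0
extern int f (void);
static int before (void) { return f () * 0; }
static int after  (void) { return (f (), 0); } /* side effect preserved */
#endif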
2981 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2984 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2986 tree t = fold_convert (type, result);
2988 if (TREE_SIDE_EFFECTS (omitted))
2989 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2991 return pedantic_non_lvalue (t);
2994 /* Return a tree for the case when the result of an expression is RESULT
2995 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2996 of the expression but are now not needed.
2998 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2999 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3000 evaluated before OMITTED2. Otherwise, if neither has side effects,
3001 just do the conversion of RESULT to TYPE. */
3004 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3006 tree t = fold_convert (type, result);
3008 if (TREE_SIDE_EFFECTS (omitted2))
3009 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3010 if (TREE_SIDE_EFFECTS (omitted1))
3011 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3013 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3017 /* Return a simplified tree node for the truth-negation of ARG. This
3018 never alters ARG itself. We assume that ARG is an operation that
3019 returns a truth value (0 or 1).
3021 FIXME: one would think we would fold the result, but it causes
3022 problems with the dominator optimizer. */
3024 invert_truthvalue (tree arg)
3026 tree type = TREE_TYPE (arg);
3027 enum tree_code code = TREE_CODE (arg);
3029 if (code == ERROR_MARK)
3032 /* If this is a comparison, we can simply invert it, except for
3033 floating-point non-equality comparisons, in which case we just
3034 enclose a TRUTH_NOT_EXPR around what we have. */
3036 if (TREE_CODE_CLASS (code) == tcc_comparison)
3038 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3039 if (FLOAT_TYPE_P (op_type)
3040 && flag_trapping_math
3041 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3042 && code != NE_EXPR && code != EQ_EXPR)
3043 return build1 (TRUTH_NOT_EXPR, type, arg);
3046 code = invert_tree_comparison (code,
3047 HONOR_NANS (TYPE_MODE (op_type)));
3048 if (code == ERROR_MARK)
3049 return build1 (TRUTH_NOT_EXPR, type, arg);
3051 return build2 (code, type,
3052 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3059 return constant_boolean_node (integer_zerop (arg), type);
3061 case TRUTH_AND_EXPR:
3062 return build2 (TRUTH_OR_EXPR, type,
3063 invert_truthvalue (TREE_OPERAND (arg, 0)),
3064 invert_truthvalue (TREE_OPERAND (arg, 1)));
3067 return build2 (TRUTH_AND_EXPR, type,
3068 invert_truthvalue (TREE_OPERAND (arg, 0)),
3069 invert_truthvalue (TREE_OPERAND (arg, 1)));
3071 case TRUTH_XOR_EXPR:
3072 /* Here we can invert either operand. We invert the first operand
3073 unless the second operand is a TRUTH_NOT_EXPR in which case our
3074 result is the XOR of the first operand with the inside of the
3075 negation of the second operand. */
3077 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3078 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3079 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3081 return build2 (TRUTH_XOR_EXPR, type,
3082 invert_truthvalue (TREE_OPERAND (arg, 0)),
3083 TREE_OPERAND (arg, 1));
3085 case TRUTH_ANDIF_EXPR:
3086 return build2 (TRUTH_ORIF_EXPR, type,
3087 invert_truthvalue (TREE_OPERAND (arg, 0)),
3088 invert_truthvalue (TREE_OPERAND (arg, 1)));
3090 case TRUTH_ORIF_EXPR:
3091 return build2 (TRUTH_ANDIF_EXPR, type,
3092 invert_truthvalue (TREE_OPERAND (arg, 0)),
3093 invert_truthvalue (TREE_OPERAND (arg, 1)));
3095 case TRUTH_NOT_EXPR:
3096 return TREE_OPERAND (arg, 0);
3100 tree arg1 = TREE_OPERAND (arg, 1);
3101 tree arg2 = TREE_OPERAND (arg, 2);
3102 /* A COND_EXPR may have a throw as one operand, which
3103 then has void type. Just leave void operands as they are. */
3105 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3106 VOID_TYPE_P (TREE_TYPE (arg1))
3107 ? arg1 : invert_truthvalue (arg1),
3108 VOID_TYPE_P (TREE_TYPE (arg2))
3109 ? arg2 : invert_truthvalue (arg2));
3113 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3114 invert_truthvalue (TREE_OPERAND (arg, 1)));
3116 case NON_LVALUE_EXPR:
3117 return invert_truthvalue (TREE_OPERAND (arg, 0));
3120 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3125 return build1 (TREE_CODE (arg), type,
3126 invert_truthvalue (TREE_OPERAND (arg, 0)));
3129 if (!integer_onep (TREE_OPERAND (arg, 1)))
3131 return build2 (EQ_EXPR, type, arg,
3132 build_int_cst (type, 0));
3135 return build1 (TRUTH_NOT_EXPR, type, arg);
3137 case CLEANUP_POINT_EXPR:
3138 return build1 (CLEANUP_POINT_EXPR, type,
3139 invert_truthvalue (TREE_OPERAND (arg, 0)));
3144 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3145 return build1 (TRUTH_NOT_EXPR, type, arg);
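/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws; a sketch of the equivalences being relied on:  */
#if 0
static int demorgan_and (int a, int b) { return !(a && b) == (!a || !b); }
static int demorgan_or  (int a, int b) { return !(a || b) == (!a && !b); }
/* Both return 1 for every a and b.  */
#endif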
3148 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3149 operands are another bit-wise operation with a common input. If so,
3150 distribute the bit operations to save an operation and possibly two if
3151 constants are involved. For example, convert
3152 (A | B) & (A | C) into A | (B & C)
3153 Further simplification will occur if B and C are constants.
3155 If this optimization cannot be done, 0 will be returned. */
3158 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3163 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3164 || TREE_CODE (arg0) == code
3165 || (TREE_CODE (arg0) != BIT_AND_EXPR
3166 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3169 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3171 common = TREE_OPERAND (arg0, 0);
3172 left = TREE_OPERAND (arg0, 1);
3173 right = TREE_OPERAND (arg1, 1);
3175 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3177 common = TREE_OPERAND (arg0, 0);
3178 left = TREE_OPERAND (arg0, 1);
3179 right = TREE_OPERAND (arg1, 0);
3181 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3183 common = TREE_OPERAND (arg0, 1);
3184 left = TREE_OPERAND (arg0, 0);
3185 right = TREE_OPERAND (arg1, 1);
3187 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3189 common = TREE_OPERAND (arg0, 1);
3190 left = TREE_OPERAND (arg0, 0);
3191 right = TREE_OPERAND (arg1, 0);
3196 return fold_build2 (TREE_CODE (arg0), type, common,
3197 fold_build2 (code, type, left, right));
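/* A quick instance of the distribution above, with the constants then
   folding away entirely: (X | 4) & (X | 1) becomes X | (4 & 1), which
   is X | 0, i.e. just X:  */
#if 0
static int before (int x) { return (x | 4) & (x | 1); }
static int after  (int x) { return x; } /* since 4 & 1 == 0 */
#endif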
3200 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3201 with code CODE. This optimization is unsafe. */
3203 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3205 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3206 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3208 /* (A / C) +- (B / C) -> (A +- B) / C. */
3210 && operand_equal_p (TREE_OPERAND (arg0, 1),
3211 TREE_OPERAND (arg1, 1), 0))
3212 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3213 fold_build2 (code, type,
3214 TREE_OPERAND (arg0, 0),
3215 TREE_OPERAND (arg1, 0)),
3216 TREE_OPERAND (arg0, 1));
3218 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3219 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3220 TREE_OPERAND (arg1, 0), 0)
3221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3222 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3224 REAL_VALUE_TYPE r0, r1;
3225 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3226 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3228 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3230 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3231 real_arithmetic (&r0, code, &r0, &r1);
3232 return fold_build2 (MULT_EXPR, type,
3233 TREE_OPERAND (arg0, 0),
3234 build_real (type, r0));
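/* Why this is marked unsafe: rewriting (a / c) + (b / c) as
   (a + b) / c changes the rounding (and possible overflow) of the
   intermediate results, so the two forms are not bit-for-bit identical
   in IEEE arithmetic; callers are expected to guard this behind the
   unsafe-math flags.  The shape involved:  */
#if 0
static double before (double a, double b, double c) { return a / c + b / c; }
static double after  (double a, double b, double c) { return (a + b) / c; }
#endif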
3240 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3241 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3244 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3251 tree size = TYPE_SIZE (TREE_TYPE (inner));
3252 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3253 || POINTER_TYPE_P (TREE_TYPE (inner)))
3254 && host_integerp (size, 0)
3255 && tree_low_cst (size, 0) == bitsize)
3256 return fold_convert (type, inner);
3259 result = build3 (BIT_FIELD_REF, type, inner,
3260 size_int (bitsize), bitsize_int (bitpos));
3262 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3267 /* Optimize a bit-field compare.
3269 There are two cases: First is a compare against a constant and the
3270 second is a comparison of two items where the fields are at the same
3271 bit position relative to the start of a chunk (byte, halfword, word)
3272 large enough to contain it. In these cases we can avoid the shift
3273 implicit in bitfield extractions.
3275 For constants, we emit a compare of the shifted constant with the
3276 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3277 compared. For two fields at the same position, we do the ANDs with the
3278 similar mask and compare the result of the ANDs.
3280 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3281 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3282 are the left and right operands of the comparison, respectively.
3284 If the optimization described above can be done, we return the resulting
3285 tree. Otherwise we return zero. */
3288 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3291 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3292 tree type = TREE_TYPE (lhs);
3293 tree signed_type, unsigned_type;
3294 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3295 enum machine_mode lmode, rmode, nmode;
3296 int lunsignedp, runsignedp;
3297 int lvolatilep = 0, rvolatilep = 0;
3298 tree linner, rinner = NULL_TREE;
3302 /* Get all the information about the extractions being done. If the bit size
3303 is the same as the size of the underlying object, we aren't doing an
3304 extraction at all and so can do nothing. We also don't want to
3305 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3306 then will no longer be able to replace it. */
3307 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3308 &lunsignedp, &lvolatilep, false);
3309 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3310 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3315 /* If this is not a constant, we can only do something if bit positions,
3316 sizes, and signedness are the same. */
3317 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3318 &runsignedp, &rvolatilep, false);
3320 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3321 || lunsignedp != runsignedp || offset != 0
3322 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3326 /* See if we can find a mode to refer to this field. We should be able to,
3327 but fail if we can't. */
3328 nmode = get_best_mode (lbitsize, lbitpos,
3329 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3330 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3331 TYPE_ALIGN (TREE_TYPE (rinner))),
3332 word_mode, lvolatilep || rvolatilep);
3333 if (nmode == VOIDmode)
3336 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
3338 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3339 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3341 /* Compute the bit position and size for the new reference and our offset
3342 within it. If the new reference is the same size as the original, we
3343 won't optimize anything, so return zero. */
3344 nbitsize = GET_MODE_BITSIZE (nmode);
3345 nbitpos = lbitpos & ~ (nbitsize - 1);
3347 if (nbitsize == lbitsize)
3350 if (BYTES_BIG_ENDIAN)
3351 lbitpos = nbitsize - lbitsize - lbitpos;
3353 /* Make the mask to be used against the extracted field. */
3354 mask = build_int_cst (unsigned_type, -1);
3355 mask = force_fit_type (mask, 0, false, false);
3356 mask = fold_convert (unsigned_type, mask);
3357 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3358 mask = const_binop (RSHIFT_EXPR, mask,
3359 size_int (nbitsize - lbitsize - lbitpos), 0);
3362 /* If not comparing with constant, just rework the comparison and return. */
3364 return build2 (code, compare_type,
3365 build2 (BIT_AND_EXPR, unsigned_type,
3366 make_bit_field_ref (linner, unsigned_type,
3367 nbitsize, nbitpos, 1),
3369 build2 (BIT_AND_EXPR, unsigned_type,
3370 make_bit_field_ref (rinner, unsigned_type,
3371 nbitsize, nbitpos, 1),
3374 /* Otherwise, we are handling the constant case. See if the constant is too
3375 big for the field. Warn and return a tree for 0 (false) if so. We do
3376 this not only for its own sake, but to avoid having to test for this
3377 error case below. If we didn't, we might generate wrong code.
3379 For unsigned fields, the constant shifted right by the field length should
3380 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3385 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3386 fold_convert (unsigned_type, rhs),
3387 size_int (lbitsize), 0)))
3389 warning (0, "comparison is always %d due to width of bit-field",
3391 return constant_boolean_node (code == NE_EXPR, compare_type);
3396 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3397 size_int (lbitsize - 1), 0);
3398 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3400 warning (0, "comparison is always %d due to width of bit-field",
3402 return constant_boolean_node (code == NE_EXPR, compare_type);
3406 /* Single-bit compares should always be against zero. */
3407 if (lbitsize == 1 && ! integer_zerop (rhs))
3409 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3410 rhs = build_int_cst (type, 0);
3413 /* Make a new bitfield reference, shift the constant over the
3414 appropriate number of bits and mask it with the computed mask
3415 (in case this was a signed field). If we changed it, make a new one. */
3416 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3419 TREE_SIDE_EFFECTS (lhs) = 1;
3420 TREE_THIS_VOLATILE (lhs) = 1;
3423 rhs = const_binop (BIT_AND_EXPR,
3424 const_binop (LSHIFT_EXPR,
3425 fold_convert (unsigned_type, rhs),
3426 size_int (lbitpos), 0),
3429 return build2 (code, compare_type,
3430 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
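/* An illustration of what the constant case buys: comparing a
   bit-field against a constant can skip the extraction shift and test
   the containing unit directly.  Roughly (MASK and SHIFT are
   hypothetical, target-dependent values):  */
#if 0
struct s { unsigned a : 3; unsigned b : 5; };
static int before (struct s *p) { return p->b == 7; }
/* becomes, in effect:  (<containing word of *p> & MASK) == (7 << SHIFT) */
#endif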
3434 /* Subroutine for fold_truthop: decode a field reference.
3436 If EXP is a comparison reference, we return the innermost reference.
3438 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3439 set to the starting bit number.
3441 If the innermost field can be completely contained in a mode-sized
3442 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3444 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3445 otherwise it is not changed.
3447 *PUNSIGNEDP is set to the signedness of the field.
3449 *PMASK is set to the mask used. This is either contained in a
3450 BIT_AND_EXPR or derived from the width of the field.
3452 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3454 Return 0 if this is not a component reference or is one that we can't
3455 do anything with. */
3458 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3459 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3460 int *punsignedp, int *pvolatilep,
3461 tree *pmask, tree *pand_mask)
3463 tree outer_type = 0;
3465 tree mask, inner, offset;
3467 unsigned int precision;
3469 /* All the optimizations using this function assume integer fields.
3470 There are problems with FP fields since the type_for_size call
3471 below can fail for, e.g., XFmode. */
3472 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3475 /* We are interested in the bare arrangement of bits, so strip everything
3476 that doesn't affect the machine mode. However, record the type of the
3477 outermost expression if it may matter below. */
3478 if (TREE_CODE (exp) == NOP_EXPR
3479 || TREE_CODE (exp) == CONVERT_EXPR
3480 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3481 outer_type = TREE_TYPE (exp);
3484 if (TREE_CODE (exp) == BIT_AND_EXPR)
3486 and_mask = TREE_OPERAND (exp, 1);
3487 exp = TREE_OPERAND (exp, 0);
3488 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3489 if (TREE_CODE (and_mask) != INTEGER_CST)
3493 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3494 punsignedp, pvolatilep, false);
3495 if ((inner == exp && and_mask == 0)
3496 || *pbitsize < 0 || offset != 0
3497 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3500 /* If the number of bits in the reference is the same as the bitsize of
3501 the outer type, then the outer type gives the signedness. Otherwise
3502 (in case of a small bitfield) the signedness is unchanged. */
3503 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3504 *punsignedp = TYPE_UNSIGNED (outer_type);
3506 /* Compute the mask to access the bitfield. */
3507 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3508 precision = TYPE_PRECISION (unsigned_type);
3510 mask = build_int_cst (unsigned_type, -1);
3511 mask = force_fit_type (mask, 0, false, false);
3513 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3514 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3516 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3518 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3519 fold_convert (unsigned_type, and_mask), mask);
3522 *pand_mask = and_mask;
3526 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bit positions. */
3530 all_ones_mask_p (tree mask, int size)
3532 tree type = TREE_TYPE (mask);
3533 unsigned int precision = TYPE_PRECISION (type);
3536 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3537 tmask = force_fit_type (tmask, 0, false, false);
3540 tree_int_cst_equal (mask,
3541 const_binop (RSHIFT_EXPR,
3542 const_binop (LSHIFT_EXPR, tmask,
3543 size_int (precision - size),
3545 size_int (precision - size), 0));
3548 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3549 represents the sign bit of EXP's type. If EXP represents a sign
3550 or zero extension, also test VAL against the unextended type.
3551 The return value is the (sub)expression whose sign bit is VAL,
3552 or NULL_TREE otherwise. */
3555 sign_bit_p (tree exp, tree val)
3557 unsigned HOST_WIDE_INT mask_lo, lo;
3558 HOST_WIDE_INT mask_hi, hi;
3562 /* Tree EXP must have an integral type. */
3563 t = TREE_TYPE (exp);
3564 if (! INTEGRAL_TYPE_P (t))
3567 /* Tree VAL must be an integer constant. */
3568 if (TREE_CODE (val) != INTEGER_CST
3569 || TREE_CONSTANT_OVERFLOW (val))
3572 width = TYPE_PRECISION (t);
3573 if (width > HOST_BITS_PER_WIDE_INT)
3575 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3578 mask_hi = ((unsigned HOST_WIDE_INT) -1
3579 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3585 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3588 mask_lo = ((unsigned HOST_WIDE_INT) -1
3589 >> (HOST_BITS_PER_WIDE_INT - width));
3592 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3593 treat VAL as if it were unsigned. */
3594 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3595 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3598 /* Handle extension from a narrower type. */
3599 if (TREE_CODE (exp) == NOP_EXPR
3600 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3601 return sign_bit_p (TREE_OPERAND (exp, 0), val);
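/* Example of what sign_bit_p recognizes: for a 32-bit int the sign
   bit constant is 0x80000000, which lets fold turn a mask test into a
   sign test:  */
#if 0
static int before (int x) { return (x & 0x80000000) != 0; }
static int after  (int x) { return x < 0; } /* same result on 32-bit int */
#endif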
3606 /* Subroutine for fold_truthop: determine if an operand is simple enough
3607 to be evaluated unconditionally. */
3610 simple_operand_p (tree exp)
3612 /* Strip any conversions that don't change the machine mode. */
3615 return (CONSTANT_CLASS_P (exp)
3616 || TREE_CODE (exp) == SSA_NAME
3618 && ! TREE_ADDRESSABLE (exp)
3619 && ! TREE_THIS_VOLATILE (exp)
3620 && ! DECL_NONLOCAL (exp)
3621 /* Don't regard global variables as simple. They may be
3622 allocated in ways unknown to the compiler (shared memory,
3623 #pragma weak, etc). */
3624 && ! TREE_PUBLIC (exp)
3625 && ! DECL_EXTERNAL (exp)
3626 /* Loading a static variable is unduly expensive, but global
3627 registers aren't expensive. */
3628 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3631 /* The following functions are subroutines to fold_range_test and allow it to
3632 try to change a logical combination of comparisons into a range test. For example,
3635 X == 2 || X == 3 || X == 4 || X == 5
3639 is converted to (unsigned) (X - 2) <= 3.
3641 We describe each set of comparisons as being either inside or outside
3642 a range, using a variable named like IN_P, and then describe the
3643 range with a lower and upper bound. If one of the bounds is omitted,
3644 it represents either the highest or lowest value of the type.
3646 In the comments below, we represent a range by two numbers in brackets
3647 preceded by a "+" to designate being inside that range, or a "-" to
3648 designate being outside that range, so the condition can be inverted by
3649 flipping the prefix. An omitted bound is represented by a "-". For
3650 example, "- [-, 10]" means being outside the range starting at the lowest
3651 possible value and ending at 10, in other words, being greater than 10.
3652 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3655 We set up things so that the missing bounds are handled in a consistent
3656 manner so neither a missing bound nor "true" and "false" need to be
3657 handled using a special case. */
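/* The running example above, written out: both forms below agree for
   every int X, because subtracting 2 slides the range [2, 5] down to
   [0, 3] and the unsigned comparison then checks both bounds at once
   (values below 2 wrap around to huge unsigned numbers):  */
#if 0
static int before (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
static int after  (int x) { return (unsigned) (x - 2) <= 3; }
#endif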
3659 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3660 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3661 and UPPER1_P are nonzero if the respective argument is an upper bound
3662 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3663 must be specified for a comparison. ARG1 will be converted to ARG0's
3664 type if both are specified. */
3667 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3668 tree arg1, int upper1_p)
3674 /* If neither arg represents infinity, do the normal operation.
3675 Else, if not a comparison, return infinity. Else handle the special
3676 comparison rules. Note that most of the cases below won't occur, but
3677 are handled for consistency. */
3679 if (arg0 != 0 && arg1 != 0)
3681 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3682 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3684 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3687 if (TREE_CODE_CLASS (code) != tcc_comparison)
3690 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3691 for neither. In real maths, we cannot assume open ended ranges are
3692 the same. But, this is computer arithmetic, where numbers are finite.
3693 We can therefore model any missing bound as a value Z, Z being
3694 greater than any representable number. This permits us to treat
3695 unbounded ranges as equal.
3696 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3697 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3701 result = sgn0 == sgn1;
3704 result = sgn0 != sgn1;
3707 result = sgn0 < sgn1;
3710 result = sgn0 <= sgn1;
3713 result = sgn0 > sgn1;
3716 result = sgn0 >= sgn1;
3722 return constant_boolean_node (result, type);
3725 /* Given EXP, a logical expression, set the range it is testing into
3726 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3727 actually being tested. *PLOW and *PHIGH will be made of the same type
3728 as the returned expression. If EXP is not a comparison, we will most
3729 likely not be returning a useful value and range. */
3732 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3734 enum tree_code code;
3735 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3736 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3738 tree low, high, n_low, n_high;
3740 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3741 and see if we can refine the range. Some of the cases below may not
3742 happen, but it doesn't seem worth worrying about this. We "continue"
3743 the outer loop when we've changed something; otherwise we "break"
3744 the switch, which will "break" the while. */
3747 low = high = build_int_cst (TREE_TYPE (exp), 0);
3751 code = TREE_CODE (exp);
3752 exp_type = TREE_TYPE (exp);
3754 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3756 if (TREE_CODE_LENGTH (code) > 0)
3757 arg0 = TREE_OPERAND (exp, 0);
3758 if (TREE_CODE_CLASS (code) == tcc_comparison
3759 || TREE_CODE_CLASS (code) == tcc_unary
3760 || TREE_CODE_CLASS (code) == tcc_binary)
3761 arg0_type = TREE_TYPE (arg0);
3762 if (TREE_CODE_CLASS (code) == tcc_binary
3763 || TREE_CODE_CLASS (code) == tcc_comparison
3764 || (TREE_CODE_CLASS (code) == tcc_expression
3765 && TREE_CODE_LENGTH (code) > 1))
3766 arg1 = TREE_OPERAND (exp, 1);
3771 case TRUTH_NOT_EXPR:
3772 in_p = ! in_p, exp = arg0;
3775 case EQ_EXPR: case NE_EXPR:
3776 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3777 /* We can only do something if the range is testing for zero
3778 and if the second operand is an integer constant. Note that
3779 saying something is "in" the range we make is done by
3780 complementing IN_P since it will set in the initial case of
3781 being not equal to zero; "out" is leaving it alone. */
3782 if (low == 0 || high == 0
3783 || ! integer_zerop (low) || ! integer_zerop (high)
3784 || TREE_CODE (arg1) != INTEGER_CST)
3789 case NE_EXPR: /* - [c, c] */
3792 case EQ_EXPR: /* + [c, c] */
3793 in_p = ! in_p, low = high = arg1;
3795 case GT_EXPR: /* - [-, c] */
3796 low = 0, high = arg1;
3798 case GE_EXPR: /* + [c, -] */
3799 in_p = ! in_p, low = arg1, high = 0;
3801 case LT_EXPR: /* - [c, -] */
3802 low = arg1, high = 0;
3804 case LE_EXPR: /* + [-, c] */
3805 in_p = ! in_p, low = 0, high = arg1;
3811 /* If this is an unsigned comparison, we also know that EXP is
3812 greater than or equal to zero. We base the range tests we make
3813 on that fact, so we record it here so we can parse existing
3814 range tests. We test arg0_type since often the return type
3815 of, e.g. EQ_EXPR, is boolean. */
3816 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3818 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3820 build_int_cst (arg0_type, 0),
3824 in_p = n_in_p, low = n_low, high = n_high;
3826 /* If the high bound is missing, but we have a nonzero low
3827 bound, reverse the range so it goes from zero to the low bound minus 1. */
3829 if (high == 0 && low && ! integer_zerop (low))
3832 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3833 integer_one_node, 0);
3834 low = build_int_cst (arg0_type, 0);
3842 /* (-x) IN [a,b] -> x in [-b, -a] */
3843 n_low = range_binop (MINUS_EXPR, exp_type,
3844 build_int_cst (exp_type, 0),
3846 n_high = range_binop (MINUS_EXPR, exp_type,
3847 build_int_cst (exp_type, 0),
3849 low = n_low, high = n_high;
3855 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3856 build_int_cst (exp_type, 1));
3859 case PLUS_EXPR: case MINUS_EXPR:
3860 if (TREE_CODE (arg1) != INTEGER_CST)
3863 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3864 move a constant to the other side. */
3865 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3868 /* If EXP is signed, any overflow in the computation is undefined,
3869 so we don't worry about it so long as our computations on
3870 the bounds don't overflow. For unsigned, overflow is defined
3871 and this is exactly the right thing. */
3872 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3873 arg0_type, low, 0, arg1, 0);
3874 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3875 arg0_type, high, 1, arg1, 0);
3876 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3877 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3880 /* Check for an unsigned range which has wrapped around the maximum
3881 value thus making n_high < n_low, and normalize it. */
3882 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3884 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3885 integer_one_node, 0);
3886 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3887 integer_one_node, 0);
3889 /* If the range is of the form +/- [ x+1, x ], we won't
3890 be able to normalize it. But then, it represents the
3891 whole range or the empty set, so make it +/- [-, -]. */
3893 if (tree_int_cst_equal (n_low, low)
3894 && tree_int_cst_equal (n_high, high))
3900 low = n_low, high = n_high;
3905 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3906 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3909 if (! INTEGRAL_TYPE_P (arg0_type)
3910 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3911 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3914 n_low = low, n_high = high;
3917 n_low = fold_convert (arg0_type, n_low);
3920 n_high = fold_convert (arg0_type, n_high);
3923 /* If we're converting arg0 from an unsigned type, to exp,
3924 a signed type, we will be doing the comparison as unsigned.
3925 The tests above have already verified that LOW and HIGH are both positive.
3928 So we have to ensure that we will handle large unsigned
3929 values the same way that the current signed bounds treat negative values. */
3932 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3935 tree equiv_type = lang_hooks.types.type_for_mode
3936 (TYPE_MODE (arg0_type), 1);
3938 /* A range without an upper bound is, naturally, unbounded.
3939 Since convert would have cropped a very large value, use
3940 the max value for the destination type. */
3942 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3943 : TYPE_MAX_VALUE (arg0_type);
3945 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3946 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3947 fold_convert (arg0_type,
3949 fold_convert (arg0_type,
3952 /* If the low bound is specified, "and" the range with the
3953 range for which the original unsigned value will be positive. */
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 1, n_low, n_high, 1,
3959 fold_convert (arg0_type,
3964 in_p = (n_in_p == in_p);
3968 /* Otherwise, "or" the range with the range of the input
3969 that will be interpreted as negative. */
3970 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3971 0, n_low, n_high, 1,
3972 fold_convert (arg0_type,
3977 in_p = (in_p != n_in_p);
3982 low = n_low, high = n_high;
3992 /* If EXP is a constant, we can evaluate whether this is true or false. */
3993 if (TREE_CODE (exp) == INTEGER_CST)
3995 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3997 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4003 *pin_p = in_p, *plow = low, *phigh = high;
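/* Example of the decomposition make_range performs (a hypothetical
   driver only; "x" and "ten" stand for pre-built trees): for the
   comparison "x > 10" on a signed type, the GT_EXPR case above yields
   the out-of-range form - [-, 10]:  */
#if 0
int in_p; tree low, high;
tree tested = make_range (build2 (GT_EXPR, boolean_type_node, x, ten),
                          &in_p, &low, &high);
/* => tested == x, in_p == 0, low == NULL_TREE (unbounded), high == 10.  */
#endif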
4007 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4008 type, TYPE, return an expression to test if EXP is in (or out of, depending
4009 on IN_P) the range. Return 0 if the test couldn't be created. */
4012 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4014 tree etype = TREE_TYPE (exp);
4017 #ifdef HAVE_canonicalize_funcptr_for_compare
4018 /* Disable this optimization for function pointer expressions
4019 on targets that require function pointer canonicalization. */
4020 if (HAVE_canonicalize_funcptr_for_compare
4021 && TREE_CODE (etype) == POINTER_TYPE
4022 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4028 value = build_range_check (type, exp, 1, low, high);
4030 return invert_truthvalue (value);
4035 if (low == 0 && high == 0)
4036 return build_int_cst (type, 1);
4039 return fold_build2 (LE_EXPR, type, exp,
4040 fold_convert (etype, high));
4043 return fold_build2 (GE_EXPR, type, exp,
4044 fold_convert (etype, low));
4046 if (operand_equal_p (low, high, 0))
4047 return fold_build2 (EQ_EXPR, type, exp,
4048 fold_convert (etype, low));
4050 if (integer_zerop (low))
4052 if (! TYPE_UNSIGNED (etype))
4054 etype = lang_hooks.types.unsigned_type (etype);
4055 high = fold_convert (etype, high);
4056 exp = fold_convert (etype, exp);
4058 return build_range_check (type, exp, 1, 0, high);
4061 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4062 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4064 unsigned HOST_WIDE_INT lo;
4068 prec = TYPE_PRECISION (etype);
4069 if (prec <= HOST_BITS_PER_WIDE_INT)
4072 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4076 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4077 lo = (unsigned HOST_WIDE_INT) -1;
4080 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4082 if (TYPE_UNSIGNED (etype))
4084 etype = lang_hooks.types.signed_type (etype);
4085 exp = fold_convert (etype, exp);
4087 return fold_build2 (GT_EXPR, type, exp,
4088 build_int_cst (etype, 0));
4092 value = const_binop (MINUS_EXPR, high, low, 0);
4093 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4094 && ! TYPE_UNSIGNED (etype))
4096 tree utype, minv, maxv;
4098 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4099 for the type in question, as we rely on this here. */
4100 switch (TREE_CODE (etype))
4104 /* There is no requirement that LOW be within the range of ETYPE
4105 if the latter is a subtype. It must, however, be within the base
4106 type of ETYPE. So be sure we do the subtraction in that type. */
4107 if (TREE_TYPE (etype))
4108 etype = TREE_TYPE (etype);
4109 utype = lang_hooks.types.unsigned_type (etype);
4110 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4111 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4112 integer_one_node, 1);
4113 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4114 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4118 high = fold_convert (etype, high);
4119 low = fold_convert (etype, low);
4120 exp = fold_convert (etype, exp);
4121 value = const_binop (MINUS_EXPR, high, low, 0);
4129 if (value != 0 && ! TREE_OVERFLOW (value))
4131 /* There is no requirement that LOW be within the range of ETYPE
4132 if the latter is a subtype. It must, however, be within the base
4133 type of ETYPE. So be sure we do the subtraction in that type. */
4134 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4136 etype = TREE_TYPE (etype);
4137 exp = fold_convert (etype, exp);
4138 low = fold_convert (etype, low);
4139 value = fold_convert (etype, value);
4142 return build_range_check (type,
4143 fold_build2 (MINUS_EXPR, etype, exp, low),
4144 1, build_int_cst (etype, 0), value);
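/* The "(c >= 1) && (c <= 127)" case noted above, spelled out: for an
   unsigned char operand, the range check [1, 127] becomes a sign test
   on the signed type of the same width (8-bit, two's-complement chars
   assumed):  */
#if 0
static int before (unsigned char c) { return c >= 1 && c <= 127; }
static int after  (unsigned char c) { return (signed char) c > 0; }
#endif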
4150 /* Given two ranges, see if we can merge them into one. Return 1 if we
4151 can, 0 if we can't. Set the output range into the specified parameters. */
4154 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4155 tree high0, int in1_p, tree low1, tree high1)
4163 int lowequal = ((low0 == 0 && low1 == 0)
4164 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4165 low0, 0, low1, 0)));
4166 int highequal = ((high0 == 0 && high1 == 0)
4167 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4168 high0, 1, high1, 1)));
4170 /* Make range 0 be the range that starts first, or ends last if they
4171 start at the same value. Swap them if it isn't. */
4172 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4175 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4176 high1, 1, high0, 1))))
4178 temp = in0_p, in0_p = in1_p, in1_p = temp;
4179 tem = low0, low0 = low1, low1 = tem;
4180 tem = high0, high0 = high1, high1 = tem;
4183 /* Now flag two cases, whether the ranges are disjoint or whether the
4184 second range is totally subsumed in the first. Note that the tests
4185 below are simplified by the ones above. */
4186 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4187 high0, 1, low1, 0));
4188 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4189 high1, 1, high0, 1));
4191 /* We now have four cases, depending on whether we are including or
4192 excluding the two ranges. */
4195 /* If they don't overlap, the result is false. If the second range
4196 is a subset it is the result. Otherwise, the range is from the start
4197 of the second to the end of the first. */
4199 in_p = 0, low = high = 0;
4201 in_p = 1, low = low1, high = high1;
4203 in_p = 1, low = low1, high = high0;
4206 else if (in0_p && ! in1_p)
4208 /* If they don't overlap, the result is the first range. If they are
4209 equal, the result is false. If the second range is a subset of the
4210 first, and the ranges begin at the same place, we go from just after
4211 the end of the first range to the end of the second. If the second
4212 range is not a subset of the first, or if it is a subset and both
4213 ranges end at the same place, the range starts at the start of the
4214 first range and ends just before the second range.
4215 Otherwise, we can't describe this as a single range. */
4217 in_p = 1, low = low0, high = high0;
4218 else if (lowequal && highequal)
4219 in_p = 0, low = high = 0;
4220 else if (subset && lowequal)
4222 in_p = 1, high = high0;
4223 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4224 integer_one_node, 0);
4226 else if (! subset || highequal)
4228 in_p = 1, low = low0;
4229 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4230 integer_one_node, 0);
4236 else if (! in0_p && in1_p)
4238 /* If they don't overlap, the result is the second range. If the second
4239 is a subset of the first, the result is false. Otherwise,
4240 the range starts just after the first range and ends at the
4241 end of the second. */
4243 in_p = 1, low = low1, high = high1;
4244 else if (subset || highequal)
4245 in_p = 0, low = high = 0;
4248 in_p = 1, high = high1;
4249 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4250 integer_one_node, 0);
4256 /* The case where we are excluding both ranges. Here the complex case
4257 is if they don't overlap. In that case, the only time we have a
4258 range is if they are adjacent. If the second is a subset of the
4259 first, the result is the first. Otherwise, the range to exclude
4260 starts at the beginning of the first range and ends at the end of the second. */
4264 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4265 range_binop (PLUS_EXPR, NULL_TREE,
4267 integer_one_node, 1),
4269 in_p = 0, low = low0, high = high1;
4272 /* Canonicalize - [min, x] into - [-, x]. */
4273 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4274 switch (TREE_CODE (TREE_TYPE (low0)))
4277 if (TYPE_PRECISION (TREE_TYPE (low0))
4278 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4282 if (tree_int_cst_equal (low0,
4283 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4287 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4288 && integer_zerop (low0))
4295 /* Canonicalize - [x, max] into - [x, -]. */
4296 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4297 switch (TREE_CODE (TREE_TYPE (high1)))
4300 if (TYPE_PRECISION (TREE_TYPE (high1))
4301 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4305 if (tree_int_cst_equal (high1,
4306 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4310 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4311 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4313 integer_one_node, 1)))
4320 /* The ranges might be also adjacent between the maximum and
4321 minimum values of the given type. For
4322 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4323 return + [x + 1, y - 1]. */
4324 if (low0 == 0 && high1 == 0)
4326 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4327 integer_one_node, 1);
4328 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4329 integer_one_node, 0);
4330 if (low == 0 || high == 0)
4340 in_p = 0, low = low0, high = high0;
4342 in_p = 0, low = low0, high = high1;
4345 *pin_p = in_p, *plow = low, *phigh = high;
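/* Worked example of the in0_p && in1_p case above: intersecting
   + [0, 10] with + [5, 20] is neither disjoint nor a subset, so the
   merged range runs from the start of the second range to the end of
   the first, + [5, 10].  In C terms:  */
#if 0
static int before (int x) { return (x >= 0 && x <= 10) && (x >= 5 && x <= 20); }
static int after  (int x) { return x >= 5 && x <= 10; }
#endif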
4350 /* Subroutine of fold, looking inside expressions of the form
4351 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4352 of the COND_EXPR. This function is being used also to optimize
4353 A op B ? C : A, by reversing the comparison first.
4355 Return a folded expression whose code is not a COND_EXPR
4356 anymore, or NULL_TREE if no folding opportunity is found. */
4359 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4361 enum tree_code comp_code = TREE_CODE (arg0);
4362 tree arg00 = TREE_OPERAND (arg0, 0);
4363 tree arg01 = TREE_OPERAND (arg0, 1);
4364 tree arg1_type = TREE_TYPE (arg1);
4370 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4373 A == 0? A : -A same as -A
4374 A != 0? A : -A same as A
4375 A >= 0? A : -A same as abs (A)
4376 A > 0? A : -A same as abs (A)
4377 A <= 0? A : -A same as -abs (A)
4378 A < 0? A : -A same as -abs (A)
4380 None of these transformations work for modes with signed
4381 zeros. If A is +/-0, the first two transformations will
4382 change the sign of the result (from +0 to -0, or vice
4383 versa). The last four will fix the sign of the result,
4384 even though the original expressions could be positive or
4385 negative, depending on the sign of A.
4387 Note that all these transformations are correct if A is
4388 NaN, since the two alternatives (A and -A) are also NaNs. */
4389 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4390 ? real_zerop (arg01)
4391 : integer_zerop (arg01))
4392 && ((TREE_CODE (arg2) == NEGATE_EXPR
4393 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4394 /* In the case that A is of the form X-Y, '-A' (arg2) may
4395 have already been folded to Y-X, check for that. */
4396 || (TREE_CODE (arg1) == MINUS_EXPR
4397 && TREE_CODE (arg2) == MINUS_EXPR
4398 && operand_equal_p (TREE_OPERAND (arg1, 0),
4399 TREE_OPERAND (arg2, 1), 0)
4400 && operand_equal_p (TREE_OPERAND (arg1, 1),
				     TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }
4452 /* Try some transformations of A op B ? A : B.
4454 A == B? A : B same as B
4455 A != B? A : B same as A
4456 A >= B? A : B same as max (A, B)
4457 A > B? A : B same as max (B, A)
4458 A <= B? A : B same as min (A, B)
4459 A < B? A : B same as min (B, A)
4461 As above, these transformations don't work in the presence
4462 of signed zeros. For example, if A and B are zeros of
4463 opposite sign, the first two transformations will change
4464 the sign of the result. In the last four, the original
4465 expressions give different results for (A=+0, B=-0) and
4466 (A=-0, B=+0), but the transformed expressions do not.
4468 The first two transformations are correct if either A or B
4469 is a NaN. In the first transformation, the condition will
4470 be false, and B will indeed be chosen. In the case of the
4471 second transformation, the condition A != B will be true,
4472 and A will be chosen.
4474 The conversions to max() and min() are not correct if B is
4475 a number and A is not. The conditions in the original
4476 expressions will be false, so all four give B. The min()
4477 and max() versions would give a NaN instead. */
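  /* Illustrative instance: with A = NaN and B = 1.0, "A < B ? A : B"
     yields B because the comparison is false, while MIN_EXPR (A, B) has
     no such guarantee; hence the !HONOR_NANS checks below.  */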
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || strcmp (lang_hooks.name, "GNU C++") != 0
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	comp_type = type;

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;
  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;
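  /* A worked instance (illustrative): for "ch >= '0' && ch <= '9'" both
     operands denote ranges of the same variable, so the merged test
     becomes the single range check "(unsigned char) (ch - '0') <= 9".  */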
4660 /* On machines where the branch cost is expensive, if this is a
4661 short-circuited branch and the underlying object on both sides
4662 is the same, make a non-short-circuit operation. */
4663 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4664 && lhs != 0 && rhs != 0
4665 && (code == TRUTH_ANDIF_EXPR
4666 || code == TRUTH_ORIF_EXPR)
4667 && operand_equal_p (lhs, rhs, 0))
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4718 /* We must use a signed type in order to get an arithmetic right shift.
4719 However, we must also avoid introducing accidental overflows, so that
4720 a subsequent call to integer_zerop will work. Hence we must
4721 do the type conversion here. At this point, the constant is either
4722 zero or one, and the conversion to a signed type can never overflow.
4723 We could get an overflow if this conversion is done anywhere else. */
4724 if (TYPE_UNSIGNED (type))
4725 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    {
      temp = const_binop (BIT_AND_EXPR, temp,
			  fold_convert (TREE_TYPE (c), mask), 0);
      /* If necessary, convert the type back to match the type of C.  */
      if (TYPE_UNSIGNED (type))
	temp = fold_convert (type, temp);
    }

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
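/* A worked instance of unextend (illustrative, with MASK zero): for p = 8
   in a 32-bit mode and unsignedp = 0, c = 0xffffffff (-1 sign-extended
   from 8 bits) gives temp = 0xffffff00 and c ^ temp = 0x000000ff, so the
   extra bits come out zero; the un-sign-extended c = 0x000000ff instead
   yields 0xffffffff, with the extra bits set.  */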
4739 /* Find ways of folding logical expressions of LHS and RHS:
4740 Try to merge two comparisons to the same innermost item.
4741 Look for range tests like "ch >= '0' && ch <= '9'".
4742 Look for combinations of simple terms on machines with expensive branches
4743 and evaluate the RHS unconditionally.
4745 For example, if we have p->a == 2 && p->b == 4 and we can make an
4746 object large enough to span both A and B, we can do this with a comparison
4747 against the object ANDed with the a mask.
4749 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4750 operations to do this with one comparison.
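   (Illustrative aside: for "struct s { unsigned a:4, b:4; } *p", the two
   loads in "p->a == 2 && p->b == 4" can become a single byte-sized load
   that is masked once and compared against one combined constant, byte
   order and padding permitting.)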
   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
4766 /* If this is the "or" of two comparisons, we can do something if
4767 the comparisons are NE_EXPR. If this is the "and", we can do something
4768 if the comparisons are EQ_EXPR. I.e.,
4769 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4771 WANTED_CODE is this operation code. For single bit fields, we can
4772 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4773 comparison for one-bit fields. */
4775 enum tree_code wanted_code;
4776 enum tree_code lcode, rcode;
4777 tree ll_arg, lr_arg, rl_arg, rr_arg;
4778 tree ll_inner, lr_inner, rl_inner, rr_inner;
4779 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4780 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4781 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4782 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4783 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4784 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4785 enum machine_mode lnmode, rnmode;
4786 tree ll_mask, lr_mask, rl_mask, rr_mask;
4787 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4788 tree l_const, r_const;
4789 tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
4793 /* Start by getting the comparison codes. Fail if anything is volatile.
4794 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4795 it were surrounded with a NE_EXPR. */
  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;
4821 ll_arg = TREE_OPERAND (lhs, 0);
4822 lr_arg = TREE_OPERAND (lhs, 1);
4823 rl_arg = TREE_OPERAND (rhs, 0);
4824 rr_arg = TREE_OPERAND (rhs, 1);
  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }
4850 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4851 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4853 /* If the RHS can be evaluated unconditionally and its operands are
4854 simple, it wins to evaluate the RHS unconditionally on machines
4855 with expensive branches. In this case, this isn't a comparison
4856 that can be merged. Avoid doing this if the RHS is a floating-point
4857 comparison since those can trap. */
4859 if (BRANCH_COST >= 2
4860 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4861 && simple_operand_p (rl_arg)
4862 && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	return build2 (code, truth_type, lhs, rhs);
    }
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;
4930 /* If either comparison code is not correct for our logical operation,
4931 fail. However, we can convert a one-bit comparison against zero into
4932 the opposite comparison against that bit being set in the field. */
4934 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }
4961 /* After this point all optimizations will generate bit-field
4962 references, which we might not want. */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;
4966 /* See if we can find a mode that contains both fields being compared on
4967 the left. If we can't, fail. Otherwise, update all constants and masks
4968 to be relative to a field of that size. */
4969 first_bit = MIN (ll_bitpos, rl_bitpos);
4970 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;
4977 lnbitsize = GET_MODE_BITSIZE (lnmode);
4978 lnbitpos = first_bit & ~ (lnbitsize - 1);
4979 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4980 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }
4988 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4989 size_int (xll_bitpos), 0);
4990 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4991 size_int (xrl_bitpos), 0);
  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;
5036 first_bit = MIN (lr_bitpos, rr_bitpos);
5037 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;
5044 rnbitsize = GET_MODE_BITSIZE (rnmode);
5045 rnbitpos = first_bit & ~ (rnbitsize - 1);
5046 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5047 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}
5055 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5056 size_int (xlr_bitpos), 0);
5057 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5058 size_int (xrr_bitpos), 0);
5060 /* Make a mask that corresponds to both fields being compared.
5061 Do this for both items being compared. If the operands are the
5062 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
5065 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5066 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}
5082 /* There is still another way we can do something: If both pairs of
5083 fields being compared are adjacent, we may be able to make a wider
5084 field containing them both.
5086 Note that we still must mask the lhs/rhs expressions. Furthermore,
5087 the mask must be shifted to account for the shift done by
5088 make_bit_field_ref. */
5089 if ((ll_bitsize + ll_bitpos == rl_bitpos
5090 && lr_bitsize + lr_bitpos == rr_bitpos)
5091 || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5097 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5098 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5099 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5101 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5102 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5103 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5104 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
5136 /* Handle the case of comparisons with constants. If there is something in
5137 common between the masks, those bits of the constants must be the same.
5138 If not, the condition is always false. Test for this to avoid generating
5139 incorrect code below. */
5140 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }
5157 /* Construct the expression we will return. First get the component
5158 reference we will make. Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
5161 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5162 ll_unsignedp || rl_unsignedp);
5164 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5165 if (! all_ones_mask_p (ll_mask, lnbitsize))
5166 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);
5187 op_code = TREE_CODE (arg0);
5188 minmax_const = TREE_OPERAND (arg0, 1);
5189 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5190 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5191 inner = TREE_OPERAND (arg0, 0);
5193 /* If something does not permit us to optimize, return the original tree. */
5194 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5195 || TREE_CODE (comp_const) != INTEGER_CST
5196 || TREE_CONSTANT_OVERFLOW (comp_const)
5197 || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;
5201 /* Now handle all the various comparison codes. We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
5208 /* FIXME: We should be able to invert code without building a
5209 scratch tree node, but doing so would require us to
5210 duplicate a part of invert_truthvalue here. */
5211 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
5222 optimize_minmax_comparison
5223 (EQ_EXPR, type, arg0, comp_const),
5224 optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
5229 /* MAX (X, 0) == 0 -> X <= 0 */
5230 return fold_build2 (LE_EXPR, type, inner, comp_const);
5232 else if (op_code == MAX_EXPR && consts_lt)
5233 /* MAX (X, 0) == 5 -> X == 5 */
5234 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5236 else if (op_code == MAX_EXPR)
5237 /* MAX (X, 0) == -1 -> false */
5238 return omit_one_operand (type, integer_zero_node, inner);
5240 else if (consts_equal)
5241 /* MIN (X, 0) == 0 -> X >= 0 */
5242 return fold_build2 (GE_EXPR, type, inner, comp_const);
      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5254 /* MAX (X, 0) > 0 -> X > 0
5255 MAX (X, 0) > 5 -> X > 5 */
5256 return fold_build2 (GT_EXPR, type, inner, comp_const);
5258 else if (op_code == MAX_EXPR)
5259 /* MAX (X, 0) > -1 -> true */
5260 return omit_one_operand (type, integer_one_node, inner);
5262 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5263 /* MIN (X, 0) > 0 -> false
5264 MIN (X, 0) > 5 -> false */
5265 return omit_one_operand (type, integer_zero_node, inner);
      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      gcc_unreachable ();
    }
}
5276 /* T is an integer expression that is being multiplied, divided, or taken a
5277 modulus (CODE says which and what kind of divide or modulus) by a
5278 constant C. See if we can eliminate that operation by folding it with
5279 other operations already in T. WIDE_TYPE, if non-null, is a type that
5280 should be used for the computation if wider than our type.
5282 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5283 (X * 2) + (Y * 4). We must, however, be assured that either the original
5284 expression would not overflow or that overflow is undefined for the type
5285 in the language in question.
5287 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5288 the machine has a multiply-accumulate insn or that this is part of an
5289 addressing calculation.
5291 If we return a non-null expression, it is an equivalent form of the
5292 original computation, but need not be in the original type. */
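/* For instance (illustrative): extract_muldiv on t = (X * 8) + (Y * 16)
   with c = 4 and code = TRUNC_DIV_EXPR returns (X * 2) + (Y * 4),
   provided overflow of the original form is impossible or undefined.  */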
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */
  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
5318 tree type = TREE_TYPE (t);
5319 enum tree_code tcode = TREE_CODE (t);
5320 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5321 > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
5325 tree op0 = NULL_TREE, op1 = NULL_TREE;
  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;
5331 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5332 op0 = TREE_OPERAND (t, 0);
5334 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5335 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;
5350 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5351 /* If op0 is an expression ... */
5352 if ((COMPARISON_CLASS_P (op0)
5353 || UNARY_CLASS_P (op0)
5354 || BINARY_CLASS_P (op0)
5355 || EXPRESSION_CLASS_P (op0))
5356 /* ... and is unsigned, and its type is smaller than ctype,
5357 then we cannot pass through as widening. */
5358 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5359 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5360 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5361 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5362 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5363 /* ... or this is a truncation (t is narrower than op0),
5364 then we cannot pass through this narrowing. */
5365 || (GET_MODE_SIZE (TYPE_MODE (type))
5366 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5367 /* ... or signedness changes for division or modulus,
5368 then we cannot pass through this conversion. */
5369 || (code != MULT_EXPR
5370 && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;
5374 /* Pass the constant down and see if we can make a simplification. If
5375 we can, replace this expression with the inner simplification for
5376 possible later conversion to our or some other type. */
5377 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5378 && TREE_CODE (t2) == INTEGER_CST
5379 && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;
5405 case MIN_EXPR: case MAX_EXPR:
5406 /* If widening the type changes the signedness, then we can't perform
5407 this optimization as that changes the result. */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;
5411 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5412 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;
5423 case LSHIFT_EXPR: case RSHIFT_EXPR:
5424 /* If the second operand is constant, this is a multiplication
5425 or floor division, by a power of two, so we can treat it that
5426 way unless the multiplier or divisor overflows. Signed
5427 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
5430 if (TREE_CODE (op1) == INTEGER_CST
5431 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5432 /* const_binop may not detect overflow correctly,
5433 so check for it explicitly here. */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
5447 case PLUS_EXPR: case MINUS_EXPR:
5448 /* See if we can eliminate the operation on both sides. If we can, we
5449 can return a new PLUS or MINUS. If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
5452 t1 = extract_muldiv (op0, c, code, wide_type);
5453 t2 = extract_muldiv (op1, c, code, wide_type);
5454 if (t1 != 0 && t2 != 0
5455 && (code == MULT_EXPR
5456 /* If not multiplication, we can only do this if both operands
5457 are divisible by c. */
5458 || (multiple_of_p (ctype, op0, c)
5459 && multiple_of_p (ctype, op1, c))))
5460 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5461 fold_convert (ctype, t2));
5463 /* If this was a subtraction, negate OP1 and set it to be an addition.
5464 This simplifies the logic below. */
5465 if (tcode == MINUS_EXPR)
5466 tcode = PLUS_EXPR, op1 = negate_expr (op1);
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
5471 /* If either OP1 or C are negative, this optimization is not safe for
5472 some of the division and remainder types while for others we need
5473 to change the code. */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;
      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;
5508 /* If we were able to eliminate our operation from the first side,
5509 apply our operation to the second side and reform the PLUS. */
5510 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5511 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5513 /* The last case is if we are a multiply. In that case, we can
5514 apply the distributive law to commute the multiply and addition
5515 if the multiplication of the constants doesn't overflow. */
5516 if (code == MULT_EXPR)
5517 return fold_build2 (tcode, ctype,
5518 fold_build2 (code, ctype,
5519 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
5528 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5529 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5530 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5531 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5532 return omit_one_operand (type, integer_zero_node, op0);
5534 /* ... fall through ... */
5536 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5537 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5538 /* If we can extract our operation from the LHS, do so and return a
5539 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5543 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5544 fold_convert (ctype, op1));
5545 else if (tcode == MULT_EXPR && code == MULT_EXPR
5546 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5547 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5548 fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5556 fold_convert (ctype, c), 0))
5557 && ! TREE_OVERFLOW (t1))
5558 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5560 /* If these operations "cancel" each other, we have the main
5561 optimizations of this pass, which occur when either constant is a
5562 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
5568 if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5572 || (tcode == MULT_EXPR
5573 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5574 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
5596 /* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
5611 /* Return true if expr looks like an ARRAY_REF and set base and
5612 offset to the appropriate trees. If there is no offset,
5613 offset is set to NULL_TREE. Base will be canonicalized to
5614 something you can get the element type from using
5615 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5616 in bytes to the base. */
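/* Illustrative instances: "&a[i]" yields base "a" and byte offset
   "i * sizeof (*a)", while "p + 4" with pointer-typed "p" recurses on
   "p" and uses the addend 4 as the byte offset.  */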
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
5642 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5643 which we transform into an ADDR_EXPR with appropriate
5644 offset. For other arguments to the ADDR_EXPR we assume
5645 zero offset and as such do not care about the ADDR_EXPR
5646 type and strip possible nops from it. */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
5681 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5682 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5683 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5684 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5685 COND is the first argument to CODE; otherwise (as in the example
5686 given here), it is the second argument. TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
5695 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5696 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5697 tree test, true_value, false_value;
5698 tree lhs = NULL_TREE;
5699 tree rhs = NULL_TREE;
  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;
  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
5751 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5753 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5754 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5755 ADDEND is the same as X.
5757 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5758 and finite. The problematic cases are when X is zero, and its mode
5759 has signed zeros. In the case of rounding towards -infinity,
5760 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5761 modes, X + 0 is not the same as X because -0 + 0 is 0. */
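/* Concretely (illustrative): under round-towards-minus-infinity,
   +0.0 - 0.0 yields -0.0, so X - 0 must not be folded to X; in the other
   rounding modes -0.0 + 0.0 yields +0.0, so X + 0 must not be folded
   when X might be a signed zero.  */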
bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     in effect.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
5789 /* Subroutine of fold() that checks comparisons of built-in math
5790 functions against real constants.
5792 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5793 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5794 is the type of the result and ARG0 and ARG1 are the operands of the
5795 comparison. ARG1 must be a TREE_REAL_CST.
5797 The function returns the constant folded tree if a simplification
5798 can be made, and NULL_TREE otherwise. */
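/* Illustrative instance: "sqrt (x) > 2.0" is rewritten below as "x > 4.0"
   by squaring the constant; the LT/LE variants additionally guard
   against NaNs and infinities.  */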
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5809 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5811 c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
5814 /* sqrt(x) < y is always false, if y is negative. */
5815 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5816 return omit_one_operand (type, integer_zero_node, arg);
5818 /* sqrt(x) > y is always true, if y is negative and we
5819 don't care about NaNs, i.e. negative values of x. */
5820 if (code == NE_EXPR || !HONOR_NANS (mode))
5821 return omit_one_operand (type, integer_one_node, arg);
5823 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5824 return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
5836 /* sqrt(x) > y is x == +Inf, when y is very large. */
5837 if (HONOR_INFINITIES (mode))
5838 return fold_build2 (EQ_EXPR, type, arg,
5839 build_real (TREE_TYPE (arg), c2));
	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
5859 /* sqrt(x) < y is always true, when y is a very large
5860 value and we don't care about NaNs or Infinities. */
5861 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5862 return omit_one_operand (type, integer_one_node, arg);
5864 /* sqrt(x) < y is x != +Inf when y is very large and we
5865 don't care about NaNs. */
5866 if (! HONOR_NANS (mode))
5867 return fold_build2 (NE_EXPR, type, arg,
5868 build_real (TREE_TYPE (arg), c2));
5870 /* sqrt(x) < y is x >= 0 when y is very large and we
5871 don't care about Infinities. */
5872 if (! HONOR_INFINITIES (mode))
5873 return fold_build2 (GE_EXPR, type, arg,
5874 build_real (TREE_TYPE (arg), dconst0));
5876 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5877 if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
5882 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5883 fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
5891 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5892 if (! HONOR_NANS (mode))
5893 return fold_build2 (code, type, arg,
5894 build_real (TREE_TYPE (arg), c2));
5896 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5897 if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
5915 /* Subroutine of fold() that optimizes comparisons against Infinities,
5916 either +Inf or -Inf.
5918 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5919 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5920 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5922 The function returns the constant folded tree if a simplification
5923 can be made, and NULL_TREE otherwise. */
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, unless we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);
5953 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5954 if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
5965 real_maxval (&max, neg, mode);
5966 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5967 arg0, build_real (TREE_TYPE (arg0), max));
    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
5971 real_maxval (&max, neg, mode);
5972 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5973 arg0, build_real (TREE_TYPE (arg0), max));
    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
5977 real_maxval (&max, neg, mode);
5978 if (! HONOR_NANS (mode))
5979 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5980 arg0, build_real (TREE_TYPE (arg0), max));
5982 /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
5998 /* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6006 The function returns the constant folded tree if a simplification
6007 can be made, and NULL_TREE otherwise. */
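/* Worked example (illustrative): for signed X in "X / 3 == 2" we get
   prod = 6 and tmp = 2, so lo = 6 and hi = 8 and the comparison becomes
   the range check 6 <= X && X <= 8.  */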
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;
6019 /* We have to do this the hard way to detect unsigned overflow.
6020 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6021 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6022 TREE_INT_CST_HIGH (arg01),
6023 TREE_INT_CST_LOW (arg1),
6024 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6025 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6026 prod = force_fit_type (prod, -1, overflow, false);
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
6098 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6099 return omit_one_operand (type, integer_zero_node, arg00);
6100 if (TREE_OVERFLOW (hi))
6101 return fold_build2 (GE_EXPR, type, arg00, lo);
6102 if (TREE_OVERFLOW (lo))
6103 return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6143 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6144 equality/inequality test, then return a simplified form of the test
   using sign testing.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
6152 /* If this is testing a single bit, we can optimize the test. */
6153 if ((code == NE_EXPR || code == EQ_EXPR)
6154 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
6157 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6158 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6159 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6161 if (arg00 != NULL_TREE
6162 /* This is only a win if casting to a signed type is cheap,
6163 i.e. when arg00's type is not a partial mode. */
6164 && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6168 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6169 result_type, fold_convert (stype, arg00),
			      build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
6177 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6178 equality/inequality test, then return a simplified form of
6179 the test using shifts and logical operations. Otherwise return
6180 NULL. TYPE is the desired result type. */
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
6186 /* If this is testing a single bit, we can optimize the test. */
6187 if ((code == NE_EXPR || code == EQ_EXPR)
6188 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
6191 tree inner = TREE_OPERAND (arg0, 0);
6192 tree type = TREE_TYPE (arg0);
6193 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;
6206 /* Otherwise we have (A & C) != 0 where C is a single bit,
6207 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6208 Similarly for (A & C) == 0. */
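      /* For instance (illustrative): "(A & 8) != 0" becomes
	 "(A >> 3) & 1", and the EQ_EXPR form additionally XORs the
	 shifted value with 1 below.  */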
6210 /* If INNER is a right shift of a constant and it plus BITNUM does
6211 not overflow, adjust BITNUM and INNER. */
6212 if (TREE_CODE (inner) == RSHIFT_EXPR
6213 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6214 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6215 && bitnum < TYPE_PRECISION (type)
6216 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
			       bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}
6223 /* If we are going to be able to omit the AND below, we must do our
6224 operations as unsigned. If we must use the AND, we have a choice.
6225 Normally unsigned is faster, but for some machines signed is. */
6226 #ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif
6233 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6234 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6235 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6236 inner = fold_convert (intermediate_type, inner);
6239 inner = build2 (RSHIFT_EXPR, intermediate_type,
6240 inner, size_int (bitnum));
6242 if (code == EQ_EXPR)
6243 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6244 inner, integer_one_node);
6246 /* Put the AND last so it can combine with more things. */
6247 inner = build2 (BIT_AND_EXPR, intermediate_type,
6248 inner, integer_one_node);
6250 /* Make sure to return the proper type. */
6251 inner = fold_convert (result_type, inner);
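/* An illustrative sketch of the shift-and-mask form built above
   (an added example, assuming a 32-bit unsigned int):

       (x & 0x10) != 0   ==>   (x >> 4) & 1
       (x & 0x10) == 0   ==>   ((x >> 4) ^ 1) & 1

   The BIT_XOR_EXPR with 1 handles the == 0 case before the final
   AND with 1, and the AND is emitted last so it can combine with
   surrounding code.  */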
6258 /* Check whether we are allowed to reorder operands arg0 and arg1,
6259 such that the evaluation of arg1 occurs before arg0. */
6262 reorder_operands_p (tree arg0, tree arg1)
6264 if (! flag_evaluation_order)
6266 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6268 return ! TREE_SIDE_EFFECTS (arg0)
6269 && ! TREE_SIDE_EFFECTS (arg1);
6272 /* Test whether it is preferable to swap two operands, ARG0 and
6273 ARG1, for example because ARG0 is an integer constant and ARG1
6274 isn't. If REORDER is true, only recommend swapping if we can
6275 evaluate the operands in reverse order. */
6278 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6280 STRIP_SIGN_NOPS (arg0);
6281 STRIP_SIGN_NOPS (arg1);
6283 if (TREE_CODE (arg1) == INTEGER_CST)
6285 if (TREE_CODE (arg0) == INTEGER_CST)
6288 if (TREE_CODE (arg1) == REAL_CST)
6290 if (TREE_CODE (arg0) == REAL_CST)
6293 if (TREE_CODE (arg1) == COMPLEX_CST)
6295 if (TREE_CODE (arg0) == COMPLEX_CST)
6298 if (TREE_CONSTANT (arg1))
6300 if (TREE_CONSTANT (arg0))
6306 if (reorder && flag_evaluation_order
6307 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6315 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6316 for commutative and comparison operators. Ensuring a canonical
6317 form allows the optimizers to find additional redundancies without
6318 having to explicitly check for both orderings. */
6319 if (TREE_CODE (arg0) == SSA_NAME
6320 && TREE_CODE (arg1) == SSA_NAME
6321 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6327 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6328 ARG0 is extended to a wider type. */
6331 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6333 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6335 tree shorter_type, outer_type;
6339 if (arg0_unw == arg0)
6341 shorter_type = TREE_TYPE (arg0_unw);
6343 #ifdef HAVE_canonicalize_funcptr_for_compare
6344 /* Disable this optimization if we're casting a function pointer
6345 type on targets that require function pointer canonicalization. */
6346 if (HAVE_canonicalize_funcptr_for_compare
6347 && TREE_CODE (shorter_type) == POINTER_TYPE
6348 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6352 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6355 arg1_unw = get_unwidened (arg1, shorter_type);
6357 /* If possible, express the comparison in the shorter mode. */
6358 if ((code == EQ_EXPR || code == NE_EXPR
6359 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6360 && (TREE_TYPE (arg1_unw) == shorter_type
6361 || (TREE_CODE (arg1_unw) == INTEGER_CST
6362 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6363 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6364 && int_fits_type_p (arg1_unw, shorter_type))))
6365 return fold_build2 (code, type, arg0_unw,
6366 fold_convert (shorter_type, arg1_unw));
6368 if (TREE_CODE (arg1_unw) != INTEGER_CST
6369 || TREE_CODE (shorter_type) != INTEGER_TYPE
6370 || !int_fits_type_p (arg1_unw, shorter_type))
6373 /* If we are comparing with an integer that does not fit into the range
6374 of the shorter type, the result is known. */
6375 outer_type = TREE_TYPE (arg1_unw);
6376 min = lower_bound_in_type (outer_type, shorter_type);
6377 max = upper_bound_in_type (outer_type, shorter_type);
6379 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6381 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6388 return omit_one_operand (type, integer_zero_node, arg0);
6393 return omit_one_operand (type, integer_one_node, arg0);
6399 return omit_one_operand (type, integer_one_node, arg0);
6401 return omit_one_operand (type, integer_zero_node, arg0);
6406 return omit_one_operand (type, integer_zero_node, arg0);
6408 return omit_one_operand (type, integer_one_node, arg0);
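/* An illustrative sketch of the out-of-range cases above (an added
   example): for "unsigned char uc" widened to int, the constant 300
   lies above [0, 255], so

       (int) uc == 300   folds to   0
       (int) uc != 300   folds to   1
       (int) uc <  300   folds to   1

   and symmetrically for constants below the range.  */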
6417 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6418 ARG0 just the signedness is changed. */
6421 fold_sign_changed_comparison (enum tree_code code, tree type,
6422 tree arg0, tree arg1)
6424 tree arg0_inner, tmp;
6425 tree inner_type, outer_type;
6427 if (TREE_CODE (arg0) != NOP_EXPR
6428 && TREE_CODE (arg0) != CONVERT_EXPR)
6431 outer_type = TREE_TYPE (arg0);
6432 arg0_inner = TREE_OPERAND (arg0, 0);
6433 inner_type = TREE_TYPE (arg0_inner);
6435 #ifdef HAVE_canonicalize_funcptr_for_compare
6436 /* Disable this optimization if we're casting a function pointer
6437 type on targets that require function pointer canonicalization. */
6438 if (HAVE_canonicalize_funcptr_for_compare
6439 && TREE_CODE (inner_type) == POINTER_TYPE
6440 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6444 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6447 if (TREE_CODE (arg1) != INTEGER_CST
6448 && !((TREE_CODE (arg1) == NOP_EXPR
6449 || TREE_CODE (arg1) == CONVERT_EXPR)
6450 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6453 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6458 if (TREE_CODE (arg1) == INTEGER_CST)
6460 tmp = build_int_cst_wide (inner_type,
6461 TREE_INT_CST_LOW (arg1),
6462 TREE_INT_CST_HIGH (arg1));
6463 arg1 = force_fit_type (tmp, 0,
6464 TREE_OVERFLOW (arg1),
6465 TREE_CONSTANT_OVERFLOW (arg1));
6468 arg1 = fold_convert (inner_type, arg1);
6470 return fold_build2 (code, type, arg0_inner, arg1);
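/* An illustrative sketch (an added example): for "unsigned int u",
   the cast changes only the signedness, so an equality test can be
   carried out in the inner type:

       (int) u == 5   ==>   u == 5U

   Ordering comparisons additionally require matching signedness, as
   checked above.  */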
6473 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6474 the step of the array. Reconstructs s and delta in the case of s * delta
6475 being an integer constant (and thus already folded).
6476 ADDR is the address. MULT is the multiplicative expression.
6477 If the function succeeds, the new address expression is returned. Otherwise
6478 NULL_TREE is returned. */
6481 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6483 tree s, delta, step;
6484 tree ref = TREE_OPERAND (addr, 0), pref;
6488 /* Canonicalize op1 into a possibly non-constant delta
6489 and an INTEGER_CST s. */
6490 if (TREE_CODE (op1) == MULT_EXPR)
6492 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6497 if (TREE_CODE (arg0) == INTEGER_CST)
6502 else if (TREE_CODE (arg1) == INTEGER_CST)
6510 else if (TREE_CODE (op1) == INTEGER_CST)
6517 /* Treat op1 as delta * 1. */
6519 s = integer_one_node;
6522 for (;; ref = TREE_OPERAND (ref, 0))
6524 if (TREE_CODE (ref) == ARRAY_REF)
6526 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6530 step = array_ref_element_size (ref);
6531 if (TREE_CODE (step) != INTEGER_CST)
6536 if (! tree_int_cst_equal (step, s))
6541 /* Check whether delta is a multiple of step. */
6542 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6551 if (!handled_component_p (ref))
6555 /* We found a suitable array reference. So copy everything up to it,
6556 and replace the index. */
6558 pref = TREE_OPERAND (addr, 0);
6559 ret = copy_node (pref);
6564 pref = TREE_OPERAND (pref, 0);
6565 TREE_OPERAND (pos, 0) = copy_node (pref);
6566 pos = TREE_OPERAND (pos, 0);
6569 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6570 fold_convert (itype,
6571 TREE_OPERAND (pos, 1)),
6572 fold_convert (itype, delta));
6574 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
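/* An illustrative sketch (an added example): for "int a[N]" with
   4-byte elements,

       &a[i] + 4 * d   ==>   &a[i + d]

   and when the multiplication is already folded, e.g. the constant
   8, the step 4 is divided out to give &a[i + 2].  */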
6578 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6579 means A >= Y && A != MAX, but in this case we know that
6580 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6583 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6585 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6587 if (TREE_CODE (bound) == LT_EXPR)
6588 a = TREE_OPERAND (bound, 0);
6589 else if (TREE_CODE (bound) == GT_EXPR)
6590 a = TREE_OPERAND (bound, 1);
6594 typea = TREE_TYPE (a);
6595 if (!INTEGRAL_TYPE_P (typea)
6596 && !POINTER_TYPE_P (typea))
6599 if (TREE_CODE (ineq) == LT_EXPR)
6601 a1 = TREE_OPERAND (ineq, 1);
6602 y = TREE_OPERAND (ineq, 0);
6604 else if (TREE_CODE (ineq) == GT_EXPR)
6606 a1 = TREE_OPERAND (ineq, 0);
6607 y = TREE_OPERAND (ineq, 1);
6612 if (TREE_TYPE (a1) != typea)
6615 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6616 if (!integer_onep (diff))
6619 return fold_build2 (GE_EXPR, type, a, y);
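/* An illustrative sketch (an added example): with BOUND = (a < x)
   and INEQ = (a + 1 > y), the function returns (a >= y), so a caller
   can rewrite

       a < x && a + 1 > y   ==>   a < x && a >= y

   which is valid because a < x rules out a == MAX, the one case
   where a + 1 > y and a >= y would differ.  */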
6622 /* Fold a sum or difference of at least one multiplication.
6623 Returns the folded tree or NULL if no simplification could be made. */
6626 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6628 tree arg00, arg01, arg10, arg11;
6629 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6631 /* (A * C) +- (B * C) -> (A+-B) * C.
6632 (A * C) +- A -> A * (C+-1).
6633 We are most concerned about the case where C is a constant,
6634 but other combinations show up during loop reduction. Since
6635 it is not difficult, try all four possibilities. */
6637 if (TREE_CODE (arg0) == MULT_EXPR)
6639 arg00 = TREE_OPERAND (arg0, 0);
6640 arg01 = TREE_OPERAND (arg0, 1);
6645 if (!FLOAT_TYPE_P (type))
6646 arg01 = build_int_cst (type, 1);
6648 arg01 = build_real (type, dconst1);
6650 if (TREE_CODE (arg1) == MULT_EXPR)
6652 arg10 = TREE_OPERAND (arg1, 0);
6653 arg11 = TREE_OPERAND (arg1, 1);
6658 if (!FLOAT_TYPE_P (type))
6659 arg11 = build_int_cst (type, 1);
6661 arg11 = build_real (type, dconst1);
6665 if (operand_equal_p (arg01, arg11, 0))
6666 same = arg01, alt0 = arg00, alt1 = arg10;
6667 else if (operand_equal_p (arg00, arg10, 0))
6668 same = arg00, alt0 = arg01, alt1 = arg11;
6669 else if (operand_equal_p (arg00, arg11, 0))
6670 same = arg00, alt0 = arg01, alt1 = arg10;
6671 else if (operand_equal_p (arg01, arg10, 0))
6672 same = arg01, alt0 = arg00, alt1 = arg11;
6674 /* No identical multiplicands; see if we can find a common
6675 power-of-two factor in non-power-of-two multiplies. This
6676 can help in multi-dimensional array access. */
6677 else if (host_integerp (arg01, 0)
6678 && host_integerp (arg11, 0))
6680 HOST_WIDE_INT int01, int11, tmp;
6683 int01 = TREE_INT_CST_LOW (arg01);
6684 int11 = TREE_INT_CST_LOW (arg11);
6686 /* Move min of absolute values to int11. */
6687 if ((int01 >= 0 ? int01 : -int01)
6688 < (int11 >= 0 ? int11 : -int11))
6690 tmp = int01, int01 = int11, int11 = tmp;
6691 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6698 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6700 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6701 build_int_cst (TREE_TYPE (arg00),
6706 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6711 return fold_build2 (MULT_EXPR, type,
6712 fold_build2 (code, type,
6713 fold_convert (type, alt0),
6714 fold_convert (type, alt1)),
6715 fold_convert (type, same));
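/* Illustrative sketches of the cases folded above (added examples,
   for int operands):

       x * 3 + y * 3    ==>   (x + y) * 3        identical C
       x * 3 + x        ==>   x * 4              (A * C) +- A
       i * 12 + j * 4   ==>   (i * 3 + j) * 4    power-of-two factor

   The last form is typical of multi-dimensional array indexing.  */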
6720 /* Fold a unary expression of code CODE and type TYPE with operand
6721 OP0. Return the folded expression if folding is successful.
6722 Otherwise, return NULL_TREE. */
6725 fold_unary (enum tree_code code, tree type, tree op0)
6729 enum tree_code_class kind = TREE_CODE_CLASS (code);
6731 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6732 && TREE_CODE_LENGTH (code) == 1);
6737 if (code == NOP_EXPR || code == CONVERT_EXPR
6738 || code == FLOAT_EXPR || code == ABS_EXPR)
6740 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6742 STRIP_SIGN_NOPS (arg0);
6746 /* Strip any conversions that don't change the mode. This
6747 is safe for every expression, except for a comparison
6748 expression because its signedness is derived from its operands.
6751 Note that this is done as an internal manipulation within
6752 the constant folder, in order to find the simplest
6753 representation of the arguments so that their form can be
6754 studied. In any case, the appropriate type conversions
6755 should be put back in the tree that will get out of the constant folder. */
6761 if (TREE_CODE_CLASS (code) == tcc_unary)
6763 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6764 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6765 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6766 else if (TREE_CODE (arg0) == COND_EXPR)
6768 tree arg01 = TREE_OPERAND (arg0, 1);
6769 tree arg02 = TREE_OPERAND (arg0, 2);
6770 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6771 arg01 = fold_build1 (code, type, arg01);
6772 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6773 arg02 = fold_build1 (code, type, arg02);
6774 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6777 /* If this was a conversion, and all we did was move it
6778 inside the COND_EXPR, bring it back out. But leave it if
6779 it is a conversion from integer to integer and the
6780 result precision is no wider than a word since such a
6781 conversion is cheap and may be optimized away by combine,
6782 while it couldn't if it were outside the COND_EXPR. Then return
6783 so we don't get into an infinite recursion loop taking the
6784 conversion out and then back in. */
6786 if ((code == NOP_EXPR || code == CONVERT_EXPR
6787 || code == NON_LVALUE_EXPR)
6788 && TREE_CODE (tem) == COND_EXPR
6789 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6790 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6791 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6792 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6793 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6794 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6795 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6797 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6798 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6799 || flag_syntax_only))
6800 tem = build1 (code, type,
6802 TREE_TYPE (TREE_OPERAND
6803 (TREE_OPERAND (tem, 1), 0)),
6804 TREE_OPERAND (tem, 0),
6805 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6806 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6809 else if (COMPARISON_CLASS_P (arg0))
6811 if (TREE_CODE (type) == BOOLEAN_TYPE)
6813 arg0 = copy_node (arg0);
6814 TREE_TYPE (arg0) = type;
6817 else if (TREE_CODE (type) != INTEGER_TYPE)
6818 return fold_build3 (COND_EXPR, type, arg0,
6819 fold_build1 (code, type,
6821 fold_build1 (code, type,
6822 integer_zero_node));
6831 case FIX_TRUNC_EXPR:
6833 case FIX_FLOOR_EXPR:
6834 case FIX_ROUND_EXPR:
6835 if (TREE_TYPE (op0) == type)
6838 /* If we have (type) (a CMP b) and type is an integral type, return
6839 a new expression involving the new type. */
6840 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6841 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6842 TREE_OPERAND (op0, 1));
6844 /* Handle cases of two conversions in a row. */
6845 if (TREE_CODE (op0) == NOP_EXPR
6846 || TREE_CODE (op0) == CONVERT_EXPR)
6848 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6849 tree inter_type = TREE_TYPE (op0);
6850 int inside_int = INTEGRAL_TYPE_P (inside_type);
6851 int inside_ptr = POINTER_TYPE_P (inside_type);
6852 int inside_float = FLOAT_TYPE_P (inside_type);
6853 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6854 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6855 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6856 int inter_int = INTEGRAL_TYPE_P (inter_type);
6857 int inter_ptr = POINTER_TYPE_P (inter_type);
6858 int inter_float = FLOAT_TYPE_P (inter_type);
6859 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6860 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6861 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6862 int final_int = INTEGRAL_TYPE_P (type);
6863 int final_ptr = POINTER_TYPE_P (type);
6864 int final_float = FLOAT_TYPE_P (type);
6865 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6866 unsigned int final_prec = TYPE_PRECISION (type);
6867 int final_unsignedp = TYPE_UNSIGNED (type);
6869 /* In addition to the cases of two conversions in a row
6870 handled below, if we are converting something to its own
6871 type via an object of identical or wider precision, neither
6872 conversion is needed. */
6873 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6874 && ((inter_int && final_int) || (inter_float && final_float))
6875 && inter_prec >= final_prec)
6876 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6878 /* Likewise, if the intermediate and final types are either both
6879 float or both integer, we don't need the middle conversion if
6880 it is wider than the final type and doesn't change the signedness
6881 (for integers). Avoid this if the final type is a pointer
6882 since then we sometimes need the inner conversion. Likewise if
6883 the outer has a precision not equal to the size of its mode. */
6884 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6885 || (inter_float && inside_float)
6886 || (inter_vec && inside_vec))
6887 && inter_prec >= inside_prec
6888 && (inter_float || inter_vec
6889 || inter_unsignedp == inside_unsignedp)
6890 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6891 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6893 && (! final_vec || inter_prec == inside_prec))
6894 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6896 /* If we have a sign-extension of a zero-extended value, we can
6897 replace that by a single zero-extension. */
6898 if (inside_int && inter_int && final_int
6899 && inside_prec < inter_prec && inter_prec < final_prec
6900 && inside_unsignedp && !inter_unsignedp)
6901 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6903 /* Two conversions in a row are not needed unless:
6904 - some conversion is floating-point (overstrict for now), or
6905 - some conversion is a vector (overstrict for now), or
6906 - the intermediate type is narrower than both initial and final, or
6908 - the intermediate type and innermost type differ in signedness,
6909 and the outermost type is wider than the intermediate, or
6910 - the initial type is a pointer type and the precisions of the
6911 intermediate and final types differ, or
6912 - the final type is a pointer type and the precisions of the
6913 initial and intermediate types differ. */
6914 if (! inside_float && ! inter_float && ! final_float
6915 && ! inside_vec && ! inter_vec && ! final_vec
6916 && (inter_prec > inside_prec || inter_prec > final_prec)
6917 && ! (inside_int && inter_int
6918 && inter_unsignedp != inside_unsignedp
6919 && inter_prec < final_prec)
6920 && ((inter_unsignedp && inter_prec > inside_prec)
6921 == (final_unsignedp && final_prec > inter_prec))
6922 && ! (inside_ptr && inter_prec != final_prec)
6923 && ! (final_ptr && inside_prec != inter_prec)
6924 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6925 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6927 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
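/* An illustrative sketch of the sign-extension-of-zero-extension
   rule above (an added example, assuming 8-bit char, 32-bit int and
   64-bit long): for "unsigned char c",

       (long) (int) c   ==>   (long) c

   since c is first zero-extended to int, and the int -> long
   sign-extension of a value in [0, 255] cannot change it.  */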
6930 /* Handle (T *)&A.B.C for A being of type T and B and C
6931 living at offset zero. This occurs frequently in
6932 C++ upcasting and then accessing the base. */
6933 if (TREE_CODE (op0) == ADDR_EXPR
6934 && POINTER_TYPE_P (type)
6935 && handled_component_p (TREE_OPERAND (op0, 0)))
6937 HOST_WIDE_INT bitsize, bitpos;
6939 enum machine_mode mode;
6940 int unsignedp, volatilep;
6941 tree base = TREE_OPERAND (op0, 0);
6942 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6943 &mode, &unsignedp, &volatilep, false);
6944 /* If the reference was to a (constant) zero offset, we can use
6945 the address of the base if it has the same base type
6946 as the result type. */
6947 if (! offset && bitpos == 0
6948 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6949 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6950 return fold_convert (type, build_fold_addr_expr (base));
6953 if (TREE_CODE (op0) == MODIFY_EXPR
6954 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6955 /* Detect assigning a bitfield. */
6956 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6957 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6959 /* Don't leave an assignment inside a conversion
6960 unless assigning a bitfield. */
6961 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6962 /* First do the assignment, then return converted constant. */
6963 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6964 TREE_NO_WARNING (tem) = 1;
6965 TREE_USED (tem) = 1;
6969 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6970 constant (if x has signed type, the sign bit cannot be set
6971 in c). This folds extension into the BIT_AND_EXPR. */
6972 if (INTEGRAL_TYPE_P (type)
6973 && TREE_CODE (type) != BOOLEAN_TYPE
6974 && TREE_CODE (op0) == BIT_AND_EXPR
6975 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6978 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6981 if (TYPE_UNSIGNED (TREE_TYPE (and))
6982 || (TYPE_PRECISION (type)
6983 <= TYPE_PRECISION (TREE_TYPE (and))))
6985 else if (TYPE_PRECISION (TREE_TYPE (and1))
6986 <= HOST_BITS_PER_WIDE_INT
6987 && host_integerp (and1, 1))
6989 unsigned HOST_WIDE_INT cst;
6991 cst = tree_low_cst (and1, 1);
6992 cst &= (HOST_WIDE_INT) -1
6993 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6994 change = (cst == 0);
6995 #ifdef LOAD_EXTEND_OP
6997 && !flag_syntax_only
6998 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7001 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7002 and0 = fold_convert (uns, and0);
7003 and1 = fold_convert (uns, and1);
7009 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7010 TREE_INT_CST_HIGH (and1));
7011 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7012 TREE_CONSTANT_OVERFLOW (and1));
7013 return fold_build2 (BIT_AND_EXPR, type,
7014 fold_convert (type, and0), tem);
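/* An illustrative sketch of the widening fold above (an added
   example, assuming 32-bit int, 64-bit long, two's complement):
   for "int x",

       (unsigned long) (x & 0xff)   ==>   (unsigned long) x & 0xffUL

   which is safe because the sign bit of the mask is clear, as the
   "cst == 0" test verifies.  */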
7018 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7019 T2 being pointers to types of the same size. */
7020 if (POINTER_TYPE_P (type)
7021 && BINARY_CLASS_P (arg0)
7022 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7023 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7025 tree arg00 = TREE_OPERAND (arg0, 0);
7027 tree t1 = TREE_TYPE (arg00);
7028 tree tt0 = TREE_TYPE (t0);
7029 tree tt1 = TREE_TYPE (t1);
7030 tree s0 = TYPE_SIZE (tt0);
7031 tree s1 = TYPE_SIZE (tt1);
7033 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7034 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7035 TREE_OPERAND (arg0, 1));
7038 tem = fold_convert_const (code, type, arg0);
7039 return tem ? tem : NULL_TREE;
7041 case VIEW_CONVERT_EXPR:
7042 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7043 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7047 if (negate_expr_p (arg0))
7048 return fold_convert (type, negate_expr (arg0));
7052 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7053 return fold_abs_const (arg0, type);
7054 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7055 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7056 /* Convert fabs((double)float) into (double)fabsf(float). */
7057 else if (TREE_CODE (arg0) == NOP_EXPR
7058 && TREE_CODE (type) == REAL_TYPE)
7060 tree targ0 = strip_float_extensions (arg0);
7062 return fold_convert (type, fold_build1 (ABS_EXPR,
7066 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7067 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7070 /* Strip sign ops from argument. */
7071 if (TREE_CODE (type) == REAL_TYPE)
7073 tem = fold_strip_sign_ops (arg0);
7075 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7080 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7081 return fold_convert (type, arg0);
7082 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7083 return build2 (COMPLEX_EXPR, type,
7084 TREE_OPERAND (arg0, 0),
7085 negate_expr (TREE_OPERAND (arg0, 1)));
7086 else if (TREE_CODE (arg0) == COMPLEX_CST)
7087 return build_complex (type, TREE_REALPART (arg0),
7088 negate_expr (TREE_IMAGPART (arg0)));
7089 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7090 return fold_build2 (TREE_CODE (arg0), type,
7091 fold_build1 (CONJ_EXPR, type,
7092 TREE_OPERAND (arg0, 0)),
7093 fold_build1 (CONJ_EXPR, type,
7094 TREE_OPERAND (arg0, 1)));
7095 else if (TREE_CODE (arg0) == CONJ_EXPR)
7096 return TREE_OPERAND (arg0, 0);
7100 if (TREE_CODE (arg0) == INTEGER_CST)
7101 return fold_not_const (arg0, type);
7102 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7103 return TREE_OPERAND (arg0, 0);
7104 /* Convert ~ (-A) to A - 1. */
7105 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7106 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7107 build_int_cst (type, 1));
7108 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7109 else if (INTEGRAL_TYPE_P (type)
7110 && ((TREE_CODE (arg0) == MINUS_EXPR
7111 && integer_onep (TREE_OPERAND (arg0, 1)))
7112 || (TREE_CODE (arg0) == PLUS_EXPR
7113 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7114 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7115 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7116 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7117 && (tem = fold_unary (BIT_NOT_EXPR, type,
7119 TREE_OPERAND (arg0, 0)))))
7120 return fold_build2 (BIT_XOR_EXPR, type, tem,
7121 fold_convert (type, TREE_OPERAND (arg0, 1)));
7122 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7123 && (tem = fold_unary (BIT_NOT_EXPR, type,
7125 TREE_OPERAND (arg0, 1)))))
7126 return fold_build2 (BIT_XOR_EXPR, type,
7127 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
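/* Illustrative sketches of the BIT_NOT_EXPR rules above (added
   examples, for int x, y):

       ~(-x)       ==>   x - 1
       ~(x - 1)    ==>   -x
       ~(x ^ ~y)   ==>   x ^ y     (because ~~y simplifies)

   all instances of the identity ~a == -a - 1.  */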
7131 case TRUTH_NOT_EXPR:
7132 /* The argument to invert_truthvalue must have Boolean type. */
7133 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7134 arg0 = fold_convert (boolean_type_node, arg0);
7136 /* Note that the operand of this must be an int
7137 and its values must be 0 or 1.
7138 ("true" is a fixed value perhaps depending on the language,
7139 but we don't handle values other than 1 correctly yet.) */
7140 tem = invert_truthvalue (arg0);
7141 /* Avoid infinite recursion. */
7142 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7144 return fold_convert (type, tem);
7147 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7149 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7150 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7151 TREE_OPERAND (arg0, 1));
7152 else if (TREE_CODE (arg0) == COMPLEX_CST)
7153 return TREE_REALPART (arg0);
7154 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7155 return fold_build2 (TREE_CODE (arg0), type,
7156 fold_build1 (REALPART_EXPR, type,
7157 TREE_OPERAND (arg0, 0)),
7158 fold_build1 (REALPART_EXPR, type,
7159 TREE_OPERAND (arg0, 1)));
7163 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7164 return fold_convert (type, integer_zero_node);
7165 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7166 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7167 TREE_OPERAND (arg0, 0));
7168 else if (TREE_CODE (arg0) == COMPLEX_CST)
7169 return TREE_IMAGPART (arg0);
7170 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7171 return fold_build2 (TREE_CODE (arg0), type,
7172 fold_build1 (IMAGPART_EXPR, type,
7173 TREE_OPERAND (arg0, 0)),
7174 fold_build1 (IMAGPART_EXPR, type,
7175 TREE_OPERAND (arg0, 1)));
7180 } /* switch (code) */
7183 /* Fold a binary expression of code CODE and type TYPE with operands
7184 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7185 Return the folded expression if folding is successful. Otherwise,
7186 return NULL_TREE. */
7189 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7191 enum tree_code compl_code;
7193 if (code == MIN_EXPR)
7194 compl_code = MAX_EXPR;
7195 else if (code == MAX_EXPR)
7196 compl_code = MIN_EXPR;
7200 /* MIN (MAX (a, b), b) == b.  */
7201 if (TREE_CODE (op0) == compl_code
7202 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7203 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7205 /* MIN (MAX (b, a), b) == b.  */
7206 if (TREE_CODE (op0) == compl_code
7207 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7208 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7209 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7211 /* MIN (a, MAX (a, b)) == a.  */
7212 if (TREE_CODE (op1) == compl_code
7213 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7214 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7215 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7217 /* MIN (a, MAX (b, a)) == a.  */
7218 if (TREE_CODE (op1) == compl_code
7219 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7220 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7221 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7226 /* Subroutine of fold_binary. This routine performs all of the
7227 transformations that are common to the equality/inequality
7228 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7229 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7230 fold_binary itself should go through fold_binary. Fold a comparison with
7231 tree code CODE and type TYPE with operands OP0 and OP1. Return
7232 the folded comparison or NULL_TREE. */
7235 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7237 tree arg0, arg1, tem;
7242 STRIP_SIGN_NOPS (arg0);
7243 STRIP_SIGN_NOPS (arg1);
7245 tem = fold_relational_const (code, type, arg0, arg1);
7246 if (tem != NULL_TREE)
7249 /* If one arg is a real or integer constant, put it last. */
7250 if (tree_swap_operands_p (arg0, arg1, true))
7251 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7253 /* If this is a comparison of two exprs that look like an
7254 ARRAY_REF of the same object, then we can fold this to a
7255 comparison of the two offsets. */
7257 tree base0, offset0, base1, offset1;
7259 if (extract_array_ref (arg0, &base0, &offset0)
7260 && extract_array_ref (arg1, &base1, &offset1)
7261 && operand_equal_p (base0, base1, 0))
7263 /* Handle no offsets on both sides specially. */
7264 if (offset0 == NULL_TREE && offset1 == NULL_TREE)
7265 return fold_build2 (code, type, integer_zero_node,
7268 if (!offset0 || !offset1
7269 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
7271 if (offset0 == NULL_TREE)
7272 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
7273 if (offset1 == NULL_TREE)
7274 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
7275 return fold_build2 (code, type, offset0, offset1);
7280 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7281 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7282 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7283 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7284 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7285 && !(flag_wrapv || flag_trapv))
7286 && (TREE_CODE (arg1) == INTEGER_CST
7287 && !TREE_OVERFLOW (arg1)))
7289 tree const1 = TREE_OPERAND (arg0, 1);
7291 tree variable = TREE_OPERAND (arg0, 0);
7294 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7296 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7297 TREE_TYPE (arg1), const2, const1);
7298 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7299 && (TREE_CODE (lhs) != INTEGER_CST
7300 || !TREE_OVERFLOW (lhs)))
7301 return fold_build2 (code, type, variable, lhs);
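/* An illustrative sketch of the constant motion above (an added
   example, signed int x, no -fwrapv/-ftrapv):

       x + 3 < 10    ==>   x < 7
       x - 5 == 2    ==>   x == 7

   The TREE_OVERFLOW checks ensure the adjusted constant is exactly
   representable, so the rewrite does not change the result.  */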
7304 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7306 tree targ0 = strip_float_extensions (arg0);
7307 tree targ1 = strip_float_extensions (arg1);
7308 tree newtype = TREE_TYPE (targ0);
7310 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7311 newtype = TREE_TYPE (targ1);
7313 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7314 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7315 return fold_build2 (code, type, fold_convert (newtype, targ0),
7316 fold_convert (newtype, targ1));
7318 /* (-a) CMP (-b) -> b CMP a */
7319 if (TREE_CODE (arg0) == NEGATE_EXPR
7320 && TREE_CODE (arg1) == NEGATE_EXPR)
7321 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7322 TREE_OPERAND (arg0, 0));
7324 if (TREE_CODE (arg1) == REAL_CST)
7326 REAL_VALUE_TYPE cst;
7327 cst = TREE_REAL_CST (arg1);
7329 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7330 if (TREE_CODE (arg0) == NEGATE_EXPR)
7331 return fold_build2 (swap_tree_comparison (code), type,
7332 TREE_OPERAND (arg0, 0),
7333 build_real (TREE_TYPE (arg1),
7334 REAL_VALUE_NEGATE (cst)));
7336 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7337 /* a CMP (-0) -> a CMP 0 */
7338 if (REAL_VALUE_MINUS_ZERO (cst))
7339 return fold_build2 (code, type, arg0,
7340 build_real (TREE_TYPE (arg1), dconst0));
7342 /* x != NaN is always true, other ops are always false. */
7343 if (REAL_VALUE_ISNAN (cst)
7344 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7346 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7347 return omit_one_operand (type, tem, arg0);
7350 /* Fold comparisons against infinity. */
7351 if (REAL_VALUE_ISINF (cst))
7353 tem = fold_inf_compare (code, type, arg0, arg1);
7354 if (tem != NULL_TREE)
7359 /* If this is a comparison of a real constant with a PLUS_EXPR
7360 or a MINUS_EXPR of a real constant, we can convert it into a
7361 comparison with a revised real constant as long as no overflow
7362 occurs when unsafe_math_optimizations are enabled. */
7363 if (flag_unsafe_math_optimizations
7364 && TREE_CODE (arg1) == REAL_CST
7365 && (TREE_CODE (arg0) == PLUS_EXPR
7366 || TREE_CODE (arg0) == MINUS_EXPR)
7367 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7368 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7369 ? MINUS_EXPR : PLUS_EXPR,
7370 arg1, TREE_OPERAND (arg0, 1), 0))
7371 && ! TREE_CONSTANT_OVERFLOW (tem))
7372 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7374 /* Likewise, we can simplify a comparison of a real constant with
7375 a MINUS_EXPR whose first operand is also a real constant, i.e.
7376 (c1 - x) < c2 becomes x > c1-c2. */
7377 if (flag_unsafe_math_optimizations
7378 && TREE_CODE (arg1) == REAL_CST
7379 && TREE_CODE (arg0) == MINUS_EXPR
7380 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7381 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7383 && ! TREE_CONSTANT_OVERFLOW (tem))
7384 return fold_build2 (swap_tree_comparison (code), type,
7385 TREE_OPERAND (arg0, 1), tem);
7387 /* Fold comparisons against built-in math functions. */
7388 if (TREE_CODE (arg1) == REAL_CST
7389 && flag_unsafe_math_optimizations
7390 && ! flag_errno_math)
7392 enum built_in_function fcode = builtin_mathfn_code (arg0);
7394 if (fcode != END_BUILTINS)
7396 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7397 if (tem != NULL_TREE)
7403 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7404 if (TREE_CONSTANT (arg1)
7405 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7406 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7407 /* This optimization is invalid for ordered comparisons
7408 if CONST+INCR overflows or if foo+incr might overflow.
7409 This optimization is invalid for floating point due to rounding.
7410 For pointer types we assume overflow doesn't happen. */
7411 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7412 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7413 && (code == EQ_EXPR || code == NE_EXPR))))
7415 tree varop, newconst;
7417 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7419 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7420 arg1, TREE_OPERAND (arg0, 1));
7421 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7422 TREE_OPERAND (arg0, 0),
7423 TREE_OPERAND (arg0, 1));
7427 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7428 arg1, TREE_OPERAND (arg0, 1));
7429 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7430 TREE_OPERAND (arg0, 0),
7431 TREE_OPERAND (arg0, 1));
7435 /* If VAROP is a reference to a bitfield, we must mask
7436 the constant by the width of the field. */
7437 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7438 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7439 && host_integerp (DECL_SIZE (TREE_OPERAND
7440 (TREE_OPERAND (varop, 0), 1)), 1))
7442 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7443 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7444 tree folded_compare, shift;
7446 /* First check whether the comparison would come out
7447 always the same. If we don't do that we would
7448 change the meaning with the masking. */
7449 folded_compare = fold_build2 (code, type,
7450 TREE_OPERAND (varop, 0), arg1);
7451 if (TREE_CODE (folded_compare) == INTEGER_CST)
7452 return omit_one_operand (type, folded_compare, varop);
7454 shift = build_int_cst (NULL_TREE,
7455 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7456 shift = fold_convert (TREE_TYPE (varop), shift);
7457 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7459 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7463 return fold_build2 (code, type, varop, newconst);
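/* An illustrative sketch of the post-increment rewrite above (an
   added example):

       i++ == 5   ==>   ++i == 6

   Comparing the incremented value against the incremented constant
   gives the same result, but no longer requires keeping the old
   value of i alive.  */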
7466 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7467 && (TREE_CODE (arg0) == NOP_EXPR
7468 || TREE_CODE (arg0) == CONVERT_EXPR))
7470 /* If we are widening one operand of an integer comparison,
7471 see if the other operand is similarly being widened. Perhaps we
7472 can do the comparison in the narrower type. */
7473 tem = fold_widened_comparison (code, type, arg0, arg1);
7477 /* Or if we are changing signedness. */
7478 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7483 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7484 constant, we can simplify it. */
7485 if (TREE_CODE (arg1) == INTEGER_CST
7486 && (TREE_CODE (arg0) == MIN_EXPR
7487 || TREE_CODE (arg0) == MAX_EXPR)
7488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7490 tem = optimize_minmax_comparison (code, type, op0, op1);
7495 /* Simplify comparison of something with itself. (For IEEE
7496 floating-point, we can only do some of these simplifications.) */
7497 if (operand_equal_p (arg0, arg1, 0))
7502 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7503 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7504 return constant_boolean_node (1, type);
7509 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7510 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7511 return constant_boolean_node (1, type);
7512 return fold_build2 (EQ_EXPR, type, arg0, arg1);
7515 /* For NE, we can only do this simplification for integer types
7516 or when we don't honor IEEE floating point NaNs. */
7517 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7518 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7520 /* ... fall through ... */
7523 return constant_boolean_node (0, type);
7529 /* If we are comparing an expression that just has comparisons
7530 of two integer values, arithmetic expressions of those comparisons,
7531 and constants, we can simplify it. There are only three cases
7532 to check: the two values can either be equal, the first can be
7533 greater, or the second can be greater. Fold the expression for
7534 those three values. Since each value must be 0 or 1, we have
7535 eight possibilities, each of which corresponds to the constant 0
7536 or 1 or one of the six possible comparisons.
7538 This handles common cases like (a > b) == 0 but also handles
7539 expressions like ((x > y) - (y > x)) > 0, which supposedly
7540 occur in macroized code. */
7542 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7544 tree cval1 = 0, cval2 = 0;
7547 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7548 /* Don't handle degenerate cases here; they should already
7549 have been handled anyway. */
7550 && cval1 != 0 && cval2 != 0
7551 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7552 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7553 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7554 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7555 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7556 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7557 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7559 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7560 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7562 /* We can't just pass T to eval_subst in case cval1 or cval2
7563 was the same as ARG1. */
7566 = fold_build2 (code, type,
7567 eval_subst (arg0, cval1, maxval,
7571 = fold_build2 (code, type,
7572 eval_subst (arg0, cval1, maxval,
7576 = fold_build2 (code, type,
7577 eval_subst (arg0, cval1, minval,
7581 /* All three of these results should be 0 or 1. Confirm they are.
7582 Then use those values to select the proper code to use. */
7584 if (TREE_CODE (high_result) == INTEGER_CST
7585 && TREE_CODE (equal_result) == INTEGER_CST
7586 && TREE_CODE (low_result) == INTEGER_CST)
7588 /* Make a 3-bit mask with the high-order bit being the
7589 value for `>', the next for '=', and the low for '<'. */
7590 switch ((integer_onep (high_result) * 4)
7591 + (integer_onep (equal_result) * 2)
7592 + integer_onep (low_result))
7596 return omit_one_operand (type, integer_zero_node, arg0);
7617 return omit_one_operand (type, integer_one_node, arg0);
7621 return save_expr (build2 (code, type, cval1, cval2));
7622 return fold_build2 (code, type, cval1, cval2);
7627 /* Fold a comparison of the address of COMPONENT_REFs with the same
7628 type and component to a comparison of the address of the base
7629 object. In short, &x->a OP &y->a to x OP y and
7630 &x->a OP &y.a to x OP &y */
7631 if (TREE_CODE (arg0) == ADDR_EXPR
7632 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
7633 && TREE_CODE (arg1) == ADDR_EXPR
7634 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
7636 tree cref0 = TREE_OPERAND (arg0, 0);
7637 tree cref1 = TREE_OPERAND (arg1, 0);
7638 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
7640 tree op0 = TREE_OPERAND (cref0, 0);
7641 tree op1 = TREE_OPERAND (cref1, 0);
7642 return fold_build2 (code, type,
7643 build_fold_addr_expr (op0),
7644 build_fold_addr_expr (op1));
7648 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
7649 into a single range test. */
7650 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
7651 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
7652 && TREE_CODE (arg1) == INTEGER_CST
7653 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7654 && !integer_zerop (TREE_OPERAND (arg0, 1))
7655 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7656 && !TREE_OVERFLOW (arg1))
7658 tem = fold_div_compare (code, type, arg0, arg1);
7659 if (tem != NULL_TREE)
7666 /* Fold a binary expression of code CODE and type TYPE with operands
7667 OP0 and OP1. Return the folded expression if folding is
7668 successful. Otherwise, return NULL_TREE. */
7671 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7673 enum tree_code_class kind = TREE_CODE_CLASS (code);
7674 tree arg0, arg1, tem;
7675 tree t1 = NULL_TREE;
7677 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7678 && TREE_CODE_LENGTH (code) == 2
7680 && op1 != NULL_TREE);
7685 /* Strip any conversions that don't change the mode. This is
7686 safe for every expression, except for a comparison expression
7687 because its signedness is derived from its operands. So, in
7688 the latter case, only strip conversions that don't change the
7691 Note that this is done as an internal manipulation within the
7692 constant folder, in order to find the simplest representation
7693 of the arguments so that their form can be studied. In any
7694 cases, the appropriate type conversions should be put back in
7695 the tree that will get out of the constant folder. */
7697 if (kind == tcc_comparison)
7699 STRIP_SIGN_NOPS (arg0);
7700 STRIP_SIGN_NOPS (arg1);
7708 /* Note that TREE_CONSTANT isn't enough: static var addresses are
7709 constant but we can't do arithmetic on them. */
7710 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7711 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7712 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
7713 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
7715 if (kind == tcc_binary)
7716 tem = const_binop (code, arg0, arg1, 0);
7717 else if (kind == tcc_comparison)
7718 tem = fold_relational_const (code, type, arg0, arg1);
7722 if (tem != NULL_TREE)
7724 if (TREE_TYPE (tem) != type)
7725 tem = fold_convert (type, tem);
7730 /* If this is a commutative operation, and ARG0 is a constant, move it
7731 to ARG1 to reduce the number of tests below. */
7732 if (commutative_tree_code (code)
7733 && tree_swap_operands_p (arg0, arg1, true))
7734 return fold_build2 (code, type, op1, op0);
7736 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
7738 First check for cases where an arithmetic operation is applied to a
7739 compound, conditional, or comparison operation. Push the arithmetic
7740 operation inside the compound or conditional to see if any folding
7741 can then be done. Convert comparison to conditional for this purpose.
7742 This also optimizes non-constant cases that used to be done in expand_expr.
7745 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7746 where one of the operands is a comparison and the other is a comparison, a
7747 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7748 code below would make the expression more complex. Change it to a
7749 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7750 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7752 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7753 || code == EQ_EXPR || code == NE_EXPR)
7754 && ((truth_value_p (TREE_CODE (arg0))
7755 && (truth_value_p (TREE_CODE (arg1))
7756 || (TREE_CODE (arg1) == BIT_AND_EXPR
7757 && integer_onep (TREE_OPERAND (arg1, 1)))))
7758 || (truth_value_p (TREE_CODE (arg1))
7759 && (truth_value_p (TREE_CODE (arg0))
7760 || (TREE_CODE (arg0) == BIT_AND_EXPR
7761 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7763 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7764 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7767 fold_convert (boolean_type_node, arg0),
7768 fold_convert (boolean_type_node, arg1));
7770 if (code == EQ_EXPR)
7771 tem = invert_truthvalue (tem);
7773 return fold_convert (type, tem);
7776 if (TREE_CODE_CLASS (code) == tcc_binary
7777 || TREE_CODE_CLASS (code) == tcc_comparison)
7779 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7780 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7781 fold_build2 (code, type,
7782 TREE_OPERAND (arg0, 1), op1));
7783 if (TREE_CODE (arg1) == COMPOUND_EXPR
7784 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7785 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7786 fold_build2 (code, type,
7787 op0, TREE_OPERAND (arg1, 1)));
7789 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7791 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7793 /*cond_first_p=*/1);
7794 if (tem != NULL_TREE)
7798 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7800 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7802 /*cond_first_p=*/0);
7803 if (tem != NULL_TREE)
7811 /* A + (-B) -> A - B */
7812 if (TREE_CODE (arg1) == NEGATE_EXPR)
7813 return fold_build2 (MINUS_EXPR, type,
7814 fold_convert (type, arg0),
7815 fold_convert (type, TREE_OPERAND (arg1, 0)));
7816 /* (-A) + B -> B - A */
7817 if (TREE_CODE (arg0) == NEGATE_EXPR
7818 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7819 return fold_build2 (MINUS_EXPR, type,
7820 fold_convert (type, arg1),
7821 fold_convert (type, TREE_OPERAND (arg0, 0)));
7822 /* Convert ~A + 1 to -A. */
7823 if (INTEGRAL_TYPE_P (type)
7824 && TREE_CODE (arg0) == BIT_NOT_EXPR
7825 && integer_onep (arg1))
7826 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7828 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
7830 if ((TREE_CODE (arg0) == MULT_EXPR
7831 || TREE_CODE (arg1) == MULT_EXPR)
7832 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7834 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7839 if (! FLOAT_TYPE_P (type))
7841 if (integer_zerop (arg1))
7842 return non_lvalue (fold_convert (type, arg0));
7844 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7845 with a constant, and the two constants have no bits in common,
7846 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
7848 if (TREE_CODE (arg0) == BIT_AND_EXPR
7849 && TREE_CODE (arg1) == BIT_AND_EXPR
7850 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7851 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7852 && integer_zerop (const_binop (BIT_AND_EXPR,
7853 TREE_OPERAND (arg0, 1),
7854 TREE_OPERAND (arg1, 1), 0)))
7856 code = BIT_IOR_EXPR;
7860 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7861 (plus (plus (mult) (mult)) (foo)) so that we can
7862 take advantage of the factoring cases below. */
7863 if (((TREE_CODE (arg0) == PLUS_EXPR
7864 || TREE_CODE (arg0) == MINUS_EXPR)
7865 && TREE_CODE (arg1) == MULT_EXPR)
7866 || ((TREE_CODE (arg1) == PLUS_EXPR
7867 || TREE_CODE (arg1) == MINUS_EXPR)
7868 && TREE_CODE (arg0) == MULT_EXPR))
7870 tree parg0, parg1, parg, marg;
7871 enum tree_code pcode;
7873 if (TREE_CODE (arg1) == MULT_EXPR)
7874 parg = arg0, marg = arg1;
7876 parg = arg1, marg = arg0;
7877 pcode = TREE_CODE (parg);
7878 parg0 = TREE_OPERAND (parg, 0);
7879 parg1 = TREE_OPERAND (parg, 1);
7883 if (TREE_CODE (parg0) == MULT_EXPR
7884 && TREE_CODE (parg1) != MULT_EXPR)
7885 return fold_build2 (pcode, type,
7886 fold_build2 (PLUS_EXPR, type,
7887 fold_convert (type, parg0),
7888 fold_convert (type, marg)),
7889 fold_convert (type, parg1));
7890 if (TREE_CODE (parg0) != MULT_EXPR
7891 && TREE_CODE (parg1) == MULT_EXPR)
7892 return fold_build2 (PLUS_EXPR, type,
7893 fold_convert (type, parg0),
7894 fold_build2 (pcode, type,
7895 fold_convert (type, marg),
7900 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7901 of the array. The loop optimizer sometimes produces this type of expression. */
7903 if (TREE_CODE (arg0) == ADDR_EXPR)
7905 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7907 return fold_convert (type, tem);
7909 else if (TREE_CODE (arg1) == ADDR_EXPR)
7911 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7913 return fold_convert (type, tem);
7918 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7919 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7920 return non_lvalue (fold_convert (type, arg0));
7922 /* Likewise if the operands are reversed. */
7923 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7924 return non_lvalue (fold_convert (type, arg1));
7926 /* Convert X + -C into X - C. */
7927 if (TREE_CODE (arg1) == REAL_CST
7928 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7930 tem = fold_negate_const (arg1, type);
7931 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7932 return fold_build2 (MINUS_EXPR, type,
7933 fold_convert (type, arg0),
7934 fold_convert (type, tem));
7937 if (flag_unsafe_math_optimizations
7938 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7939 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7940 && (tem = distribute_real_division (code, type, arg0, arg1)))
7943 /* Convert x+x into x*2.0. */
7944 if (operand_equal_p (arg0, arg1, 0)
7945 && SCALAR_FLOAT_TYPE_P (type))
7946 return fold_build2 (MULT_EXPR, type, arg0,
7947 build_real (type, dconst2));
7949 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7950 if (flag_unsafe_math_optimizations
7951 && TREE_CODE (arg1) == PLUS_EXPR
7952 && TREE_CODE (arg0) != MULT_EXPR)
7954 tree tree10 = TREE_OPERAND (arg1, 0);
7955 tree tree11 = TREE_OPERAND (arg1, 1);
7956 if (TREE_CODE (tree11) == MULT_EXPR
7957 && TREE_CODE (tree10) == MULT_EXPR)
7960 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7961 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7964 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7965 if (flag_unsafe_math_optimizations
7966 && TREE_CODE (arg0) == PLUS_EXPR
7967 && TREE_CODE (arg1) != MULT_EXPR)
7969 tree tree00 = TREE_OPERAND (arg0, 0);
7970 tree tree01 = TREE_OPERAND (arg0, 1);
7971 if (TREE_CODE (tree01) == MULT_EXPR
7972 && TREE_CODE (tree00) == MULT_EXPR)
7975 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7976 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7982 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7983 is a rotate of A by C1 bits. */
7984 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7985 is a rotate of A by B bits. */
7987 enum tree_code code0, code1;
7988 code0 = TREE_CODE (arg0);
7989 code1 = TREE_CODE (arg1);
7990 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7991 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7992 && operand_equal_p (TREE_OPERAND (arg0, 0),
7993 TREE_OPERAND (arg1, 0), 0)
7994 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7996 tree tree01, tree11;
7997 enum tree_code code01, code11;
7999 tree01 = TREE_OPERAND (arg0, 1);
8000 tree11 = TREE_OPERAND (arg1, 1);
8001 STRIP_NOPS (tree01);
8002 STRIP_NOPS (tree11);
8003 code01 = TREE_CODE (tree01);
8004 code11 = TREE_CODE (tree11);
8005 if (code01 == INTEGER_CST
8006 && code11 == INTEGER_CST
8007 && TREE_INT_CST_HIGH (tree01) == 0
8008 && TREE_INT_CST_HIGH (tree11) == 0
8009 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8010 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8011 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8012 code0 == LSHIFT_EXPR ? tree01 : tree11);
8013 else if (code11 == MINUS_EXPR)
8015 tree tree110, tree111;
8016 tree110 = TREE_OPERAND (tree11, 0);
8017 tree111 = TREE_OPERAND (tree11, 1);
8018 STRIP_NOPS (tree110);
8019 STRIP_NOPS (tree111);
8020 if (TREE_CODE (tree110) == INTEGER_CST
8021 && 0 == compare_tree_int (tree110,
8023 (TREE_TYPE (TREE_OPERAND
8025 && operand_equal_p (tree01, tree111, 0))
8026 return build2 ((code0 == LSHIFT_EXPR
8029 type, TREE_OPERAND (arg0, 0), tree01);
8031 else if (code01 == MINUS_EXPR)
8033 tree tree010, tree011;
8034 tree010 = TREE_OPERAND (tree01, 0);
8035 tree011 = TREE_OPERAND (tree01, 1);
8036 STRIP_NOPS (tree010);
8037 STRIP_NOPS (tree011);
8038 if (TREE_CODE (tree010) == INTEGER_CST
8039 && 0 == compare_tree_int (tree010,
8041 (TREE_TYPE (TREE_OPERAND
8043 && operand_equal_p (tree11, tree011, 0))
8044 return build2 ((code0 != LSHIFT_EXPR
8047 type, TREE_OPERAND (arg0, 0), tree11);
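/* Illustrative sketches of the rotate patterns recognized above
   (added examples, for a 32-bit unsigned int x):

       (x << 3) + (x >> 29)         ==>   x rotated left by 3
       (x << n) + (x >> (32 - n))   ==>   x rotated left by n

   covering both the constant-count case (C1 + C2 == precision) and
   the variable-count case (B and Z - B).  */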
8053 /* In most languages, we can't associate operations on floats through
8054 parentheses. Rather than remember where the parentheses were, we
8055 don't associate floats at all, unless the user has specified
8056 -funsafe-math-optimizations. */
8058 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8060 tree var0, con0, lit0, minus_lit0;
8061 tree var1, con1, lit1, minus_lit1;
8063 /* Split both trees into variables, constants, and literals. Then
8064 associate each group together, the constants with literals,
8065 then the result with variables. This increases the chances of
8066 literals being recombined later and of generating relocatable
8067 expressions for the sum of a constant and literal. */
8068 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8069 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8070 code == MINUS_EXPR);
8072 /* Only do something if we found more than two objects. Otherwise,
8073 nothing has changed and we risk infinite recursion. */
8074 if (2 < ((var0 != 0) + (var1 != 0)
8075 + (con0 != 0) + (con1 != 0)
8076 + (lit0 != 0) + (lit1 != 0)
8077 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8079 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8080 if (code == MINUS_EXPR)
8083 var0 = associate_trees (var0, var1, code, type);
8084 con0 = associate_trees (con0, con1, code, type);
8085 lit0 = associate_trees (lit0, lit1, code, type);
8086 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8088 /* Preserve the MINUS_EXPR if the negative part of the literal is
8089 greater than the positive part. Otherwise, the multiplicative
8090 folding code (i.e. extract_muldiv) may be fooled when
8091 unsigned constants are subtracted, like in the following
8092 example: ((X*2 + 4) - 8U)/2. */
8093 if (minus_lit0 && lit0)
8095 if (TREE_CODE (lit0) == INTEGER_CST
8096 && TREE_CODE (minus_lit0) == INTEGER_CST
8097 && tree_int_cst_lt (lit0, minus_lit0))
8099 minus_lit0 = associate_trees (minus_lit0, lit0,
8105 lit0 = associate_trees (lit0, minus_lit0,
8113 return fold_convert (type,
8114 associate_trees (var0, minus_lit0,
8118 con0 = associate_trees (con0, minus_lit0,
8120 return fold_convert (type,
8121 associate_trees (var0, con0,
8126 con0 = associate_trees (con0, lit0, code, type);
8127 return fold_convert (type, associate_trees (var0, con0, code, type));
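/* An illustrative sketch of the re-association above (an added
   example, for int x and y):

       (x + 1) + (y + 2)   ==>   (x + y) + 3

   split_tree separates the variable and literal parts, the literals
   are combined first, and the result is attached to the variable
   part, improving later constant folding.  */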
8135 /* A - (-B) -> A + B */
8136 if (TREE_CODE (arg1) == NEGATE_EXPR)
8137 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8138 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8139 if (TREE_CODE (arg0) == NEGATE_EXPR
8140 && (FLOAT_TYPE_P (type)
8141 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8142 && negate_expr_p (arg1)
8143 && reorder_operands_p (arg0, arg1))
8144 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8145 TREE_OPERAND (arg0, 0));
8146 /* Convert -A - 1 to ~A. */
8147 if (INTEGRAL_TYPE_P (type)
8148 && TREE_CODE (arg0) == NEGATE_EXPR
8149 && integer_onep (arg1))
8150 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
8152 /* Convert -1 - A to ~A. */
8153 if (INTEGRAL_TYPE_P (type)
8154 && integer_all_onesp (arg0))
8155 return fold_build1 (BIT_NOT_EXPR, type, arg1);
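/* Worked example (editorial annotation, not in the original source):
   in two's complement ~A == -A - 1, so with A = 5 both -5 - 1 and
   -1 - 5 fold to ~5 == -6.  */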
8157 if (! FLOAT_TYPE_P (type))
8158 {
8159 if (integer_zerop (arg0))
8160 return negate_expr (fold_convert (type, arg1));
8161 if (integer_zerop (arg1))
8162 return non_lvalue (fold_convert (type, arg0));
8164 /* Fold A - (A & B) into ~B & A. */
8165 if (!TREE_SIDE_EFFECTS (arg0)
8166 && TREE_CODE (arg1) == BIT_AND_EXPR)
8167 {
8168 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8169 return fold_build2 (BIT_AND_EXPR, type,
8170 fold_build1 (BIT_NOT_EXPR, type,
8171 TREE_OPERAND (arg1, 0)),
8172 arg0);
8173 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8174 return fold_build2 (BIT_AND_EXPR, type,
8175 fold_build1 (BIT_NOT_EXPR, type,
8176 TREE_OPERAND (arg1, 1)),
8177 arg0);
8178 }
8180 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8181 any power of 2 minus 1. */
8182 if (TREE_CODE (arg0) == BIT_AND_EXPR
8183 && TREE_CODE (arg1) == BIT_AND_EXPR
8184 && operand_equal_p (TREE_OPERAND (arg0, 0),
8185 TREE_OPERAND (arg1, 0), 0))
8186 {
8187 tree mask0 = TREE_OPERAND (arg0, 1);
8188 tree mask1 = TREE_OPERAND (arg1, 1);
8189 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8191 if (operand_equal_p (tem, mask1, 0))
8192 {
8193 tem = fold_build2 (BIT_XOR_EXPR, type,
8194 TREE_OPERAND (arg0, 0), mask1);
8195 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8196 }
8197 }
8198 }
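/* Worked example (editorial annotation, not in the original source):
   with B = 0x0F and A = 0x3A, (A & ~B) - (A & B) = 0x30 - 0x0A = 0x26,
   and (A ^ B) - B = 0x35 - 0x0F = 0x26 as well.  */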
8200 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8201 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8202 return non_lvalue (fold_convert (type, arg0));
8204 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8205 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8206 (-ARG1 + ARG0) reduces to -ARG1. */
8207 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8208 return negate_expr (fold_convert (type, arg1));
8210 /* Fold &x - &x. This can happen from &x.foo - &x.
8211 This is unsafe for certain floats even in non-IEEE formats.
8212 In IEEE, it is unsafe because it gives the wrong result for NaNs.
8213 Also note that operand_equal_p is always false if an operand
8214 is volatile. */
8216 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8217 && operand_equal_p (arg0, arg1, 0))
8218 return fold_convert (type, integer_zero_node);
8220 /* A - B -> A + (-B) if B is easily negatable. */
8221 if (negate_expr_p (arg1)
8222 && ((FLOAT_TYPE_P (type)
8223 /* Avoid this transformation if B is a positive REAL_CST. */
8224 && (TREE_CODE (arg1) != REAL_CST
8225 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8226 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8227 return fold_build2 (PLUS_EXPR, type,
8228 fold_convert (type, arg0),
8229 fold_convert (type, negate_expr (arg1)));
8231 /* Try folding difference of addresses. */
8232 {
8233 HOST_WIDE_INT diff;
8235 if ((TREE_CODE (arg0) == ADDR_EXPR
8236 || TREE_CODE (arg1) == ADDR_EXPR)
8237 && ptr_difference_const (arg0, arg1, &diff))
8238 return build_int_cst_type (type, diff);
8239 }
8241 /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]). */
8242 if (TREE_CODE (arg0) == ADDR_EXPR
8243 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8244 && TREE_CODE (arg1) == ADDR_EXPR
8245 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8246 {
8247 tree aref0 = TREE_OPERAND (arg0, 0);
8248 tree aref1 = TREE_OPERAND (arg1, 0);
8249 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8250 TREE_OPERAND (aref1, 0), 0))
8251 {
8252 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8253 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8254 tree esz = array_ref_element_size (aref0);
8255 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8256 return fold_build2 (MULT_EXPR, type, diff,
8257 fold_convert (type, esz));
8258 }
8259 }
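/* Worked example (editorial annotation, not in the original source):
   for int a[10], the byte-level difference &a[5] - &a[2] folds to
   (5 - 2) * sizeof (int) = 12; the front end's later division by the
   element size then yields the C-level result 3.  */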
8262 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8263 of the array. The loop optimizer sometimes produces this type of
8264 expression. */
8265 if (TREE_CODE (arg0) == ADDR_EXPR)
8266 {
8267 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8268 if (tem)
8269 return fold_convert (type, tem);
8270 }
8272 if (flag_unsafe_math_optimizations
8273 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8274 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8275 && (tem = distribute_real_division (code, type, arg0, arg1)))
8276 return tem;
8278 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8279 same or one. */
8280 if ((TREE_CODE (arg0) == MULT_EXPR
8281 || TREE_CODE (arg1) == MULT_EXPR)
8282 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8283 {
8284 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8285 if (tem)
8286 return tem;
8287 }
8289 return NULL_TREE;
8291 case MULT_EXPR:
8292 /* (-A) * (-B) -> A * B */
8293 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8294 return fold_build2 (MULT_EXPR, type,
8295 TREE_OPERAND (arg0, 0),
8296 negate_expr (arg1));
8297 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8298 return fold_build2 (MULT_EXPR, type,
8299 negate_expr (arg0),
8300 TREE_OPERAND (arg1, 0));
8302 if (! FLOAT_TYPE_P (type))
8303 {
8304 if (integer_zerop (arg1))
8305 return omit_one_operand (type, arg1, arg0);
8306 if (integer_onep (arg1))
8307 return non_lvalue (fold_convert (type, arg0));
8308 /* Transform x * -1 into -x. */
8309 if (integer_all_onesp (arg1))
8310 return fold_convert (type, negate_expr (arg0));
8312 /* (a * (1 << b)) is (a << b) */
8313 if (TREE_CODE (arg1) == LSHIFT_EXPR
8314 && integer_onep (TREE_OPERAND (arg1, 0)))
8315 return fold_build2 (LSHIFT_EXPR, type, arg0,
8316 TREE_OPERAND (arg1, 1));
8317 if (TREE_CODE (arg0) == LSHIFT_EXPR
8318 && integer_onep (TREE_OPERAND (arg0, 0)))
8319 return fold_build2 (LSHIFT_EXPR, type, arg1,
8320 TREE_OPERAND (arg0, 1));
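/* Worked example (editorial annotation, not in the original source):
   5 * (1 << 3) == 40 == 5 << 3, so a multiplication by a power of two
   built with a shift is replaced by a single shift of the other
   operand.  */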
8322 if (TREE_CODE (arg1) == INTEGER_CST
8323 && 0 != (tem = extract_muldiv (op0,
8324 fold_convert (type, arg1),
8325 code, NULL_TREE)))
8326 return fold_convert (type, tem);
8327 }
8329 else
8330 {
8331 /* Maybe fold x * 0 to 0. The expressions aren't the same
8332 when x is NaN, since x * 0 is also NaN. Nor are they the
8333 same in modes with signed zeros, since multiplying a
8334 negative value by 0 gives -0, not +0. */
8335 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8336 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8337 && real_zerop (arg1))
8338 return omit_one_operand (type, arg1, arg0);
8339 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8340 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8341 && real_onep (arg1))
8342 return non_lvalue (fold_convert (type, arg0));
8344 /* Transform x * -1.0 into -x. */
8345 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8346 && real_minus_onep (arg1))
8347 return fold_convert (type, negate_expr (arg0));
8349 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8350 if (flag_unsafe_math_optimizations
8351 && TREE_CODE (arg0) == RDIV_EXPR
8352 && TREE_CODE (arg1) == REAL_CST
8353 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8354 {
8355 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8356 arg1, 0);
8357 if (tem)
8358 return fold_build2 (RDIV_EXPR, type, tem,
8359 TREE_OPERAND (arg0, 1));
8360 }
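/* Worked example (editorial annotation, not in the original source):
   under -funsafe-math-optimizations, (6.0 / x) * 0.5 is folded to
   3.0 / x by multiplying the two constants first.  */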
8362 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8363 if (operand_equal_p (arg0, arg1, 0))
8364 {
8365 tree tem = fold_strip_sign_ops (arg0);
8366 if (tem != NULL_TREE)
8367 {
8368 tem = fold_convert (type, tem);
8369 return fold_build2 (MULT_EXPR, type, tem, tem);
8370 }
8371 }
8373 if (flag_unsafe_math_optimizations)
8374 {
8375 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8376 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8378 /* Optimizations of root(...)*root(...). */
8379 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8380 {
8381 tree rootfn, arg, arglist;
8382 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8383 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8385 /* Optimize sqrt(x)*sqrt(x) as x. */
8386 if (BUILTIN_SQRT_P (fcode0)
8387 && operand_equal_p (arg00, arg10, 0)
8388 && ! HONOR_SNANS (TYPE_MODE (type)))
8389 return arg00;
8391 /* Optimize root(x)*root(y) as root(x*y). */
8392 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8393 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8394 arglist = build_tree_list (NULL_TREE, arg);
8395 return build_function_call_expr (rootfn, arglist);
8396 }
8398 /* Optimize expN(x)*expN(y) as expN(x+y). */
8399 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8400 {
8401 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8402 tree arg = fold_build2 (PLUS_EXPR, type,
8403 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8404 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8405 tree arglist = build_tree_list (NULL_TREE, arg);
8406 return build_function_call_expr (expfn, arglist);
8407 }
8409 /* Optimizations of pow(...)*pow(...). */
8410 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8411 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8412 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8413 {
8414 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8415 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8416 1)));
8417 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8418 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8419 1)));
8421 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8422 if (operand_equal_p (arg01, arg11, 0))
8423 {
8424 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8425 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8426 tree arglist = tree_cons (NULL_TREE, arg,
8427 build_tree_list (NULL_TREE,
8428 arg01));
8429 return build_function_call_expr (powfn, arglist);
8430 }
8432 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8433 if (operand_equal_p (arg00, arg10, 0))
8434 {
8435 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8436 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8437 tree arglist = tree_cons (NULL_TREE, arg00,
8438 build_tree_list (NULL_TREE,
8439 arg));
8440 return build_function_call_expr (powfn, arglist);
8441 }
8442 }
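/* Worked example (editorial annotation, not in the original source):
   pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0), and
   pow (x, y) * pow (z, y) becomes pow (x * z, y); both rewrites are
   only valid under -funsafe-math-optimizations.  */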
8444 /* Optimize tan(x)*cos(x) as sin(x). */
8445 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8446 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8447 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8448 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8449 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8450 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8451 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8452 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8453 {
8454 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8456 if (sinfn != NULL_TREE)
8457 return build_function_call_expr (sinfn,
8458 TREE_OPERAND (arg0, 1));
8459 }
8461 /* Optimize x*pow(x,c) as pow(x,c+1). */
8462 if (fcode1 == BUILT_IN_POW
8463 || fcode1 == BUILT_IN_POWF
8464 || fcode1 == BUILT_IN_POWL)
8465 {
8466 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8467 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8468 1)));
8469 if (TREE_CODE (arg11) == REAL_CST
8470 && ! TREE_CONSTANT_OVERFLOW (arg11)
8471 && operand_equal_p (arg0, arg10, 0))
8472 {
8473 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8474 tree arg, arglist;
8475 REAL_VALUE_TYPE c;
8477 c = TREE_REAL_CST (arg11);
8478 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8479 arg = build_real (type, c);
8480 arglist = build_tree_list (NULL_TREE, arg);
8481 arglist = tree_cons (NULL_TREE, arg0, arglist);
8482 return build_function_call_expr (powfn, arglist);
8483 }
8484 }
8486 /* Optimize pow(x,c)*x as pow(x,c+1). */
8487 if (fcode0 == BUILT_IN_POW
8488 || fcode0 == BUILT_IN_POWF
8489 || fcode0 == BUILT_IN_POWL)
8490 {
8491 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8492 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8493 1)));
8494 if (TREE_CODE (arg01) == REAL_CST
8495 && ! TREE_CONSTANT_OVERFLOW (arg01)
8496 && operand_equal_p (arg1, arg00, 0))
8497 {
8498 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8499 tree arg, arglist;
8500 REAL_VALUE_TYPE c;
8502 c = TREE_REAL_CST (arg01);
8503 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8504 arg = build_real (type, c);
8505 arglist = build_tree_list (NULL_TREE, arg);
8506 arglist = tree_cons (NULL_TREE, arg1, arglist);
8507 return build_function_call_expr (powfn, arglist);
8508 }
8509 }
8511 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8512 if (! optimize_size
8513 && operand_equal_p (arg0, arg1, 0))
8514 {
8515 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8517 if (powfn)
8518 {
8519 tree arg = build_real (type, dconst2);
8520 tree arglist = build_tree_list (NULL_TREE, arg);
8521 arglist = tree_cons (NULL_TREE, arg0, arglist);
8522 return build_function_call_expr (powfn, arglist);
8523 }
8524 }
8525 }
8526 }
8527 goto associate;
8529 case BIT_IOR_EXPR:
8530 bit_ior:
8531 if (integer_all_onesp (arg1))
8532 return omit_one_operand (type, arg1, arg0);
8533 if (integer_zerop (arg1))
8534 return non_lvalue (fold_convert (type, arg0));
8535 if (operand_equal_p (arg0, arg1, 0))
8536 return non_lvalue (fold_convert (type, arg0));
8538 /* ~X | X is -1. */
8539 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8540 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8541 {
8542 t1 = build_int_cst (type, -1);
8543 t1 = force_fit_type (t1, 0, false, false);
8544 return omit_one_operand (type, t1, arg1);
8545 }
8547 /* X | ~X is -1. */
8548 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8549 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8550 {
8551 t1 = build_int_cst (type, -1);
8552 t1 = force_fit_type (t1, 0, false, false);
8553 return omit_one_operand (type, t1, arg0);
8554 }
8556 /* Canonicalize (X & C1) | C2. */
8557 if (TREE_CODE (arg0) == BIT_AND_EXPR
8558 && TREE_CODE (arg1) == INTEGER_CST
8559 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8560 {
8561 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
8562 int width = TYPE_PRECISION (type);
8563 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
8564 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8565 hi2 = TREE_INT_CST_HIGH (arg1);
8566 lo2 = TREE_INT_CST_LOW (arg1);
8568 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
8569 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
8570 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8572 if (width > HOST_BITS_PER_WIDE_INT)
8573 {
8574 mhi = (unsigned HOST_WIDE_INT) -1
8575 >> (2 * HOST_BITS_PER_WIDE_INT - width);
8576 mlo = -1;
8577 }
8578 else
8579 {
8580 mhi = 0;
8581 mlo = (unsigned HOST_WIDE_INT) -1
8582 >> (HOST_BITS_PER_WIDE_INT - width);
8583 }
8585 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
8586 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
8587 return fold_build2 (BIT_IOR_EXPR, type,
8588 TREE_OPERAND (arg0, 0), arg1);
8590 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
8593 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
8594 return fold_build2 (BIT_IOR_EXPR, type,
8595 fold_build2 (BIT_AND_EXPR, type,
8596 TREE_OPERAND (arg0, 0),
8597 build_int_cst_wide (type,
8598 lo1 & ~lo2,
8599 hi1 & ~hi2)),
8600 arg1);
8601 }
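/* Worked example (editorial annotation, not in the original source):
   (X & 0xF0) | 0x3C: here C1 & ~C2 = 0xC0 != C1, so the expression is
   canonicalized to (X & 0xC0) | 0x3C; bits 5-2 are forced to the
   values in C2 either way, so only bits 7-6 still depend on X.  */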
8603 /* (X & Y) | Y is (X, Y). */
8604 if (TREE_CODE (arg0) == BIT_AND_EXPR
8605 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8606 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8607 /* (X & Y) | X is (Y, X). */
8608 if (TREE_CODE (arg0) == BIT_AND_EXPR
8609 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8610 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
8611 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
8612 /* X | (X & Y) is (Y, X). */
8613 if (TREE_CODE (arg1) == BIT_AND_EXPR
8614 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8615 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
8616 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
8617 /* X | (Y & X) is (Y, X). */
8618 if (TREE_CODE (arg1) == BIT_AND_EXPR
8619 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
8620 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8621 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
8623 t1 = distribute_bit_expr (code, type, arg0, arg1);
8624 if (t1 != NULL_TREE)
8625 return t1;
8627 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8629 This results in more efficient code for machines without a NAND
8630 instruction. Combine will canonicalize to the first form
8631 which will allow use of NAND instructions provided by the
8632 backend if they exist. */
8633 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8634 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8635 {
8636 return fold_build1 (BIT_NOT_EXPR, type,
8637 build2 (BIT_AND_EXPR, type,
8638 TREE_OPERAND (arg0, 0),
8639 TREE_OPERAND (arg1, 0)));
8640 }
8642 /* See if this can be simplified into a rotate first. If that
8643 is unsuccessful continue in the association code. */
8644 goto bit_rotate;
8646 case BIT_XOR_EXPR:
8647 if (integer_zerop (arg1))
8648 return non_lvalue (fold_convert (type, arg0));
8649 if (integer_all_onesp (arg1))
8650 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8651 if (operand_equal_p (arg0, arg1, 0))
8652 return omit_one_operand (type, integer_zero_node, arg0);
8654 /* ~X ^ X is -1. */
8655 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8656 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8657 {
8658 t1 = build_int_cst (type, -1);
8659 t1 = force_fit_type (t1, 0, false, false);
8660 return omit_one_operand (type, t1, arg1);
8661 }
8663 /* X ^ ~X is -1. */
8664 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8665 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8666 {
8667 t1 = build_int_cst (type, -1);
8668 t1 = force_fit_type (t1, 0, false, false);
8669 return omit_one_operand (type, t1, arg0);
8670 }
8672 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8673 with a constant, and the two constants have no bits in common,
8674 we should treat this as a BIT_IOR_EXPR since this may produce more
8675 simplifications. */
8676 if (TREE_CODE (arg0) == BIT_AND_EXPR
8677 && TREE_CODE (arg1) == BIT_AND_EXPR
8678 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8679 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8680 && integer_zerop (const_binop (BIT_AND_EXPR,
8681 TREE_OPERAND (arg0, 1),
8682 TREE_OPERAND (arg1, 1), 0)))
8683 {
8684 code = BIT_IOR_EXPR;
8685 goto bit_ior;
8686 }
8688 /* (X | Y) ^ X -> Y & ~X. */
8689 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8690 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8691 {
8692 tree t2 = TREE_OPERAND (arg0, 1);
8693 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8694 arg1);
8695 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8696 fold_convert (type, t1));
8697 return t1;
8698 }
8700 /* (Y | X) ^ X -> Y & ~X. */
8701 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8702 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8703 {
8704 tree t2 = TREE_OPERAND (arg0, 0);
8705 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8706 arg1);
8707 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8708 fold_convert (type, t1));
8709 return t1;
8710 }
8712 /* X ^ (X | Y) -> Y & ~X. */
8713 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8714 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8715 {
8716 tree t2 = TREE_OPERAND (arg1, 1);
8717 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8718 arg0);
8719 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8720 fold_convert (type, t1));
8721 return t1;
8722 }
8724 /* X ^ (Y | X) -> Y & ~X. */
8725 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8726 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8727 {
8728 tree t2 = TREE_OPERAND (arg1, 0);
8729 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8730 arg0);
8731 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8732 fold_convert (type, t1));
8733 return t1;
8734 }
8736 /* Convert ~X ^ ~Y to X ^ Y. */
8737 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8738 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8739 return fold_build2 (code, type,
8740 fold_convert (type, TREE_OPERAND (arg0, 0)),
8741 fold_convert (type, TREE_OPERAND (arg1, 0)));
8743 /* See if this can be simplified into a rotate first. If that
8744 is unsuccessful continue in the association code. */
8745 goto bit_rotate;
8747 case BIT_AND_EXPR:
8748 if (integer_all_onesp (arg1))
8749 return non_lvalue (fold_convert (type, arg0));
8750 if (integer_zerop (arg1))
8751 return omit_one_operand (type, arg1, arg0);
8752 if (operand_equal_p (arg0, arg1, 0))
8753 return non_lvalue (fold_convert (type, arg0));
8755 /* ~X & X is always zero. */
8756 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8757 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8758 return omit_one_operand (type, integer_zero_node, arg1);
8760 /* X & ~X is always zero. */
8761 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8762 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8763 return omit_one_operand (type, integer_zero_node, arg0);
8765 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
8766 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8767 && TREE_CODE (arg1) == INTEGER_CST
8768 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8769 return fold_build2 (BIT_IOR_EXPR, type,
8770 fold_build2 (BIT_AND_EXPR, type,
8771 TREE_OPERAND (arg0, 0), arg1),
8772 fold_build2 (BIT_AND_EXPR, type,
8773 TREE_OPERAND (arg0, 1), arg1));
8775 /* (X | Y) & Y is (X, Y). */
8776 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8777 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8778 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8779 /* (X | Y) & X is (Y, X). */
8780 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8781 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8782 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
8783 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
8784 /* X & (X | Y) is (Y, X). */
8785 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8786 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8787 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
8788 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
8789 /* X & (Y | X) is (Y, X). */
8790 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8791 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
8792 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8793 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
8795 t1 = distribute_bit_expr (code, type, arg0, arg1);
8796 if (t1 != NULL_TREE)
8798 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8799 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8800 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8801 {
8802 unsigned int prec
8803 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8805 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8806 && (~TREE_INT_CST_LOW (arg1)
8807 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8808 return fold_convert (type, TREE_OPERAND (arg0, 0));
8809 }
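/* Worked example (editorial annotation, not in the original source):
   for unsigned char c, ((int) c & 0377) keeps all eight low bits, and
   the conversion already guarantees the upper bits are zero, so the
   whole expression folds to (int) c.  */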
8811 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8813 This results in more efficient code for machines without a NOR
8814 instruction. Combine will canonicalize to the first form
8815 which will allow use of NOR instructions provided by the
8816 backend if they exist. */
8817 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8818 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8819 {
8820 return fold_build1 (BIT_NOT_EXPR, type,
8821 build2 (BIT_IOR_EXPR, type,
8822 TREE_OPERAND (arg0, 0),
8823 TREE_OPERAND (arg1, 0)));
8824 }
8826 goto associate;
8828 case RDIV_EXPR:
8829 /* Don't touch a floating-point divide by zero unless the mode
8830 of the constant can represent infinity. */
8831 if (TREE_CODE (arg1) == REAL_CST
8832 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8833 && real_zerop (arg1))
8834 return NULL_TREE;
8836 /* Optimize A / A to 1.0 if we don't care about
8837 NaNs or Infinities. */
8838 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8839 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8840 && operand_equal_p (arg0, arg1, 0))
8841 {
8842 tree r = build_real (TREE_TYPE (arg0), dconst1);
8844 return omit_two_operands (type, r, arg0, arg1);
8845 }
8847 /* (-A) / (-B) -> A / B */
8848 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8849 return fold_build2 (RDIV_EXPR, type,
8850 TREE_OPERAND (arg0, 0),
8851 negate_expr (arg1));
8852 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8853 return fold_build2 (RDIV_EXPR, type,
8854 negate_expr (arg0),
8855 TREE_OPERAND (arg1, 0));
8857 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8858 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8859 && real_onep (arg1))
8860 return non_lvalue (fold_convert (type, arg0));
8862 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8863 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8864 && real_minus_onep (arg1))
8865 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8867 /* If ARG1 is a constant, we can convert this to a multiply by the
8868 reciprocal. This does not have the same rounding properties,
8869 so only do this if -funsafe-math-optimizations. We can actually
8870 always safely do it if ARG1 is a power of two, but it's hard to
8871 tell if it is or not in a portable manner. */
8872 if (TREE_CODE (arg1) == REAL_CST)
8873 {
8874 if (flag_unsafe_math_optimizations
8875 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8876 arg1, 0)))
8877 return fold_build2 (MULT_EXPR, type, arg0, tem);
8878 /* Find the reciprocal if optimizing and the result is exact. */
8879 if (optimize)
8880 {
8881 REAL_VALUE_TYPE r;
8882 r = TREE_REAL_CST (arg1);
8883 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8884 {
8885 tem = build_real (type, r);
8886 return fold_build2 (MULT_EXPR, type,
8887 fold_convert (type, arg0), tem);
8888 }
8889 }
8890 }
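/* Worked example (editorial annotation, not in the original source):
   x / 4.0 folds to x * 0.25 when optimizing, because the reciprocal
   of a power of two is exact; x / 5.0 becomes x * 0.2 only under
   -funsafe-math-optimizations, since 0.2 is not exactly
   representable.  */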
8891 /* Convert A/B/C to A/(B*C). */
8892 if (flag_unsafe_math_optimizations
8893 && TREE_CODE (arg0) == RDIV_EXPR)
8894 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8895 fold_build2 (MULT_EXPR, type,
8896 TREE_OPERAND (arg0, 1), arg1));
8898 /* Convert A/(B/C) to (A/B)*C. */
8899 if (flag_unsafe_math_optimizations
8900 && TREE_CODE (arg1) == RDIV_EXPR)
8901 return fold_build2 (MULT_EXPR, type,
8902 fold_build2 (RDIV_EXPR, type, arg0,
8903 TREE_OPERAND (arg1, 0)),
8904 TREE_OPERAND (arg1, 1));
8906 /* Convert C1/(X*C2) into (C1/C2)/X. */
8907 if (flag_unsafe_math_optimizations
8908 && TREE_CODE (arg1) == MULT_EXPR
8909 && TREE_CODE (arg0) == REAL_CST
8910 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8911 {
8912 tree tem = const_binop (RDIV_EXPR, arg0,
8913 TREE_OPERAND (arg1, 1), 0);
8914 if (tem)
8915 return fold_build2 (RDIV_EXPR, type, tem,
8916 TREE_OPERAND (arg1, 0));
8917 }
8919 if (flag_unsafe_math_optimizations)
8920 {
8921 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8922 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8924 /* Optimize sin(x)/cos(x) as tan(x). */
8925 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8926 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8927 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8928 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8929 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8930 {
8931 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8933 if (tanfn != NULL_TREE)
8934 return build_function_call_expr (tanfn,
8935 TREE_OPERAND (arg0, 1));
8936 }
8938 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8939 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8940 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8941 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8942 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8943 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8944 {
8945 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8947 if (tanfn != NULL_TREE)
8948 {
8949 tree tmp = TREE_OPERAND (arg0, 1);
8950 tmp = build_function_call_expr (tanfn, tmp);
8951 return fold_build2 (RDIV_EXPR, type,
8952 build_real (type, dconst1), tmp);
8953 }
8954 }
8956 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
8957 NaNs or Infinities. */
8958 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
8959 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
8960 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
8961 {
8962 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8963 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8965 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8966 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8967 && operand_equal_p (arg00, arg01, 0))
8968 {
8969 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8971 if (cosfn != NULL_TREE)
8972 return build_function_call_expr (cosfn,
8973 TREE_OPERAND (arg0, 1));
8974 }
8975 }
8977 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
8978 NaNs or Infinities. */
8979 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
8980 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
8981 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
8982 {
8983 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8984 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8986 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8987 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8988 && operand_equal_p (arg00, arg01, 0))
8989 {
8990 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8992 if (cosfn != NULL_TREE)
8993 {
8994 tree tmp = TREE_OPERAND (arg0, 1);
8995 tmp = build_function_call_expr (cosfn, tmp);
8996 return fold_build2 (RDIV_EXPR, type,
8997 build_real (type, dconst1),
8998 tmp);
8999 }
9000 }
9001 }
9003 /* Optimize pow(x,c)/x as pow(x,c-1). */
9004 if (fcode0 == BUILT_IN_POW
9005 || fcode0 == BUILT_IN_POWF
9006 || fcode0 == BUILT_IN_POWL)
9007 {
9008 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9009 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9010 if (TREE_CODE (arg01) == REAL_CST
9011 && ! TREE_CONSTANT_OVERFLOW (arg01)
9012 && operand_equal_p (arg1, arg00, 0))
9013 {
9014 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9015 tree arg, arglist;
9016 REAL_VALUE_TYPE c;
9018 c = TREE_REAL_CST (arg01);
9019 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9020 arg = build_real (type, c);
9021 arglist = build_tree_list (NULL_TREE, arg);
9022 arglist = tree_cons (NULL_TREE, arg1, arglist);
9023 return build_function_call_expr (powfn, arglist);
9024 }
9025 }
9027 /* Optimize x/expN(y) into x*expN(-y). */
9028 if (BUILTIN_EXPONENT_P (fcode1))
9029 {
9030 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9031 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9032 tree arglist = build_tree_list (NULL_TREE,
9033 fold_convert (type, arg));
9034 arg1 = build_function_call_expr (expfn, arglist);
9035 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9036 }
9038 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9039 if (fcode1 == BUILT_IN_POW
9040 || fcode1 == BUILT_IN_POWF
9041 || fcode1 == BUILT_IN_POWL)
9042 {
9043 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9044 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9045 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9046 tree neg11 = fold_convert (type, negate_expr (arg11));
9047 tree arglist = tree_cons(NULL_TREE, arg10,
9048 build_tree_list (NULL_TREE, neg11));
9049 arg1 = build_function_call_expr (powfn, arglist);
9050 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9051 }
9052 }
9053 return NULL_TREE;
9055 case TRUNC_DIV_EXPR:
9056 case ROUND_DIV_EXPR:
9057 case FLOOR_DIV_EXPR:
9058 case CEIL_DIV_EXPR:
9059 case EXACT_DIV_EXPR:
9060 if (integer_onep (arg1))
9061 return non_lvalue (fold_convert (type, arg0));
9062 if (integer_zerop (arg1))
9063 return NULL_TREE;
9064 /* X / -1 is -X. */
9065 if (!TYPE_UNSIGNED (type)
9066 && TREE_CODE (arg1) == INTEGER_CST
9067 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9068 && TREE_INT_CST_HIGH (arg1) == -1)
9069 return fold_convert (type, negate_expr (arg0));
9071 /* Convert -A / -B to A / B when the type is signed and overflow is
9072 undefined. */
9073 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9074 && TREE_CODE (arg0) == NEGATE_EXPR
9075 && negate_expr_p (arg1))
9076 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9077 negate_expr (arg1));
9078 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9079 && TREE_CODE (arg1) == NEGATE_EXPR
9080 && negate_expr_p (arg0))
9081 return fold_build2 (code, type, negate_expr (arg0),
9082 TREE_OPERAND (arg1, 0));
9084 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9085 operation, EXACT_DIV_EXPR.
9087 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9088 At one time others generated faster code, but it's not clear if they do
9089 after the last round of changes to the DIV code in expmed.c. */
9090 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9091 && multiple_of_p (type, arg0, arg1))
9092 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9094 if (TREE_CODE (arg1) == INTEGER_CST
9095 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9096 return fold_convert (type, tem);
9098 return NULL_TREE;
9100 case CEIL_MOD_EXPR:
9101 case FLOOR_MOD_EXPR:
9102 case ROUND_MOD_EXPR:
9103 case TRUNC_MOD_EXPR:
9104 /* X % 1 is always zero, but be sure to preserve any side
9105 effects in X. */
9106 if (integer_onep (arg1))
9107 return omit_one_operand (type, integer_zero_node, arg0);
9109 /* X % 0, return X % 0 unchanged so that we can get the
9110 proper warnings and errors. */
9111 if (integer_zerop (arg1))
9112 return NULL_TREE;
9114 /* 0 % X is always zero, but be sure to preserve any side
9115 effects in X. Place this after checking for X == 0. */
9116 if (integer_zerop (arg0))
9117 return omit_one_operand (type, integer_zero_node, arg1);
9119 /* X % -1 is zero. */
9120 if (!TYPE_UNSIGNED (type)
9121 && TREE_CODE (arg1) == INTEGER_CST
9122 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9123 && TREE_INT_CST_HIGH (arg1) == -1)
9124 return omit_one_operand (type, integer_zero_node, arg0);
9126 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9127 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9128 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9129 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
9130 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
9131 {
9132 unsigned HOST_WIDE_INT high, low;
9133 tree mask;
9134 int l;
9136 l = tree_log2 (arg1);
9137 if (l >= HOST_BITS_PER_WIDE_INT)
9138 {
9139 high = ((unsigned HOST_WIDE_INT) 1
9140 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
9141 low = -1;
9142 }
9143 else
9144 {
9145 high = 0;
9146 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
9147 }
9149 mask = build_int_cst_wide (type, low, high);
9150 return fold_build2 (BIT_AND_EXPR, type,
9151 fold_convert (type, arg0), mask);
9152 }
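/* Worked example (editorial annotation, not in the original source):
   for unsigned x, x % 8 folds to x & 7; the mask built above is
   C - 1 with all bits below log2 (C) set.  */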
9154 /* X % -C is the same as X % C. */
9155 if (code == TRUNC_MOD_EXPR
9156 && !TYPE_UNSIGNED (type)
9157 && TREE_CODE (arg1) == INTEGER_CST
9158 && !TREE_CONSTANT_OVERFLOW (arg1)
9159 && TREE_INT_CST_HIGH (arg1) < 0
9160 && !flag_trapv
9161 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9162 && !sign_bit_p (arg1, arg1))
9163 return fold_build2 (code, type, fold_convert (type, arg0),
9164 fold_convert (type, negate_expr (arg1)));
9166 /* X % -Y is the same as X % Y. */
9167 if (code == TRUNC_MOD_EXPR
9168 && !TYPE_UNSIGNED (type)
9169 && TREE_CODE (arg1) == NEGATE_EXPR
9170 && !flag_trapv)
9171 return fold_build2 (code, type, fold_convert (type, arg0),
9172 fold_convert (type, TREE_OPERAND (arg1, 0)));
9174 if (TREE_CODE (arg1) == INTEGER_CST
9175 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9176 return fold_convert (type, tem);
9178 return NULL_TREE;
9180 case LROTATE_EXPR:
9181 case RROTATE_EXPR:
9182 if (integer_all_onesp (arg0))
9183 return omit_one_operand (type, arg0, arg1);
9184 goto shift;
9186 case RSHIFT_EXPR:
9187 /* Optimize -1 >> x for arithmetic right shifts. */
9188 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9189 return omit_one_operand (type, arg0, arg1);
9190 /* ... fall through ... */
9192 case LSHIFT_EXPR:
9193 shift:
9194 if (integer_zerop (arg1))
9195 return non_lvalue (fold_convert (type, arg0));
9196 if (integer_zerop (arg0))
9197 return omit_one_operand (type, arg0, arg1);
9199 /* Since negative shift count is not well-defined,
9200 don't try to compute it in the compiler. */
9201 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9202 return NULL_TREE;
9204 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9205 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
9206 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9207 && host_integerp (TREE_OPERAND (arg0, 1), false)
9208 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9209 {
9210 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9211 + TREE_INT_CST_LOW (arg1));
9213 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9214 being well defined. */
9215 if (low >= TYPE_PRECISION (type))
9216 {
9217 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9218 low = low % TYPE_PRECISION (type);
9219 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9220 return build_int_cst (type, 0);
9221 else
9222 low = TYPE_PRECISION (type) - 1;
9223 }
9225 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9226 build_int_cst (type, low));
9227 }
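/* Worked example (editorial annotation, not in the original source):
   on a 32-bit type, (x >> 16) >> 20 has a combined count of 36: for a
   rotate it is reduced modulo 32, for an unsigned right shift or any
   left shift the result is 0, and for a signed right shift it is
   clamped to x >> 31.  */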
9229 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9230 into x & ((unsigned)-1 >> c) for unsigned types. */
9231 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9232 || (TYPE_UNSIGNED (type)
9233 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9234 && host_integerp (arg1, false)
9235 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9236 && host_integerp (TREE_OPERAND (arg0, 1), false)
9237 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9238 {
9239 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9240 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9241 tree lshift;
9242 tree arg00;
9244 if (low0 == low1)
9245 {
9246 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9248 lshift = build_int_cst (type, -1);
9249 lshift = int_const_binop (code, lshift, arg1, 0);
9251 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9252 }
9253 }
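/* Worked example (editorial annotation, not in the original source):
   (x >> 4) << 4 folds to x & (-1 << 4), i.e. x & ~15, and for
   unsigned x, (x << 4) >> 4 folds to x & ((unsigned) -1 >> 4).  */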
9255 /* Rewrite an LROTATE_EXPR by a constant into an
9256 RROTATE_EXPR by a new constant. */
9257 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9258 {
9259 tree tem = build_int_cst (NULL_TREE,
9260 GET_MODE_BITSIZE (TYPE_MODE (type)));
9261 tem = fold_convert (TREE_TYPE (arg1), tem);
9262 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9263 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
9264 }
9266 /* If we have a rotate of a bit operation with the rotate count and
9267 the second operand of the bit operation both constant,
9268 permute the two operations. */
9269 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9270 && (TREE_CODE (arg0) == BIT_AND_EXPR
9271 || TREE_CODE (arg0) == BIT_IOR_EXPR
9272 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9273 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9274 return fold_build2 (TREE_CODE (arg0), type,
9275 fold_build2 (code, type,
9276 TREE_OPERAND (arg0, 0), arg1),
9277 fold_build2 (code, type,
9278 TREE_OPERAND (arg0, 1), arg1));
9280 /* Two consecutive rotates adding up to the width of the mode can
9281 be ignored. */
9282 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9283 && TREE_CODE (arg0) == RROTATE_EXPR
9284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9285 && TREE_INT_CST_HIGH (arg1) == 0
9286 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9287 && ((TREE_INT_CST_LOW (arg1)
9288 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9289 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9290 return TREE_OPERAND (arg0, 0);
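/* Worked example (editorial annotation, not in the original source):
   on a 32-bit type, rotating right by 12 and then by 20 moves every
   bit by a full 32 positions, so the whole expression folds back to
   the original operand.  */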
9292 return NULL_TREE;
9294 case MIN_EXPR:
9295 if (operand_equal_p (arg0, arg1, 0))
9296 return omit_one_operand (type, arg0, arg1);
9297 if (INTEGRAL_TYPE_P (type)
9298 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9299 return omit_one_operand (type, arg1, arg0);
9300 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
9301 if (tem)
9302 return tem;
9303 goto associate;
9305 case MAX_EXPR:
9306 if (operand_equal_p (arg0, arg1, 0))
9307 return omit_one_operand (type, arg0, arg1);
9308 if (INTEGRAL_TYPE_P (type)
9309 && TYPE_MAX_VALUE (type)
9310 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9311 return omit_one_operand (type, arg1, arg0);
9312 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
9313 if (tem)
9314 return tem;
9315 goto associate;
9317 case TRUTH_ANDIF_EXPR:
9318 /* Note that the operands of this must be ints
9319 and their values must be 0 or 1.
9320 ("true" is a fixed value perhaps depending on the language.) */
9321 /* If first arg is constant zero, return it. */
9322 if (integer_zerop (arg0))
9323 return fold_convert (type, arg0);
9324 case TRUTH_AND_EXPR:
9325 /* If either arg is constant true, drop it. */
9326 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9327 return non_lvalue (fold_convert (type, arg1));
9328 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
9329 /* Preserve sequence points. */
9330 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9331 return non_lvalue (fold_convert (type, arg0));
9332 /* If second arg is constant zero, result is zero, but first arg
9333 must be evaluated. */
9334 if (integer_zerop (arg1))
9335 return omit_one_operand (type, arg1, arg0);
9336 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9337 case will be handled here. */
9338 if (integer_zerop (arg0))
9339 return omit_one_operand (type, arg0, arg1);
9341 /* !X && X is always false. */
9342 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9343 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9344 return omit_one_operand (type, integer_zero_node, arg1);
9345 /* X && !X is always false. */
9346 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9347 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9348 return omit_one_operand (type, integer_zero_node, arg0);
9350 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
9351 means A >= Y && A != MAX, but in this case we know that
9352 A < X <= MAX. */
9354 if (!TREE_SIDE_EFFECTS (arg0)
9355 && !TREE_SIDE_EFFECTS (arg1))
9356 {
9357 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
9358 if (tem && !operand_equal_p (tem, arg0, 0))
9359 return fold_build2 (code, type, tem, arg1);
9361 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
9362 if (tem && !operand_equal_p (tem, arg1, 0))
9363 return fold_build2 (code, type, arg0, tem);
9364 }
9366 truth_andor:
9367 /* We only do these simplifications if we are optimizing. */
9368 if (optimize == 0)
9369 return NULL_TREE;
9371 /* Check for things like (A || B) && (A || C). We can convert this
9372 to A || (B && C). Note that either operator can be any of the four
9373 truth and/or operations and the transformation will still be
9374 valid. Also note that we only care about order for the
9375 ANDIF and ORIF operators. If B contains side effects, this
9376 might change the truth-value of A. */
9377 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9378 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9379 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9380 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9381 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9382 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9384 tree a00 = TREE_OPERAND (arg0, 0);
9385 tree a01 = TREE_OPERAND (arg0, 1);
9386 tree a10 = TREE_OPERAND (arg1, 0);
9387 tree a11 = TREE_OPERAND (arg1, 1);
9388 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9389 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9390 && (code == TRUTH_AND_EXPR
9391 || code == TRUTH_OR_EXPR));
9393 if (operand_equal_p (a00, a10, 0))
9394 return fold_build2 (TREE_CODE (arg0), type, a00,
9395 fold_build2 (code, type, a01, a11));
9396 else if (commutative && operand_equal_p (a00, a11, 0))
9397 return fold_build2 (TREE_CODE (arg0), type, a00,
9398 fold_build2 (code, type, a01, a10));
9399 else if (commutative && operand_equal_p (a01, a10, 0))
9400 return fold_build2 (TREE_CODE (arg0), type, a01,
9401 fold_build2 (code, type, a00, a11));
9403 /* This case is tricky because we must either have commutative
9404 operators or else A10 must not have side-effects. */
9406 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9407 && operand_equal_p (a01, a11, 0))
9408 return fold_build2 (TREE_CODE (arg0), type,
9409 fold_build2 (code, type, a00, a10),
9410 a01);
9411 }
9413 /* See if we can build a range comparison. */
9414 if (0 != (tem = fold_range_test (code, type, op0, op1)))
9415 return tem;
9417 /* Check for the possibility of merging component references. If our
9418 lhs is another similar operation, try to merge its rhs with our
9419 rhs. Then try to merge our lhs and rhs. */
9420 if (TREE_CODE (arg0) == code
9421 && 0 != (tem = fold_truthop (code, type,
9422 TREE_OPERAND (arg0, 1), arg1)))
9423 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9425 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
9426 return tem;
9428 return NULL_TREE;
9430 case TRUTH_ORIF_EXPR:
9431 /* Note that the operands of this must be ints
9432 and their values must be 0 or true.
9433 ("true" is a fixed value perhaps depending on the language.) */
9434 /* If first arg is constant true, return it. */
9435 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9436 return fold_convert (type, arg0);
9438 /* If either arg is constant zero, drop it. */
9439 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
9440 return non_lvalue (fold_convert (type, arg1));
9441 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
9442 /* Preserve sequence points. */
9443 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9444 return non_lvalue (fold_convert (type, arg0));
9445 /* If second arg is constant true, result is true, but we must
9446 evaluate first arg. */
9447 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
9448 return omit_one_operand (type, arg1, arg0);
9449 /* Likewise for first arg, but note this only occurs here for
9450 TRUTH_OR_EXPR. */
9451 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9452 return omit_one_operand (type, arg0, arg1);
9454 /* !X || X is always true. */
9455 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9456 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9457 return omit_one_operand (type, integer_one_node, arg1);
9458 /* X || !X is always true. */
9459 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9460 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9461 return omit_one_operand (type, integer_one_node, arg0);
9463 goto truth_andor;
9465 case TRUTH_XOR_EXPR:
9466 /* If the second arg is constant zero, drop it. */
9467 if (integer_zerop (arg1))
9468 return non_lvalue (fold_convert (type, arg0));
9469 /* If the second arg is constant true, this is a logical inversion. */
9470 if (integer_onep (arg1))
9471 {
9472 /* Only call invert_truthvalue if operand is a truth value. */
9473 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
9474 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
9475 else
9476 tem = invert_truthvalue (arg0);
9477 return non_lvalue (fold_convert (type, tem));
9478 }
9479 /* Identical arguments cancel to zero. */
9480 if (operand_equal_p (arg0, arg1, 0))
9481 return omit_one_operand (type, integer_zero_node, arg0);
9483 /* !X ^ X is always true. */
9484 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9485 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9486 return omit_one_operand (type, integer_one_node, arg1);
9488 /* X ^ !X is always true. */
9489 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9490 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9491 return omit_one_operand (type, integer_one_node, arg0);
9492 return NULL_TREE;
9494 case EQ_EXPR:
9495 case NE_EXPR:
9497 tem = fold_comparison (code, type, op0, op1);
9498 if (tem != NULL_TREE)
9499 return tem;
9501 /* bool_var != 0 becomes bool_var. */
9502 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
9503 && code == NE_EXPR)
9504 return non_lvalue (fold_convert (type, arg0));
9506 /* bool_var == 1 becomes bool_var. */
9507 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
9508 && code == EQ_EXPR)
9509 return non_lvalue (fold_convert (type, arg0));
9511 /* bool_var != 1 becomes !bool_var. */
9512 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
9513 && code == NE_EXPR)
9514 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
9516 /* bool_var == 0 becomes !bool_var. */
9517 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
9518 && code == EQ_EXPR)
9519 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
9521 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
9522 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9523 && TREE_CODE (arg1) == INTEGER_CST)
9524 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9525 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9526 arg1));
9528 /* If this is an equality comparison of the address of a non-weak
9529 object against zero, then we know the result. */
9530 if (TREE_CODE (arg0) == ADDR_EXPR
9531 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
9532 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9533 && integer_zerop (arg1))
9534 return constant_boolean_node (code != EQ_EXPR, type);
9536 /* If this is an equality comparison of the address of two non-weak,
9537 unaliased symbols neither of which are extern (since we do not
9538 have access to attributes for externs), then we know the result. */
9539 if (TREE_CODE (arg0) == ADDR_EXPR
9540 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
9541 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
9542 && ! lookup_attribute ("alias",
9543 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
9544 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
9545 && TREE_CODE (arg1) == ADDR_EXPR
9546 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
9547 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
9548 && ! lookup_attribute ("alias",
9549 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
9550 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
9551 {
9552 /* We know that we're looking at the address of two
9553 non-weak, unaliased, static _DECL nodes.
9555 It is both wasteful and incorrect to call operand_equal_p
9556 to compare the two ADDR_EXPR nodes. It is wasteful in that
9557 all we need to do is test pointer equality for the arguments
9558 to the two ADDR_EXPR nodes. It is incorrect to use
9559 operand_equal_p as that function is NOT equivalent to a
9560 C equality test. It can in fact return false for two
9561 objects which would test as equal using the C equality
9562 operator. */
9563 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
9564 return constant_boolean_node (equal
9565 ? code == EQ_EXPR : code != EQ_EXPR,
9566 type);
9567 }
9569 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9570 a MINUS_EXPR of a constant, we can convert it into a comparison with
9571 a revised constant as long as no overflow occurs. */
9572 if (TREE_CODE (arg1) == INTEGER_CST
9573 && (TREE_CODE (arg0) == PLUS_EXPR
9574 || TREE_CODE (arg0) == MINUS_EXPR)
9575 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9576 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9577 ? MINUS_EXPR : PLUS_EXPR,
9578 arg1, TREE_OPERAND (arg0, 1), 0))
9579 && ! TREE_CONSTANT_OVERFLOW (tem))
9580 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9582 /* Similarly for a NEGATE_EXPR. */
9583 if (TREE_CODE (arg0) == NEGATE_EXPR
9584 && TREE_CODE (arg1) == INTEGER_CST
9585 && 0 != (tem = negate_expr (arg1))
9586 && TREE_CODE (tem) == INTEGER_CST
9587 && ! TREE_CONSTANT_OVERFLOW (tem))
9588 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9590 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9591 for !=. Don't do this for ordered comparisons due to overflow. */
9592 if (TREE_CODE (arg0) == MINUS_EXPR
9593 && integer_zerop (arg1))
9594 return fold_build2 (code, type,
9595 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9597 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9598 if (TREE_CODE (arg0) == ABS_EXPR
9599 && (integer_zerop (arg1) || real_zerop (arg1)))
9600 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9602 /* If this is an EQ or NE comparison with zero and ARG0 is
9603 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9604 two operations, but the latter can be done in one less insn
9605 on machines that have only two-operand insns or on which a
9606 constant cannot be the first operand. */
9607 if (TREE_CODE (arg0) == BIT_AND_EXPR
9608 && integer_zerop (arg1))
9609 {
9610 tree arg00 = TREE_OPERAND (arg0, 0);
9611 tree arg01 = TREE_OPERAND (arg0, 1);
9612 if (TREE_CODE (arg00) == LSHIFT_EXPR
9613 && integer_onep (TREE_OPERAND (arg00, 0)))
9614 return
9615 fold_build2 (code, type,
9616 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9617 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9618 arg01, TREE_OPERAND (arg00, 1)),
9619 fold_convert (TREE_TYPE (arg0),
9620 integer_one_node)),
9621 arg1);
9622 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9623 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9624 return
9625 fold_build2 (code, type,
9626 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9627 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9628 arg00, TREE_OPERAND (arg01, 1)),
9629 fold_convert (TREE_TYPE (arg0),
9630 integer_one_node)),
9631 arg1);
9632 }
9634 /* If this is an NE or EQ comparison of zero against the result of a
9635 signed MOD operation whose second operand is a power of 2, make
9636 the MOD operation unsigned since it is simpler and equivalent. */
9637 if (integer_zerop (arg1)
9638 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9639 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9640 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9641 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9642 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9643 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9644 {
9645 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9646 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9647 fold_convert (newtype,
9648 TREE_OPERAND (arg0, 0)),
9649 fold_convert (newtype,
9650 TREE_OPERAND (arg0, 1)));
9652 return fold_build2 (code, type, newmod,
9653 fold_convert (newtype, arg1));
9654 }
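/* Worked example (editorial annotation, not in the original source):
   for signed x, x % 4 == 0 holds exactly when the two low bits of x
   are zero, which is the same condition as (unsigned) x % 4U == 0, so
   the cheaper unsigned MOD can be used.  */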
9656 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
9657 C1 is a valid shift constant, and C2 is a power of two, i.e.
9658 a single bit. */
9659 if (TREE_CODE (arg0) == BIT_AND_EXPR
9660 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
9661 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
9662 == INTEGER_CST
9663 && integer_pow2p (TREE_OPERAND (arg0, 1))
9664 && integer_zerop (arg1))
9665 {
9666 tree itype = TREE_TYPE (arg0);
9667 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
9668 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
9670 /* Check for a valid shift count. */
9671 if (TREE_INT_CST_HIGH (arg001) == 0
9672 && TREE_INT_CST_LOW (arg001) < prec)
9673 {
9674 tree arg01 = TREE_OPERAND (arg0, 1);
9675 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9676 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
9677 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
9678 can be rewritten as (X & (C2 << C1)) != 0. */
9679 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
9680 {
9681 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
9682 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
9683 return fold_build2 (code, type, tem, arg1);
9684 }
9685 /* Otherwise, for signed (arithmetic) shifts,
9686 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
9687 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
9688 else if (!TYPE_UNSIGNED (itype))
9689 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
9690 arg000, build_int_cst (itype, 0));
9691 /* Otherwise, for unsigned (logical) shifts,
9692 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
9693 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
9694 else
9695 return omit_one_operand (type,
9696 code == EQ_EXPR ? integer_one_node
9697 : integer_zero_node,
9698 arg000);
9699 }
9700 }
9702 /* If this is an NE comparison of zero with an AND of one, remove the
9703 comparison since the AND will give the correct value. */
9704 if (code == NE_EXPR
9705 && integer_zerop (arg1)
9706 && TREE_CODE (arg0) == BIT_AND_EXPR
9707 && integer_onep (TREE_OPERAND (arg0, 1)))
9708 return fold_convert (type, arg0);
9710 /* If we have (A & C) == C where C is a power of 2, convert this into
9711 (A & C) != 0. Similarly for NE_EXPR. */
9712 if (TREE_CODE (arg0) == BIT_AND_EXPR
9713 && integer_pow2p (TREE_OPERAND (arg0, 1))
9714 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9715 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9716 arg0, fold_convert (TREE_TYPE (arg0),
9717 integer_zero_node));
9719 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9720 bit, then fold the expression into A < 0 or A >= 0. */
9721 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9722 if (tem)
9723 return tem;
9725 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9726 Similarly for NE_EXPR. */
9727 if (TREE_CODE (arg0) == BIT_AND_EXPR
9728 && TREE_CODE (arg1) == INTEGER_CST
9729 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9730 {
9731 tree notc = fold_build1 (BIT_NOT_EXPR,
9732 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9733 TREE_OPERAND (arg0, 1));
9734 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9735 arg1, notc);
9736 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9737 if (integer_nonzerop (dandnotc))
9738 return omit_one_operand (type, rslt, arg0);
9739 }
9741 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9742 Similarly for NE_EXPR. */
9743 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9744 && TREE_CODE (arg1) == INTEGER_CST
9745 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9746 {
9747 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9748 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9749 TREE_OPERAND (arg0, 1), notd);
9750 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9751 if (integer_nonzerop (candnotd))
9752 return omit_one_operand (type, rslt, arg0);
9753 }
9755 /* If this is a comparison of a field, we may be able to simplify it. */
9756 if (((TREE_CODE (arg0) == COMPONENT_REF
9757 && lang_hooks.can_use_bit_fields_p ())
9758 || TREE_CODE (arg0) == BIT_FIELD_REF)
9759 /* Handle the constant case even without -O
9760 to make sure the warnings are given. */
9761 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9762 {
9763 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9764 if (t1)
9765 return t1;
9766 }
9768 /* Optimize comparisons of strlen vs zero to a compare of the
9769 first character of the string vs zero. To wit,
9770 strlen(ptr) == 0 => *ptr == 0
9771 strlen(ptr) != 0 => *ptr != 0
9772 Other cases should reduce to one of these two (or a constant)
9773 due to the return value of strlen being unsigned. */
9774 if (TREE_CODE (arg0) == CALL_EXPR
9775 && integer_zerop (arg1))
9776 {
9777 tree fndecl = get_callee_fndecl (arg0);
9778 tree arglist;
9780 if (fndecl
9781 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9782 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9783 && (arglist = TREE_OPERAND (arg0, 1))
9784 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9785 && ! TREE_CHAIN (arglist))
9786 {
9787 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9788 return fold_build2 (code, type, iref,
9789 build_int_cst (TREE_TYPE (iref), 0));
9790 }
9791 }
9793 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
9794 of X. Similarly fold (X >> C) == 0 into X >= 0. */
9795 if (TREE_CODE (arg0) == RSHIFT_EXPR
9796 && integer_zerop (arg1)
9797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9798 {
9799 tree arg00 = TREE_OPERAND (arg0, 0);
9800 tree arg01 = TREE_OPERAND (arg0, 1);
9801 tree itype = TREE_TYPE (arg00);
9802 if (TREE_INT_CST_HIGH (arg01) == 0
9803 && TREE_INT_CST_LOW (arg01)
9804 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
9805 {
9806 if (TYPE_UNSIGNED (itype))
9807 {
9808 itype = lang_hooks.types.signed_type (itype);
9809 arg00 = fold_convert (itype, arg00);
9810 }
9811 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
9812 type, arg00, build_int_cst (itype, 0));
9813 }
9814 }
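/* Worked example (editorial annotation, not in the original source):
   for 32-bit x, (x >> 31) != 0 tests exactly the sign bit, so it
   folds to x < 0, and (x >> 31) == 0 folds to x >= 0 (converting an
   unsigned x to the corresponding signed type first).  */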
9816 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
9817 if (integer_zerop (arg1)
9818 && TREE_CODE (arg0) == BIT_XOR_EXPR)
9819 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9820 TREE_OPERAND (arg0, 1));
9822 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
9823 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9824 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9825 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9826 build_int_cst (TREE_TYPE (arg1), 0));
9827 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
9828 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9829 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9830 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9831 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
9832 build_int_cst (TREE_TYPE (arg1), 0));
9834 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
9835 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9836 && TREE_CODE (arg1) == INTEGER_CST
9837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9838 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9839 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
9840 TREE_OPERAND (arg0, 1), arg1));
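/* E.g. (x ^ 4) == 6 becomes x == (4 ^ 6), i.e. x == 2 -- a
   hypothetical instance of the rewrite above with C1 == 4 and
   C2 == 6.  */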
9842 if (integer_zerop (arg1)
9843 && tree_expr_nonzero_p (arg0))
9845 tree res = constant_boolean_node (code == NE_EXPR, type);
9846 return omit_one_operand (type, res, arg0);
9854 tem = fold_comparison (code, type, op0, op1);
9855 if (tem != NULL_TREE)
return tem;
9858 /* Transform comparisons of the form X +- C CMP X. */
9859 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9861 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9862 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9863 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9864 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9865 && !(flag_wrapv || flag_trapv))))
9867 tree arg01 = TREE_OPERAND (arg0, 1);
9868 enum tree_code code0 = TREE_CODE (arg0);
9871 if (TREE_CODE (arg01) == REAL_CST)
9872 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9874 is_positive = tree_int_cst_sgn (arg01);
9876 /* (X - c) > X becomes false. */
9878 && ((code0 == MINUS_EXPR && is_positive >= 0)
9879 || (code0 == PLUS_EXPR && is_positive <= 0)))
9880 return constant_boolean_node (0, type);
9882 /* Likewise (X + c) < X becomes false. */
9884 && ((code0 == PLUS_EXPR && is_positive >= 0)
9885 || (code0 == MINUS_EXPR && is_positive <= 0)))
9886 return constant_boolean_node (0, type);
9888 /* Convert (X - c) <= X to true. */
9889 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9891 && ((code0 == MINUS_EXPR && is_positive >= 0)
9892 || (code0 == PLUS_EXPR && is_positive <= 0)))
9893 return constant_boolean_node (1, type);
9895 /* Convert (X + c) >= X to true. */
9896 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9898 && ((code0 == PLUS_EXPR && is_positive >= 0)
9899 || (code0 == MINUS_EXPR && is_positive <= 0)))
9900 return constant_boolean_node (1, type);
9902 if (TREE_CODE (arg01) == INTEGER_CST)
9904 /* Convert X + c > X and X - c < X to true for integers. */
9906 && ((code0 == PLUS_EXPR && is_positive > 0)
9907 || (code0 == MINUS_EXPR && is_positive < 0)))
9908 return constant_boolean_node (1, type);
9911 && ((code0 == MINUS_EXPR && is_positive > 0)
9912 || (code0 == PLUS_EXPR && is_positive < 0)))
9913 return constant_boolean_node (1, type);
9915 /* Convert X + c <= X and X - c >= X to false for integers. */
9917 && ((code0 == PLUS_EXPR && is_positive > 0)
9918 || (code0 == MINUS_EXPR && is_positive < 0)))
9919 return constant_boolean_node (0, type);
9922 && ((code0 == MINUS_EXPR && is_positive > 0)
9923 || (code0 == PLUS_EXPR && is_positive < 0)))
9924 return constant_boolean_node (0, type);
9928 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9929 This transformation affects the cases which are handled in later
9930 optimizations involving comparisons with non-negative constants. */
9931 if (TREE_CODE (arg1) == INTEGER_CST
9932 && TREE_CODE (arg0) != INTEGER_CST
9933 && tree_int_cst_sgn (arg1) > 0)
9935 if (code == GE_EXPR)
9937 arg1 = const_binop (MINUS_EXPR, arg1,
9938 build_int_cst (TREE_TYPE (arg1), 1), 0);
9939 return fold_build2 (GT_EXPR, type, arg0,
9940 fold_convert (TREE_TYPE (arg0), arg1));
9942 if (code == LT_EXPR)
9944 arg1 = const_binop (MINUS_EXPR, arg1,
9945 build_int_cst (TREE_TYPE (arg1), 1), 0);
9946 return fold_build2 (LE_EXPR, type, arg0,
9947 fold_convert (TREE_TYPE (arg0), arg1));
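/* A hypothetical instance of the canonicalization above: with
   C == 5, x >= 5 becomes x > 4 and x < 5 becomes x <= 4.  */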
9951 /* Comparisons with the highest or lowest possible integer of
9952 the specified size will have known values. */
9954 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9956 if (TREE_CODE (arg1) == INTEGER_CST
9957 && ! TREE_CONSTANT_OVERFLOW (arg1)
9958 && width <= 2 * HOST_BITS_PER_WIDE_INT
9959 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9960 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9962 HOST_WIDE_INT signed_max_hi;
9963 unsigned HOST_WIDE_INT signed_max_lo;
9964 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9966 if (width <= HOST_BITS_PER_WIDE_INT)
9968 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9973 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9975 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9981 max_lo = signed_max_lo;
9982 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9988 width -= HOST_BITS_PER_WIDE_INT;
9990 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9995 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9997 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10002 max_hi = signed_max_hi;
10003 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10007 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10008 && TREE_INT_CST_LOW (arg1) == max_lo)
10012 return omit_one_operand (type, integer_zero_node, arg0);
10015 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10018 return omit_one_operand (type, integer_one_node, arg0);
10021 return fold_build2 (NE_EXPR, type, arg0, arg1);
10023 /* The GE_EXPR and LT_EXPR cases above are not normally
10024 reached because of previous transformations. */
10029 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10031 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10035 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10036 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10038 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10039 return fold_build2 (NE_EXPR, type, arg0, arg1);
10043 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10045 && TREE_INT_CST_LOW (arg1) == min_lo)
10049 return omit_one_operand (type, integer_zero_node, arg0);
10052 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10055 return omit_one_operand (type, integer_one_node, arg0);
10058 return fold_build2 (NE_EXPR, type, op0, op1);
10063 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10065 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10069 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10070 return fold_build2 (NE_EXPR, type, arg0, arg1);
10072 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10073 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10078 else if (!in_gimple_form
10079 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10080 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10081 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10082 /* signed_type does not work on pointer types. */
10083 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10085 /* The following case also applies to X < signed_max+1
10086 and X >= signed_max+1 because of previous transformations. */
10087 if (code == LE_EXPR || code == GT_EXPR)
10090 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10091 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10092 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10093 type, fold_convert (st0, arg0),
10094 build_int_cst (st1, 0));
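/* Sketches of the extreme-value folds above (assumed examples):
   for unsigned char x, x <= 255 folds to 1, x > 255 folds to 0,
   and x >= 255 becomes x == 255; for signed char x, x < -128
   folds to 0 and x <= 127 folds to 1.  The final case rewrites
   x > 127 for unsigned char x as (signed char) x < 0, a plain
   sign test.  */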
10100 /* If we are comparing an ABS_EXPR with a constant, we can
10101 convert all the cases into explicit comparisons, but they may
10102 well not be faster than doing the ABS and one comparison.
10103 But ABS (X) <= C is a range comparison, which becomes a subtraction
10104 and a comparison, and is probably faster. */
10105 if (code == LE_EXPR
10106 && TREE_CODE (arg1) == INTEGER_CST
10107 && TREE_CODE (arg0) == ABS_EXPR
10108 && ! TREE_SIDE_EFFECTS (arg0)
10109 && (0 != (tem = negate_expr (arg1)))
10110 && TREE_CODE (tem) == INTEGER_CST
10111 && ! TREE_CONSTANT_OVERFLOW (tem))
10112 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10113 build2 (GE_EXPR, type,
10114 TREE_OPERAND (arg0, 0), tem),
10115 build2 (LE_EXPR, type,
10116 TREE_OPERAND (arg0, 0), arg1));
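/* E.g. abs (x) <= 5 folds to x >= -5 && x <= 5 (a hypothetical
   constant), built as a TRUTH_ANDIF_EXPR of the two comparisons.  */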
10118 /* Convert ABS_EXPR<x> >= 0 to true. */
10119 if (code == GE_EXPR
10120 && tree_expr_nonnegative_p (arg0)
10121 && (integer_zerop (arg1)
10122 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10123 && real_zerop (arg1))))
10124 return omit_one_operand (type, integer_one_node, arg0);
10126 /* Convert ABS_EXPR<x> < 0 to false. */
10127 if (code == LT_EXPR
10128 && tree_expr_nonnegative_p (arg0)
10129 && (integer_zerop (arg1) || real_zerop (arg1)))
10130 return omit_one_operand (type, integer_zero_node, arg0);
10132 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10133 and similarly for >= into !=. */
10134 if ((code == LT_EXPR || code == GE_EXPR)
10135 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10136 && TREE_CODE (arg1) == LSHIFT_EXPR
10137 && integer_onep (TREE_OPERAND (arg1, 0)))
10138 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10139 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10140 TREE_OPERAND (arg1, 1)),
10141 build_int_cst (TREE_TYPE (arg0), 0));
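/* For unsigned x and y (an assumed example), x < (1 << y) becomes
   (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0, avoiding
   materializing the power of two.  */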
10143 if ((code == LT_EXPR || code == GE_EXPR)
10144 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10145 && (TREE_CODE (arg1) == NOP_EXPR
10146 || TREE_CODE (arg1) == CONVERT_EXPR)
10147 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10148 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10150 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10151 fold_convert (TREE_TYPE (arg0),
10152 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10153 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10155 build_int_cst (TREE_TYPE (arg0), 0));
10159 case UNORDERED_EXPR:
10167 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10169 t1 = fold_relational_const (code, type, arg0, arg1);
10170 if (t1 != NULL_TREE)
return t1;
10174 /* If the first operand is NaN, the result is constant. */
10175 if (TREE_CODE (arg0) == REAL_CST
10176 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10177 && (code != LTGT_EXPR || ! flag_trapping_math))
10179 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10180 ? integer_zero_node
10181 : integer_one_node;
10182 return omit_one_operand (type, t1, arg1);
10185 /* If the second operand is NaN, the result is constant. */
10186 if (TREE_CODE (arg1) == REAL_CST
10187 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10188 && (code != LTGT_EXPR || ! flag_trapping_math))
10190 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10191 ? integer_zero_node
10192 : integer_one_node;
10193 return omit_one_operand (type, t1, arg0);
10196 /* Simplify unordered comparison of something with itself. */
10197 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10198 && operand_equal_p (arg0, arg1, 0))
10199 return constant_boolean_node (1, type);
10201 if (code == LTGT_EXPR
10202 && !flag_trapping_math
10203 && operand_equal_p (arg0, arg1, 0))
10204 return constant_boolean_node (0, type);
10206 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10208 tree targ0 = strip_float_extensions (arg0);
10209 tree targ1 = strip_float_extensions (arg1);
10210 tree newtype = TREE_TYPE (targ0);
10212 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10213 newtype = TREE_TYPE (targ1);
10215 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10216 return fold_build2 (code, type, fold_convert (newtype, targ0),
10217 fold_convert (newtype, targ1));
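/* E.g. with float f1, f2 (a hypothetical case), the comparison
   (double) f1 < (double) f2 is performed as f1 < f2, since
   widening both operands cannot change the result.  */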
10222 case COMPOUND_EXPR:
10223 /* When pedantic, a compound expression can be neither an lvalue
10224 nor an integer constant expression. */
10225 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10228 /* Don't let (0, 0) be a null pointer constant. */
10228 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10229 : fold_convert (type, arg1);
10230 return pedantic_non_lvalue (tem);
10233 if ((TREE_CODE (arg0) == REAL_CST
10234 && TREE_CODE (arg1) == REAL_CST)
10235 || (TREE_CODE (arg0) == INTEGER_CST
10236 && TREE_CODE (arg1) == INTEGER_CST))
10237 return build_complex (type, arg0, arg1);
10241 /* An ASSERT_EXPR should never be passed to fold_binary. */
10242 gcc_unreachable ();
10246 } /* switch (code) */
10249 /* Callback for walk_tree, looking for a LABEL_EXPR.
10250 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
10251 Do not walk into the sub-tree of a GOTO_EXPR. */
10254 contains_label_1 (tree *tp,
10255 int *walk_subtrees,
10256 void *data ATTRIBUTE_UNUSED)
10258 switch (TREE_CODE (*tp))
10263 *walk_subtrees = 0;
10270 /* Checks whether the sub-tree ST contains a label (a LABEL_EXPR) which
10271 is accessible from outside the sub-tree. Returns true if such a label
10272 is found, false otherwise. */
10275 contains_label_p (tree st)
10277 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10280 /* Fold a ternary expression of code CODE and type TYPE with operands
10281 OP0, OP1, and OP2. Return the folded expression if folding is
10282 successful. Otherwise, return NULL_TREE. */
10285 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10288 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10289 enum tree_code_class kind = TREE_CODE_CLASS (code);
10291 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10292 && TREE_CODE_LENGTH (code) == 3);
10294 /* Strip any conversions that don't change the mode. This is safe
10295 for every expression, except for a comparison expression because
10296 its signedness is derived from its operands. So, in the latter
10297 case, only strip conversions that don't change the signedness.
10299 Note that this is done as an internal manipulation within the
10300 constant folder, in order to find the simplest representation of
10301 the arguments so that their form can be studied. In any case,
10302 the appropriate type conversions should be put back in the tree
10303 that will get out of the constant folder. */
10318 case COMPONENT_REF:
10319 if (TREE_CODE (arg0) == CONSTRUCTOR
10320 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10322 unsigned HOST_WIDE_INT idx;
10324 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10331 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10332 so all simple results must be passed through pedantic_non_lvalue. */
10333 if (TREE_CODE (arg0) == INTEGER_CST)
10335 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10336 tem = integer_zerop (arg0) ? op2 : op1;
10337 /* Only optimize constant conditions when the selected branch
10338 has the same type as the COND_EXPR. This avoids optimizing
10339 away "c ? x : throw", where the throw has a void type.
10340 Avoid throwing away an operand which contains a label. */
10341 if ((!TREE_SIDE_EFFECTS (unused_op)
10342 || !contains_label_p (unused_op))
10343 && (! VOID_TYPE_P (TREE_TYPE (tem))
10344 || VOID_TYPE_P (type)))
10345 return pedantic_non_lvalue (tem);
10348 if (operand_equal_p (arg1, op2, 0))
10349 return pedantic_omit_one_operand (type, arg1, arg0);
10351 /* If we have A op B ? A : C, we may be able to convert this to a
10352 simpler expression, depending on the operation and the values
10353 of B and C. Signed zeros prevent all of these transformations,
10354 for reasons given above each one.
10356 Also try swapping the arguments and inverting the conditional. */
10357 if (COMPARISON_CLASS_P (arg0)
10358 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10359 arg1, TREE_OPERAND (arg0, 1))
10360 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10362 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10367 if (COMPARISON_CLASS_P (arg0)
10368 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10370 TREE_OPERAND (arg0, 1))
10371 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10373 tem = invert_truthvalue (arg0);
10374 if (COMPARISON_CLASS_P (tem))
10376 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10382 /* If the second operand is simpler than the third, swap them
10383 since that produces better jump optimization results. */
10384 if (truth_value_p (TREE_CODE (arg0))
10385 && tree_swap_operands_p (op1, op2, false))
10387 /* See if this can be inverted. If it can't, possibly because
10388 it was a floating-point inequality comparison, don't do anything. */
10390 tem = invert_truthvalue (arg0);
10392 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10393 return fold_build3 (code, type, tem, op2, op1);
10396 /* Convert A ? 1 : 0 to simply A. */
10397 if (integer_onep (op1)
10398 && integer_zerop (op2)
10399 /* If we try to convert OP0 to our type, the
10400 call to fold will try to move the conversion inside
10401 a COND, which will recurse. In that case, the COND_EXPR
10402 is probably the best choice, so leave it alone. */
10403 && type == TREE_TYPE (arg0))
10404 return pedantic_non_lvalue (arg0);
10406 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10407 over COND_EXPR in cases such as floating point comparisons. */
10408 if (integer_zerop (op1)
10409 && integer_onep (op2)
10410 && truth_value_p (TREE_CODE (arg0)))
10411 return pedantic_non_lvalue (fold_convert (type,
10412 invert_truthvalue (arg0)));
10414 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10415 if (TREE_CODE (arg0) == LT_EXPR
10416 && integer_zerop (TREE_OPERAND (arg0, 1))
10417 && integer_zerop (op2)
10418 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10419 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10420 TREE_TYPE (tem), tem, arg1));
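/* E.g. for a 32-bit int x (an assumed example),
   x < 0 ? 0x80000000 : 0 folds to x & 0x80000000, since the
   selected value is exactly the sign bit of x's type.  */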
10422 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10423 already handled above. */
10424 if (TREE_CODE (arg0) == BIT_AND_EXPR
10425 && integer_onep (TREE_OPERAND (arg0, 1))
10426 && integer_zerop (op2)
10427 && integer_pow2p (arg1))
10429 tree tem = TREE_OPERAND (arg0, 0);
10431 if (TREE_CODE (tem) == RSHIFT_EXPR
10432 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10433 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10434 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10435 return fold_build2 (BIT_AND_EXPR, type,
10436 TREE_OPERAND (tem, 0), arg1);
10439 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10440 is probably obsolete because the first operand should be a
10441 truth value (that's why we have the two cases above), but let's
10442 leave it in until we can confirm this for all front-ends. */
10443 if (integer_zerop (op2)
10444 && TREE_CODE (arg0) == NE_EXPR
10445 && integer_zerop (TREE_OPERAND (arg0, 1))
10446 && integer_pow2p (arg1)
10447 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10448 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10449 arg1, OEP_ONLY_CONST))
10450 return pedantic_non_lvalue (fold_convert (type,
10451 TREE_OPERAND (arg0, 0)));
10453 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10454 if (integer_zerop (op2)
10455 && truth_value_p (TREE_CODE (arg0))
10456 && truth_value_p (TREE_CODE (arg1)))
10457 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10459 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10460 if (integer_onep (op2)
10461 && truth_value_p (TREE_CODE (arg0))
10462 && truth_value_p (TREE_CODE (arg1)))
10464 /* Only perform transformation if ARG0 is easily inverted. */
10465 tem = invert_truthvalue (arg0);
10466 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10467 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10470 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10471 if (integer_zerop (arg1)
10472 && truth_value_p (TREE_CODE (arg0))
10473 && truth_value_p (TREE_CODE (op2)))
10475 /* Only perform transformation if ARG0 is easily inverted. */
10476 tem = invert_truthvalue (arg0);
10477 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10478 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10481 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10482 if (integer_onep (arg1)
10483 && truth_value_p (TREE_CODE (arg0))
10484 && truth_value_p (TREE_CODE (op2)))
10485 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10490 /* Check for a built-in function. */
10491 if (TREE_CODE (op0) == ADDR_EXPR
10492 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10493 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10494 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10495 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
10496 here are when we've propagated the address of a decl into the object slot. */
10498 if (TREE_CODE (op0) == OBJ_TYPE_REF
10499 && lang_hooks.fold_obj_type_ref
10500 && TREE_CODE (OBJ_TYPE_REF_OBJECT (op0)) == ADDR_EXPR
10501 && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (op0), 0)))
10505 /* ??? Caution: Broken ADDR_EXPR semantics means that
10506 looking at the type of the operand of the addr_expr
10507 can yield an array type. See silly exception in
10508 check_pointer_types_r. */
10510 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (op0)));
10511 t = lang_hooks.fold_obj_type_ref (op0, t);
10513 return fold_build3 (code, type, t, op1, op2);
10517 case BIT_FIELD_REF:
10518 if (TREE_CODE (arg0) == VECTOR_CST
10519 && type == TREE_TYPE (TREE_TYPE (arg0))
10520 && host_integerp (arg1, 1)
10521 && host_integerp (op2, 1))
10523 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10524 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10527 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10528 && (idx % width) == 0
10529 && (idx = idx / width)
10530 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10532 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10533 while (idx-- > 0 && elements)
10534 elements = TREE_CHAIN (elements);
10536 return TREE_VALUE (elements);
10538 return fold_convert (type, integer_zero_node);
10545 } /* switch (code) */
10548 /* Perform constant folding and related simplification of EXPR.
10549 The related simplifications include x*1 => x, x*0 => 0, etc.,
10550 and application of the associative law.
10551 NOP_EXPR conversions may be removed freely (as long as we
10552 are careful not to change the type of the overall expression).
10553 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10554 but we can constant-fold them if they have constant operands. */
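/* A minimal usage sketch (a hypothetical caller, not part of this
   file):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      two, three));

   SUM is then an INTEGER_CST with value 5 rather than a PLUS_EXPR.
   Callers that know the arity up front may prefer fold_buildN,
   which avoids building a scratch node when folding succeeds.  */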
10556 #ifdef ENABLE_FOLD_CHECKING
10557 # define fold(x) fold_1 (x)
10558 static tree fold_1 (tree);
10564 const tree t = expr;
10565 enum tree_code code = TREE_CODE (t);
10566 enum tree_code_class kind = TREE_CODE_CLASS (code);
10569 /* Return right away if a constant. */
10570 if (kind == tcc_constant)
10573 if (IS_EXPR_CODE_CLASS (kind))
10575 tree type = TREE_TYPE (t);
10576 tree op0, op1, op2;
10578 switch (TREE_CODE_LENGTH (code))
10581 op0 = TREE_OPERAND (t, 0);
10582 tem = fold_unary (code, type, op0);
10583 return tem ? tem : expr;
10585 op0 = TREE_OPERAND (t, 0);
10586 op1 = TREE_OPERAND (t, 1);
10587 tem = fold_binary (code, type, op0, op1);
10588 return tem ? tem : expr;
10590 op0 = TREE_OPERAND (t, 0);
10591 op1 = TREE_OPERAND (t, 1);
10592 op2 = TREE_OPERAND (t, 2);
10593 tem = fold_ternary (code, type, op0, op1, op2);
10594 return tem ? tem : expr;
10603 return fold (DECL_INITIAL (t));
10607 } /* switch (code) */
10610 #ifdef ENABLE_FOLD_CHECKING
10613 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10614 static void fold_check_failed (tree, tree);
10615 void print_fold_checksum (tree);
10617 /* When --enable-checking=fold, compute a digest of EXPR before
10618 and after the actual fold call to verify that fold did not
10619 accidentally change the original EXPR. */
10625 struct md5_ctx ctx;
10626 unsigned char checksum_before[16], checksum_after[16];
10629 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10630 md5_init_ctx (&ctx);
10631 fold_checksum_tree (expr, &ctx, ht);
10632 md5_finish_ctx (&ctx, checksum_before);
10635 ret = fold_1 (expr);
10637 md5_init_ctx (&ctx);
10638 fold_checksum_tree (expr, &ctx, ht);
10639 md5_finish_ctx (&ctx, checksum_after);
10642 if (memcmp (checksum_before, checksum_after, 16))
10643 fold_check_failed (expr, ret);
10649 print_fold_checksum (tree expr)
10651 struct md5_ctx ctx;
10652 unsigned char checksum[16], cnt;
10655 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10656 md5_init_ctx (&ctx);
10657 fold_checksum_tree (expr, &ctx, ht);
10658 md5_finish_ctx (&ctx, checksum);
10660 for (cnt = 0; cnt < 16; ++cnt)
10661 fprintf (stderr, "%02x", checksum[cnt]);
10662 putc ('\n', stderr);
10666 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10668 internal_error ("fold check: original tree changed by fold");
10672 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10675 enum tree_code code;
10676 struct tree_function_decl buf;
10681 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10682 <= sizeof (struct tree_function_decl))
10683 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10686 slot = htab_find_slot (ht, expr, INSERT);
10690 code = TREE_CODE (expr);
10691 if (TREE_CODE_CLASS (code) == tcc_declaration
10692 && DECL_ASSEMBLER_NAME_SET_P (expr))
10694 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10695 memcpy ((char *) &buf, expr, tree_size (expr));
10696 expr = (tree) &buf;
10697 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10699 else if (TREE_CODE_CLASS (code) == tcc_type
10700 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10701 || TYPE_CACHED_VALUES_P (expr)
10702 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10704 /* Allow these fields to be modified. */
10705 memcpy ((char *) &buf, expr, tree_size (expr));
10706 expr = (tree) &buf;
10707 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10708 TYPE_POINTER_TO (expr) = NULL;
10709 TYPE_REFERENCE_TO (expr) = NULL;
10710 if (TYPE_CACHED_VALUES_P (expr))
10712 TYPE_CACHED_VALUES_P (expr) = 0;
10713 TYPE_CACHED_VALUES (expr) = NULL;
10716 md5_process_bytes (expr, tree_size (expr), ctx);
10717 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10718 if (TREE_CODE_CLASS (code) != tcc_type
10719 && TREE_CODE_CLASS (code) != tcc_declaration
10720 && code != TREE_LIST)
10721 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10722 switch (TREE_CODE_CLASS (code))
10728 md5_process_bytes (TREE_STRING_POINTER (expr),
10729 TREE_STRING_LENGTH (expr), ctx);
10732 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10733 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10736 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10742 case tcc_exceptional:
10746 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10747 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10748 expr = TREE_CHAIN (expr);
10749 goto recursive_label;
10752 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10753 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10759 case tcc_expression:
10760 case tcc_reference:
10761 case tcc_comparison:
10764 case tcc_statement:
10765 len = TREE_CODE_LENGTH (code);
10766 for (i = 0; i < len; ++i)
10767 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10769 case tcc_declaration:
10770 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10771 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10772 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
10774 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10775 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10776 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10777 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10778 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10780 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10781 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10783 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10785 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10786 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10787 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10791 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10792 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10793 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10794 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10795 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10796 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10797 if (INTEGRAL_TYPE_P (expr)
10798 || SCALAR_FLOAT_TYPE_P (expr))
10800 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10801 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10803 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10804 if (TREE_CODE (expr) == RECORD_TYPE
10805 || TREE_CODE (expr) == UNION_TYPE
10806 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10807 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10808 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10817 /* Fold a unary tree expression with code CODE of type TYPE with an
10818 operand OP0. Return a folded expression if successful. Otherwise,
10819 return a tree expression with code CODE of type TYPE with an operand OP0. */
10823 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10826 #ifdef ENABLE_FOLD_CHECKING
10827 unsigned char checksum_before[16], checksum_after[16];
10828 struct md5_ctx ctx;
10831 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10832 md5_init_ctx (&ctx);
10833 fold_checksum_tree (op0, &ctx, ht);
10834 md5_finish_ctx (&ctx, checksum_before);
10838 tem = fold_unary (code, type, op0);
10840 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10842 #ifdef ENABLE_FOLD_CHECKING
10843 md5_init_ctx (&ctx);
10844 fold_checksum_tree (op0, &ctx, ht);
10845 md5_finish_ctx (&ctx, checksum_after);
10848 if (memcmp (checksum_before, checksum_after, 16))
10849 fold_check_failed (op0, tem);
10854 /* Fold a binary tree expression with code CODE of type TYPE with
10855 operands OP0 and OP1. Return a folded expression if successful.
10856 Otherwise, return a tree expression with code CODE of type TYPE
10857 with operands OP0 and OP1. */
10860 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10864 #ifdef ENABLE_FOLD_CHECKING
10865 unsigned char checksum_before_op0[16],
10866 checksum_before_op1[16],
10867 checksum_after_op0[16],
10868 checksum_after_op1[16];
10869 struct md5_ctx ctx;
10872 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10873 md5_init_ctx (&ctx);
10874 fold_checksum_tree (op0, &ctx, ht);
10875 md5_finish_ctx (&ctx, checksum_before_op0);
10878 md5_init_ctx (&ctx);
10879 fold_checksum_tree (op1, &ctx, ht);
10880 md5_finish_ctx (&ctx, checksum_before_op1);
10884 tem = fold_binary (code, type, op0, op1);
10886 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10888 #ifdef ENABLE_FOLD_CHECKING
10889 md5_init_ctx (&ctx);
10890 fold_checksum_tree (op0, &ctx, ht);
10891 md5_finish_ctx (&ctx, checksum_after_op0);
10894 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10895 fold_check_failed (op0, tem);
10897 md5_init_ctx (&ctx);
10898 fold_checksum_tree (op1, &ctx, ht);
10899 md5_finish_ctx (&ctx, checksum_after_op1);
10902 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10903 fold_check_failed (op1, tem);
10908 /* Fold a ternary tree expression with code CODE of type TYPE with
10909 operands OP0, OP1, and OP2. Return a folded expression if
10910 successful. Otherwise, return a tree expression with code CODE of
10911 type TYPE with operands OP0, OP1, and OP2. */
10914 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10918 #ifdef ENABLE_FOLD_CHECKING
10919 unsigned char checksum_before_op0[16],
10920 checksum_before_op1[16],
10921 checksum_before_op2[16],
10922 checksum_after_op0[16],
10923 checksum_after_op1[16],
10924 checksum_after_op2[16];
10925 struct md5_ctx ctx;
10928 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10929 md5_init_ctx (&ctx);
10930 fold_checksum_tree (op0, &ctx, ht);
10931 md5_finish_ctx (&ctx, checksum_before_op0);
10934 md5_init_ctx (&ctx);
10935 fold_checksum_tree (op1, &ctx, ht);
10936 md5_finish_ctx (&ctx, checksum_before_op1);
10939 md5_init_ctx (&ctx);
10940 fold_checksum_tree (op2, &ctx, ht);
10941 md5_finish_ctx (&ctx, checksum_before_op2);
10945 tem = fold_ternary (code, type, op0, op1, op2);
10947 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10949 #ifdef ENABLE_FOLD_CHECKING
10950 md5_init_ctx (&ctx);
10951 fold_checksum_tree (op0, &ctx, ht);
10952 md5_finish_ctx (&ctx, checksum_after_op0);
10955 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10956 fold_check_failed (op0, tem);
10958 md5_init_ctx (&ctx);
10959 fold_checksum_tree (op1, &ctx, ht);
10960 md5_finish_ctx (&ctx, checksum_after_op1);
10963 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10964 fold_check_failed (op1, tem);
10966 md5_init_ctx (&ctx);
10967 fold_checksum_tree (op2, &ctx, ht);
10968 md5_finish_ctx (&ctx, checksum_after_op2);
10971 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10972 fold_check_failed (op2, tem);
10977 /* Perform constant folding and related simplification of initializer
10978 expression EXPR. These behave identically to "fold_buildN" but ignore
10979 potential run-time traps and exceptions that fold must preserve. */
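/* A hedged usage sketch: with -frounding-math in effect the value of
   1.0 / 3.0 depends on the dynamic rounding mode, so plain fold_build2
   must keep the division, whereas

     fold_build2_initializer (RDIV_EXPR, double_type_node, one, three)

   (ONE and THREE being hypothetical REAL_CST nodes) may fold it to a
   constant, because the flags are cleared around the call below.  */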
10981 #define START_FOLD_INIT \
10982 int saved_signaling_nans = flag_signaling_nans;\
10983 int saved_trapping_math = flag_trapping_math;\
10984 int saved_rounding_math = flag_rounding_math;\
10985 int saved_trapv = flag_trapv;\
10986 flag_signaling_nans = 0;\
10987 flag_trapping_math = 0;\
10988 flag_rounding_math = 0;\
10991 #define END_FOLD_INIT \
10992 flag_signaling_nans = saved_signaling_nans;\
10993 flag_trapping_math = saved_trapping_math;\
10994 flag_rounding_math = saved_rounding_math;\
10995 flag_trapv = saved_trapv
10998 fold_build1_initializer (enum tree_code code, tree type, tree op)
11003 result = fold_build1 (code, type, op);
11010 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11015 result = fold_build2 (code, type, op0, op1);
11022 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11028 result = fold_build3 (code, type, op0, op1, op2);
11034 #undef START_FOLD_INIT
11035 #undef END_FOLD_INIT
11037 /* Determine if the first argument is a multiple of the second argument.
11038 Return 0 if it is not, or if we cannot easily determine it to be.
11040 An example of the sort of thing we care about (at this point; this routine
11041 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11042 fold cases do now) is discovering that
11044 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11046 is a multiple of
11048 SAVE_EXPR (J * 8)
11050 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11052 This code also handles discovering that
11054 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11056 is a multiple of 8 so we don't have to worry about dealing with a
11057 possible remainder.
11059 Note that we *look* inside a SAVE_EXPR only to determine how it was
11060 calculated; it is not safe for fold to do much of anything else with the
11061 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11062 at run time. For example, the latter example above *cannot* be implemented
11063 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11064 evaluation time of the original SAVE_EXPR is not necessarily the same at
11065 the time the new expression is evaluated. The only optimization of this
11066 sort that would be valid is changing
11068 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11070 divided by 8 to
11072 SAVE_EXPR (I) * SAVE_EXPR (J)
11074 (where the same SAVE_EXPR (J) is used in the original and the
11075 transformed version). */
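/* A hypothetical use of the predicate below:

     multiple_of_p (sizetype, top, size_int (8))

   returns 1 for TOP of the form J * 8 or I * 16 + J * 24, and 0 for
   anything it cannot prove, such as J * 4.  */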
11078 multiple_of_p (tree type, tree top, tree bottom)
11080 if (operand_equal_p (top, bottom, 0))
11083 if (TREE_CODE (type) != INTEGER_TYPE)
11086 switch (TREE_CODE (top))
11089 /* Bitwise and provides a power of two multiple. If the mask is
11090 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11091 if (!integer_pow2p (bottom))
11096 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11097 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11101 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11102 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11105 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11109 op1 = TREE_OPERAND (top, 1);
11110 /* const_binop may not detect overflow correctly,
11111 so check for it explicitly here. */
11112 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11113 > TREE_INT_CST_LOW (op1)
11114 && TREE_INT_CST_HIGH (op1) == 0
11115 && 0 != (t1 = fold_convert (type,
11116 const_binop (LSHIFT_EXPR,
11119 && ! TREE_OVERFLOW (t1))
11120 return multiple_of_p (type, t1, bottom);
11125 /* Can't handle conversions from non-integral or wider integral type. */
11126 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11127 || (TYPE_PRECISION (type)
11128 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11131 /* ... fall through ... */
11134 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11137 if (TREE_CODE (bottom) != INTEGER_CST
11138 || (TYPE_UNSIGNED (type)
11139 && (tree_int_cst_sgn (top) < 0
11140 || tree_int_cst_sgn (bottom) < 0)))
11142 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11150 /* Return true if `t' is known to be non-negative. */
11153 tree_expr_nonnegative_p (tree t)
11155 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11158 switch (TREE_CODE (t))
11161 /* Query VRP to see if it has recorded any information about
11162 the range of this object. */
11163 return ssa_name_nonnegative_p (t);
11166 /* We can't return 1 if flag_wrapv is set because
11167 ABS_EXPR<INT_MIN> = INT_MIN. */
11168 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11173 return tree_int_cst_sgn (t) >= 0;
11176 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11179 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11180 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11181 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11183 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11184 both unsigned and at least 2 bits shorter than the result. */
11185 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11186 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11187 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11189 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11190 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11191 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11192 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11194 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11195 TYPE_PRECISION (inner2)) + 1;
11196 return prec < TYPE_PRECISION (TREE_TYPE (t));
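/* Worked numbers for the test above (an assumed example): adding two
   zero-extended 8-bit values into a 32-bit int gives prec == 9 and
   9 < 32; indeed the sum is at most 255 + 255 == 510, which is
   non-negative and representable.  */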
11202 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11204 /* x * x for floating point x is always non-negative. */
11205 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11207 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11208 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11211 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11212 both unsigned and the sum of their precisions is smaller than that of the result. */
11213 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11214 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11215 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11217 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11218 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11219 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11220 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11221 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11222 < TYPE_PRECISION (TREE_TYPE (t));
11228 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11229 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11235 case TRUNC_DIV_EXPR:
11236 case CEIL_DIV_EXPR:
11237 case FLOOR_DIV_EXPR:
11238 case ROUND_DIV_EXPR:
11239 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11240 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11242 case TRUNC_MOD_EXPR:
11243 case CEIL_MOD_EXPR:
11244 case FLOOR_MOD_EXPR:
11245 case ROUND_MOD_EXPR:
11247 case NON_LVALUE_EXPR:
11249 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11251 case COMPOUND_EXPR:
11253 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11256 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11259 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11260 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11264 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11265 tree outer_type = TREE_TYPE (t);
11267 if (TREE_CODE (outer_type) == REAL_TYPE)
11269 if (TREE_CODE (inner_type) == REAL_TYPE)
11270 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11271 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11273 if (TYPE_UNSIGNED (inner_type))
11275 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11278 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11280 if (TREE_CODE (inner_type) == REAL_TYPE)
11281 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11282 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11283 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11284 && TYPE_UNSIGNED (inner_type);
11291 tree temp = TARGET_EXPR_SLOT (t);
11292 t = TARGET_EXPR_INITIAL (t);
11294 /* If the initializer is non-void, then it's a normal expression
11295 that will be assigned to the slot. */
11296 if (!VOID_TYPE_P (t))
11297 return tree_expr_nonnegative_p (t);
11299 /* Otherwise, the initializer sets the slot in some way. One common
11300 way is an assignment statement at the end of the initializer. */
11303 if (TREE_CODE (t) == BIND_EXPR)
11304 t = expr_last (BIND_EXPR_BODY (t));
11305 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11306 || TREE_CODE (t) == TRY_CATCH_EXPR)
11307 t = expr_last (TREE_OPERAND (t, 0));
11308 else if (TREE_CODE (t) == STATEMENT_LIST)
11313 if (TREE_CODE (t) == MODIFY_EXPR
11314 && TREE_OPERAND (t, 0) == temp)
11315 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11322 tree fndecl = get_callee_fndecl (t);
11323 tree arglist = TREE_OPERAND (t, 1);
11324 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11325 switch (DECL_FUNCTION_CODE (fndecl))
11327 CASE_FLT_FN (BUILT_IN_ACOS):
11328 CASE_FLT_FN (BUILT_IN_ACOSH):
11329 CASE_FLT_FN (BUILT_IN_CABS):
11330 CASE_FLT_FN (BUILT_IN_COSH):
11331 CASE_FLT_FN (BUILT_IN_ERFC):
11332 CASE_FLT_FN (BUILT_IN_EXP):
11333 CASE_FLT_FN (BUILT_IN_EXP10):
11334 CASE_FLT_FN (BUILT_IN_EXP2):
11335 CASE_FLT_FN (BUILT_IN_FABS):
11336 CASE_FLT_FN (BUILT_IN_FDIM):
11337 CASE_FLT_FN (BUILT_IN_HYPOT):
11338 CASE_FLT_FN (BUILT_IN_POW10):
11339 CASE_INT_FN (BUILT_IN_FFS):
11340 CASE_INT_FN (BUILT_IN_PARITY):
11341 CASE_INT_FN (BUILT_IN_POPCOUNT):
11345 CASE_FLT_FN (BUILT_IN_SQRT):
11346 /* sqrt(-0.0) is -0.0. */
11347 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11349 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11351 CASE_FLT_FN (BUILT_IN_ASINH):
11352 CASE_FLT_FN (BUILT_IN_ATAN):
11353 CASE_FLT_FN (BUILT_IN_ATANH):
11354 CASE_FLT_FN (BUILT_IN_CBRT):
11355 CASE_FLT_FN (BUILT_IN_CEIL):
11356 CASE_FLT_FN (BUILT_IN_ERF):
11357 CASE_FLT_FN (BUILT_IN_EXPM1):
11358 CASE_FLT_FN (BUILT_IN_FLOOR):
11359 CASE_FLT_FN (BUILT_IN_FMOD):
11360 CASE_FLT_FN (BUILT_IN_FREXP):
11361 CASE_FLT_FN (BUILT_IN_LCEIL):
11362 CASE_FLT_FN (BUILT_IN_LDEXP):
11363 CASE_FLT_FN (BUILT_IN_LFLOOR):
11364 CASE_FLT_FN (BUILT_IN_LLCEIL):
11365 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11366 CASE_FLT_FN (BUILT_IN_LLRINT):
11367 CASE_FLT_FN (BUILT_IN_LLROUND):
11368 CASE_FLT_FN (BUILT_IN_LRINT):
11369 CASE_FLT_FN (BUILT_IN_LROUND):
11370 CASE_FLT_FN (BUILT_IN_MODF):
11371 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11372 CASE_FLT_FN (BUILT_IN_POW):
11373 CASE_FLT_FN (BUILT_IN_RINT):
11374 CASE_FLT_FN (BUILT_IN_ROUND):
11375 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11376 CASE_FLT_FN (BUILT_IN_SINH):
11377 CASE_FLT_FN (BUILT_IN_TANH):
11378 CASE_FLT_FN (BUILT_IN_TRUNC):
11379 /* True if the 1st argument is nonnegative. */
11380 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11382 CASE_FLT_FN (BUILT_IN_FMAX):
11383 /* True if the 1st OR 2nd arguments are nonnegative. */
11384 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11385 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11387 CASE_FLT_FN (BUILT_IN_FMIN):
11388 /* True if the 1st AND 2nd arguments are nonnegative. */
11389 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11390 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11392 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11393 /* True if the 2nd argument is nonnegative. */
11394 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11401 /* ... fall through ... */
11404 if (truth_value_p (TREE_CODE (t)))
11405 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11409 /* We don't know the sign of `t', so be conservative and return false. */
11413 /* Return true when T is an address and is known to be nonzero.
11414 For floating point we further ensure that T is not denormal.
11415 Similar logic is present in nonzero_address in rtlanal.h. */
11418 tree_expr_nonzero_p (tree t)
11420 tree type = TREE_TYPE (t);
11422 /* Doing something useful for floating point would need more work. */
11423 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11426 switch (TREE_CODE (t))
11429 /* Query VRP to see if it has recorded any information about
11430 the range of this object. */
11431 return ssa_name_nonzero_p (t);
11434 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11437 /* We used to test for !integer_zerop here. This does not work correctly
11438 if TREE_CONSTANT_OVERFLOW (t). */
11439 return (TREE_INT_CST_LOW (t) != 0
11440 || TREE_INT_CST_HIGH (t) != 0);
11443 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11445 /* In the presence of negative values it is hard
11446 to say anything definite. */
11447 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11448 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11450 /* One of the operands must be positive and the other non-negative. */
11451 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11452 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11457 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11459 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11460 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11466 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11467 tree outer_type = TREE_TYPE (t);
11469 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
11470 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11476 tree base = get_base_address (TREE_OPERAND (t, 0));
11481 /* Weak declarations may link to NULL. */
11482 if (VAR_OR_FUNCTION_DECL_P (base))
11483 return !DECL_WEAK (base);
11485 /* Constants are never weak. */
11486 if (CONSTANT_CLASS_P (base))
11493 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11494 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11497 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11498 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11501 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11503 /* When both operands are nonzero, then MAX must be too. */
11504 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11507 /* MAX where operand 0 is positive is positive. */
11508 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11510 /* MAX where operand 1 is positive is positive. */
11511 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11512 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11516 case COMPOUND_EXPR:
11519 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11522 case NON_LVALUE_EXPR:
11523 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11526 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11527 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11530 return alloca_call_p (t);
11538 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11539 attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1.
11542 If the expression could be simplified to a constant, then return
11543 the constant. If the expression would not be simplified to a
11544 constant, then return NULL_TREE. */
11547 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11549 tree tem = fold_binary (code, type, op0, op1);
11550 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11553 /* Given the components of a unary expression CODE, TYPE and OP0,
11554 attempt to fold the expression to a constant without modifying TYPE or OP0.
11557 If the expression could be simplified to a constant, then return
11558 the constant. If the expression would not be simplified to a
11559 constant, then return NULL_TREE. */
11562 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11564 tree tem = fold_unary (code, type, op0);
11565 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11568 /* If EXP represents referencing an element in a constant string
11569 (either via pointer arithmetic or array indexing), return the
11570 tree representing the value accessed, otherwise return NULL. */
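/* E.g. "abc"[1] folds to 'b' and *("abc" + 2) folds to 'c'
   (hypothetical references into a STRING_CST).  */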
11573 fold_read_from_constant_string (tree exp)
11575 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11577 tree exp1 = TREE_OPERAND (exp, 0);
11581 if (TREE_CODE (exp) == INDIRECT_REF)
11582 string = string_constant (exp1, &index);
11585 tree low_bound = array_ref_low_bound (exp);
11586 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11588 /* Optimize the special-case of a zero lower bound.
11590 We convert the low_bound to sizetype to avoid some problems
11591 with constant folding. (E.g. suppose the lower bound is 1,
11592 and its mode is QI. Without the conversion, (ARRAY
11593 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11594 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11595 if (! integer_zerop (low_bound))
11596 index = size_diffop (index, fold_convert (sizetype, low_bound));
11602 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11603 && TREE_CODE (string) == STRING_CST
11604 && TREE_CODE (index) == INTEGER_CST
11605 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11606 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11608 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11609 return fold_convert (TREE_TYPE (exp),
11610 build_int_cst (NULL_TREE,
11611 (TREE_STRING_POINTER (string)
11612 [TREE_INT_CST_LOW (index)])));
11617 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11618 an integer constant or real constant.
11620 TYPE is the type of the result. */
11623 fold_negate_const (tree arg0, tree type)
11625 tree t = NULL_TREE;
11627 switch (TREE_CODE (arg0))
11631 unsigned HOST_WIDE_INT low;
11632 HOST_WIDE_INT high;
11633 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11634 TREE_INT_CST_HIGH (arg0),
11636 t = build_int_cst_wide (type, low, high);
11637 t = force_fit_type (t, 1,
11638 (overflow | TREE_OVERFLOW (arg0))
11639 && !TYPE_UNSIGNED (type),
11640 TREE_CONSTANT_OVERFLOW (arg0));
11645 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11649 gcc_unreachable ();
11655 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11656 an integer constant or real constant.
11658 TYPE is the type of the result. */
11661 fold_abs_const (tree arg0, tree type)
11663 tree t = NULL_TREE;
11665 switch (TREE_CODE (arg0))
11668 /* If the value is unsigned, then the absolute value is
11669 the same as the ordinary value. */
11670 if (TYPE_UNSIGNED (type))
11672 /* Similarly, if the value is non-negative. */
11673 else if (INT_CST_LT (integer_minus_one_node, arg0))
11675 /* If the value is negative, then the absolute value is its negation. */
11679 unsigned HOST_WIDE_INT low;
11680 HOST_WIDE_INT high;
11681 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11682 TREE_INT_CST_HIGH (arg0),
11684 t = build_int_cst_wide (type, low, high);
11685 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11686 TREE_CONSTANT_OVERFLOW (arg0));
11691 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11692 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11698 gcc_unreachable ();
11704 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11705 constant. TYPE is the type of the result. */
11708 fold_not_const (tree arg0, tree type)
11710 tree t = NULL_TREE;
11712 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11714 t = build_int_cst_wide (type,
11715 ~ TREE_INT_CST_LOW (arg0),
11716 ~ TREE_INT_CST_HIGH (arg0));
11717 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11718 TREE_CONSTANT_OVERFLOW (arg0));
11723 /* Given CODE, a relational operator, the target type, TYPE and two
11724 constant operands OP0 and OP1, return the result of the
11725 relational operation. If the result is not a compile time
11726 constant, then return NULL_TREE. */
11729 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11731 int result, invert;
11733 /* From here on, the only cases we handle are when the result is
11734 known to be a constant. */
11736 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11738 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11739 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11741 /* Handle the cases where either operand is a NaN. */
11742 if (real_isnan (c0) || real_isnan (c1))
11752 case UNORDERED_EXPR:
11766 if (flag_trapping_math)
11772 gcc_unreachable ();
11775 return constant_boolean_node (result, type);
11778 return constant_boolean_node (real_compare (code, c0, c1), type);
11781 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11783 To compute GT, swap the arguments and do LT.
11784 To compute GE, do LT and invert the result.
11785 To compute LE, swap the arguments, do LT and invert the result.
11786 To compute NE, do EQ and invert the result.
11788 Therefore, the code below must handle only EQ and LT. */
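/* E.g. 2 > 1 is computed as 1 < 2, and 1 >= 2 as !(1 < 2) -- a
   sketch of the reductions described above.  */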
11790 if (code == LE_EXPR || code == GT_EXPR)
11795 code = swap_tree_comparison (code);
11798 /* Note that it is safe to invert for real values here because we
11799 have already handled the one case where it matters. */
11802 if (code == NE_EXPR || code == GE_EXPR)
11805 code = invert_tree_comparison (code, false);
11808 /* Compute a result for LT or EQ if args permit;
11809 otherwise return T. */
11810 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11812 if (code == EQ_EXPR)
11813 result = tree_int_cst_equal (op0, op1);
11814 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11815 result = INT_CST_LT_UNSIGNED (op0, op1);
11817 result = INT_CST_LT (op0, op1);
11824 return constant_boolean_node (result, type);
11827 /* Build an expression for a cleanup point containing EXPR with type TYPE.
11828 Don't build a cleanup point expression for EXPR which doesn't have side effects. */
11832 fold_build_cleanup_point_expr (tree type, tree expr)
11834 /* If the expression does not have side effects then we don't have to wrap
11835 it with a cleanup point expression. */
11836 if (!TREE_SIDE_EFFECTS (expr))
11839 /* If the expression is a return, check to see if the expression inside the
11840 return has no side effects or the right hand side of the modify expression
11841 inside the return. If either has no side effects, we don't need to
11842 wrap the expression in a cleanup point expression. Note we don't check the
11843 left hand side of the modify because it should always be a return decl. */
11844 if (TREE_CODE (expr) == RETURN_EXPR)
11846 tree op = TREE_OPERAND (expr, 0);
11847 if (!op || !TREE_SIDE_EFFECTS (op))
11849 op = TREE_OPERAND (op, 1);
11850 if (!TREE_SIDE_EFFECTS (op))
11854 return build1 (CLEANUP_POINT_EXPR, type, expr);
11857 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11858 avoid confusing the gimplify process. */
11861 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11863 /* The size of the object is not relevant when talking about its address. */
11864 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11865 t = TREE_OPERAND (t, 0);
11867 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11868 if (TREE_CODE (t) == INDIRECT_REF
11869 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11871 t = TREE_OPERAND (t, 0);
11872 if (TREE_TYPE (t) != ptrtype)
11873 t = build1 (NOP_EXPR, ptrtype, t);
11879 while (handled_component_p (base))
11880 base = TREE_OPERAND (base, 0);
11882 TREE_ADDRESSABLE (base) = 1;
11884 t = build1 (ADDR_EXPR, ptrtype, t);
11891 build_fold_addr_expr (tree t)
11893 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11896 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11897 of an indirection through OP0, or NULL_TREE if no simplification is possible. */
11901 fold_indirect_ref_1 (tree type, tree op0)
11907 subtype = TREE_TYPE (sub);
11908 if (!POINTER_TYPE_P (subtype))
11911 if (TREE_CODE (sub) == ADDR_EXPR)
11913 tree op = TREE_OPERAND (sub, 0);
11914 tree optype = TREE_TYPE (op);
11915 /* *&p => p; make sure to handle *&"str"[cst] here. */
11916 if (type == optype)
11918 tree fop = fold_read_from_constant_string (op);
11924 /* *(foo *)&fooarray => fooarray[0] */
11925 else if (TREE_CODE (optype) == ARRAY_TYPE
11926 && type == TREE_TYPE (optype))
11928 tree type_domain = TYPE_DOMAIN (optype);
11929 tree min_val = size_zero_node;
11930 if (type_domain && TYPE_MIN_VALUE (type_domain))
11931 min_val = TYPE_MIN_VALUE (type_domain);
11932 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11934 /* *(foo *)&complexfoo => __real__ complexfoo */
11935 else if (TREE_CODE (optype) == COMPLEX_TYPE
11936 && type == TREE_TYPE (optype))
11937 return fold_build1 (REALPART_EXPR, type, op);
11940 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
11941 if (TREE_CODE (sub) == PLUS_EXPR
11942 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
11944 tree op00 = TREE_OPERAND (sub, 0);
11945 tree op01 = TREE_OPERAND (sub, 1);
11949 op00type = TREE_TYPE (op00);
11950 if (TREE_CODE (op00) == ADDR_EXPR
11951 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
11952 && type == TREE_TYPE (TREE_TYPE (op00type)))
11954 tree size = TYPE_SIZE_UNIT (type);
11955 if (tree_int_cst_equal (size, op01))
11956 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
11960 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11961 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11962 && type == TREE_TYPE (TREE_TYPE (subtype)))
11965 tree min_val = size_zero_node;
11966 sub = build_fold_indirect_ref (sub);
11967 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11968 if (type_domain && TYPE_MIN_VALUE (type_domain))
11969 min_val = TYPE_MIN_VALUE (type_domain);
11970 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
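/* As a concrete example of the last case above: if "p" has type
   "double (*)[4]", then "*(double *) p" is rewritten as "(*p)[0]",
   exposing an ARRAY_REF that later folding understands.  */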
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;

  return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
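/* For example, if the value of "x + foo ()" is ignored, only the call
   has side effects, so the expression reduces to "foo ()"; an ignored
   COMPOUND_EXPR "(a, b)" whose second operand is side-effect-free
   reduces to "a", which is then stripped further on the next
   iteration.  */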
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
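/* A stand-alone sketch (not GCC code) of the power-of-two path above.
   "d == (d & -d)" tests that d has a single bit set, and
   "(v + d - 1) & -d" rounds up, since -d == ~(d - 1) in two's
   complement: e.g. v = 13, d = 8 gives (13 + 7) & ~7 == 16.  The
   helper below is hypothetical, for illustration only.  */
#if 0
#include <assert.h>

static unsigned long
round_up_pow2 (unsigned long v, unsigned long d)
{
  assert (d != 0 && d == (d & -d));  /* D must be a power of two.  */
  return (v + d - 1) & -d;           /* Add d-1, then clear the low bits.  */
}
#endif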
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
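/* The round_down counterpart of the bit trick: for a power-of-two
   divisor d, "v & -d" simply clears the low bits, e.g. 13 & -8 == 8;
   for other divisors a FLOOR_DIV_EXPR followed by a MULT_EXPR is
   used instead.  */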
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
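/* For example, for EXP == "&a.f[i]" this returns "&a" as the core,
   stores the constant part of the displacement (in bits) in *PBITPOS,
   and stores the variable part, here a function of "i", in *POFFSET
   (or NULL_TREE when the whole offset is constant).  */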
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
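/* For example, "&a[3]" and "&a[1]" share the core "&a" and have
   constant bit positions differing by 2 * sizeof (a[0]) * BITS_PER_UNIT,
   so *DIFF is set to 2 * sizeof (a[0]) and true is returned; "&a[i]"
   vs. "&a[0]" has a variable offset on one side only, so the result
   is false.  */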
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));