1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit-based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
137 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
138 and SUM1. Then this yields nonzero if overflow occurred during the
141 Overflow occurs if A and B have the same sign, but A and SUM differ in
142 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
144 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
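
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): a minimal demonstration of the sign test above, using plain
   `int' in place of HOST_WIDE_INT.  The helper name is hypothetical.  */
static int
example_overflow_sum_sign (void)
{
  int a = (int) (~0u >> 1), b = 1;			/* a = INT_MAX */
  int sum = (int) ((unsigned int) a + (unsigned int) b);	/* wraps negative */
  /* A and B share a sign but SUM differs in sign, so the test is nonzero,
     signalling that the two's complement addition overflowed.  */
  return (~(a ^ b) & (a ^ sum)) < 0;
}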
146 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
147 We do that by representing the two-word integer in 4 words, with only
148 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
149 number. The value of the word is LOWPART + HIGHPART * BASE. */
152 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
153 #define HIGHPART(x) \
154 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
155 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
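
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): with a 64-bit HOST_WIDE_INT, BASE is 2^32 and every word splits
   into two half-words, so the identity below holds for any value.  The
   helper name is hypothetical.  */
static int
example_lowpart_highpart (unsigned HOST_WIDE_INT x)
{
  /* This is exactly the representation described above:
     x == LOWPART (x) + HIGHPART (x) * BASE.  */
  return x == LOWPART (x) + HIGHPART (x) * BASE;
}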
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
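
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): encode followed by decode reproduces the original (LOW, HI)
   pair, since the four half-words carry the full two-word value.  The
   helper name is hypothetical.  */
static int
example_encode_decode_roundtrip (unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low2;
  HOST_WIDE_INT hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  return low2 == low && hi2 == hi;
}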
182 /* T is an INT_CST node. OVERFLOWABLE indicates whether we are interested
183 in overflow of the value; when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOW if
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs,
191 or OVERFLOWABLE is <0 and any overflow occurs.
192 We set TREE_CONSTANT_OVERFLOW if
193 CONST_OVERFLOWED is nonzero,
194 or we set TREE_OVERFLOW.
195 We return either the original T, or a copy. */
198 force_fit_type (tree t, int overflowable,
199 bool overflowed, bool overflowed_const)
201 unsigned HOST_WIDE_INT low;
204 int sign_extended_type;
206 gcc_assert (TREE_CODE (t) == INTEGER_CST);
208 low = TREE_INT_CST_LOW (t);
209 high = TREE_INT_CST_HIGH (t);
211 if (POINTER_TYPE_P (TREE_TYPE (t))
212 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = TYPE_PRECISION (TREE_TYPE (t));
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
218 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 low &= ~((HOST_WIDE_INT) (-1) << prec);
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (high & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)low < 0)
252 /* Sign extend bottom half? */
253 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 low |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value changed, return a new node. */
261 if (overflowed || overflowed_const
262 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264 t = build_int_cst_wide (TREE_TYPE (t), low, high);
268 || (overflowable > 0 && sign_extended_type))
271 TREE_OVERFLOW (t) = 1;
272 TREE_CONSTANT_OVERFLOW (t) = 1;
274 else if (overflowed_const)
277 TREE_CONSTANT_OVERFLOW (t) = 1;
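
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): the mask-then-sign-extend step that force_fit_type performs,
   shown only for the simple case PREC < HOST_BITS_PER_WIDE_INT.  The
   helper name is hypothetical.  */
static unsigned HOST_WIDE_INT
example_fit_low_word (unsigned HOST_WIDE_INT low, unsigned int prec,
                      int sign_extended_type)
{
  /* First clear all bits that are beyond the precision...  */
  low &= ~((HOST_WIDE_INT) (-1) << prec);
  /* ...then, for sign-extended types, copy the new sign bit upwards.  */
  if (sign_extended_type
      && (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1))))
    low |= (HOST_WIDE_INT) (-1) << prec;
  return low;
}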
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
294 unsigned HOST_WIDE_INT l;
298 h = h1 + h2 + (l < l1);
302 return OVERFLOW_SUM_SIGN (h1, h2, h);
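
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): a carry out of the low word propagates into the high word, and
   no signed overflow is reported because both high words are zero.  The
   helper name is hypothetical.  */
static int
example_add_double_carry (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lv, &hv);
  return !ovf && lv == 0 && hv == 1;
}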
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
311 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
318 return (*hv & h1) < 0;
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
335 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
336 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
337 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
339 HOST_WIDE_INT arg1[4];
340 HOST_WIDE_INT arg2[4];
341 HOST_WIDE_INT prod[4 * 2];
342 unsigned HOST_WIDE_INT carry;
344 unsigned HOST_WIDE_INT toplow, neglow;
345 HOST_WIDE_INT tophigh, neghigh;
347 encode (arg1, l1, h1);
348 encode (arg2, l2, h2);
350 memset (prod, 0, sizeof prod);
352 for (i = 0; i < 4; i++)
355 for (j = 0; j < 4; j++)
358 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
359 carry += arg1[i] * arg2[j];
360 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
362 prod[k] = LOWPART (carry);
363 carry = HIGHPART (carry);
368 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
370 /* Check for overflow by calculating the top half of the answer in full;
371 it should agree with the low half's sign bit. */
372 decode (prod + 4, &toplow, &tophigh);
375 neg_double (l2, h2, &neglow, &neghigh);
376 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
380 neg_double (l1, h1, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
383 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
393 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
394 HOST_WIDE_INT count, unsigned int prec,
395 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
397 unsigned HOST_WIDE_INT signmask;
401 rshift_double (l1, h1, -count, prec, lv, hv, arith);
405 if (SHIFT_COUNT_TRUNCATED)
408 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
410 /* Shifting by the host word size is undefined according to the
411 ANSI standard, so we must handle this as a special case. */
415 else if (count >= HOST_BITS_PER_WIDE_INT)
417 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
422 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
423 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
427 /* Sign extend all bits that are beyond the precision. */
429 signmask = -((prec > HOST_BITS_PER_WIDE_INT
430 ? ((unsigned HOST_WIDE_INT) *hv
431 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
432 : (*lv >> (prec - 1))) & 1);
434 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
436 else if (prec >= HOST_BITS_PER_WIDE_INT)
438 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
439 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
444 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
445 *lv |= signmask << prec;
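
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): shifting 1 left by a full host word moves the bit entirely into
   the high word when the precision spans both words.  The helper name is
   hypothetical.  */
static int
example_lshift_double (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT, 2 * HOST_BITS_PER_WIDE_INT,
                 &lv, &hv, 0);
  return lv == 0 && hv == 1;
}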
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
455 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
456 HOST_WIDE_INT count, unsigned int prec,
457 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
460 unsigned HOST_WIDE_INT signmask;
463 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
466 if (SHIFT_COUNT_TRUNCATED)
469 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
471 /* Shifting by the host word size is undefined according to the
472 ANSI standard, so we must handle this as a special case. */
476 else if (count >= HOST_BITS_PER_WIDE_INT)
479 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
483 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
485 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
488 /* Zero / sign extend all bits that are beyond the precision. */
490 if (count >= (HOST_WIDE_INT)prec)
495 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
497 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
499 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
500 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
505 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
506 *lv |= signmask << (prec - count);
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
516 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517 HOST_WIDE_INT count, unsigned int prec,
518 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
520 unsigned HOST_WIDE_INT s1l, s2l;
521 HOST_WIDE_INT s1h, s2h;
527 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
533 /* Rotate the doubleword integer in L1, H1 right by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
565 div_and_round_double (enum tree_code code, int uns,
566 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
567 HOST_WIDE_INT hnum_orig,
568 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
569 HOST_WIDE_INT hden_orig,
570 unsigned HOST_WIDE_INT *lquo,
571 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
575 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
576 HOST_WIDE_INT den[4], quo[4];
578 unsigned HOST_WIDE_INT work;
579 unsigned HOST_WIDE_INT carry = 0;
580 unsigned HOST_WIDE_INT lnum = lnum_orig;
581 HOST_WIDE_INT hnum = hnum_orig;
582 unsigned HOST_WIDE_INT lden = lden_orig;
583 HOST_WIDE_INT hden = hden_orig;
586 if (hden == 0 && lden == 0)
587 overflow = 1, lden = 1;
589 /* Calculate quotient sign and convert operands to unsigned. */
595 /* (minimum integer) / (-1) is the only overflow case. */
596 if (neg_double (lnum, hnum, &lnum, &hnum)
597 && ((HOST_WIDE_INT) lden & hden) == -1)
603 neg_double (lden, hden, &lden, &hden);
607 if (hnum == 0 && hden == 0)
608 { /* single precision */
610 /* This unsigned division rounds toward zero. */
616 { /* trivial case: dividend < divisor */
617 /* hden != 0 already checked. */
624 memset (quo, 0, sizeof quo);
626 memset (num, 0, sizeof num); /* to zero 9th element */
627 memset (den, 0, sizeof den);
629 encode (num, lnum, hnum);
630 encode (den, lden, hden);
632 /* Special code for when the divisor < BASE. */
633 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
635 /* hnum != 0 already checked. */
636 for (i = 4 - 1; i >= 0; i--)
638 work = num[i] + carry * BASE;
639 quo[i] = work / lden;
645 /* Full double precision division,
646 with thanks to Don Knuth's "Seminumerical Algorithms". */
647 int num_hi_sig, den_hi_sig;
648 unsigned HOST_WIDE_INT quo_est, scale;
650 /* Find the highest nonzero divisor digit. */
651 for (i = 4 - 1;; i--)
658 /* Ensure that the first digit of the divisor is at least BASE/2.
659 This is required by the quotient digit estimation algorithm. */
661 scale = BASE / (den[den_hi_sig] + 1);
663 { /* scale divisor and dividend */
665 for (i = 0; i <= 4 - 1; i++)
667 work = (num[i] * scale) + carry;
668 num[i] = LOWPART (work);
669 carry = HIGHPART (work);
674 for (i = 0; i <= 4 - 1; i++)
676 work = (den[i] * scale) + carry;
677 den[i] = LOWPART (work);
678 carry = HIGHPART (work);
679 if (den[i] != 0) den_hi_sig = i;
686 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
688 /* Guess the next quotient digit, quo_est, by dividing the first
689 two remaining dividend digits by the high order quotient digit.
690 quo_est is never low and is at most 2 high. */
691 unsigned HOST_WIDE_INT tmp;
693 num_hi_sig = i + den_hi_sig + 1;
694 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
695 if (num[num_hi_sig] != den[den_hi_sig])
696 quo_est = work / den[den_hi_sig];
700 /* Refine quo_est so it's usually correct, and at most one high. */
701 tmp = work - quo_est * den[den_hi_sig];
703 && (den[den_hi_sig - 1] * quo_est
704 > (tmp * BASE + num[num_hi_sig - 2])))
707 /* Try QUO_EST as the quotient digit, by multiplying the
708 divisor by QUO_EST and subtracting from the remaining dividend.
709 Keep in mind that QUO_EST is the I - 1st digit. */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = quo_est * den[j] + carry;
715 carry = HIGHPART (work);
716 work = num[i + j] - LOWPART (work);
717 num[i + j] = LOWPART (work);
718 carry += HIGHPART (work) != 0;
721 /* If quo_est was high by one, then num[i] went negative and
722 we need to correct things. */
723 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
726 carry = 0; /* add divisor back in */
727 for (j = 0; j <= den_hi_sig; j++)
729 work = num[i + j] + den[j] + carry;
730 carry = HIGHPART (work);
731 num[i + j] = LOWPART (work);
734 num [num_hi_sig] += carry;
737 /* Store the quotient digit. */
742 decode (quo, lquo, hquo);
745 /* If result is negative, make it so. */
747 neg_double (*lquo, *hquo, lquo, hquo);
749 /* Compute trial remainder: rem = num - (quo * den) */
750 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
751 neg_double (*lrem, *hrem, lrem, hrem);
752 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
757 case TRUNC_MOD_EXPR: /* round toward zero */
758 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
762 case FLOOR_MOD_EXPR: /* round toward negative infinity */
763 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
766 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 case CEIL_MOD_EXPR: /* round toward positive infinity */
775 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
777 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 case ROUND_MOD_EXPR: /* round to closest integer */
787 unsigned HOST_WIDE_INT labs_rem = *lrem;
788 HOST_WIDE_INT habs_rem = *hrem;
789 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
790 HOST_WIDE_INT habs_den = hden, htwice;
792 /* Get absolute values. */
794 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
796 neg_double (lden, hden, &labs_den, &habs_den);
798 /* If (2 * abs (lrem) >= abs (lden)) */
799 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
800 labs_rem, habs_rem, <wice, &htwice);
802 if (((unsigned HOST_WIDE_INT) habs_den
803 < (unsigned HOST_WIDE_INT) htwice)
804 || (((unsigned HOST_WIDE_INT) habs_den
805 == (unsigned HOST_WIDE_INT) htwice)
806 && (labs_den < ltwice)))
810 add_double (*lquo, *hquo,
811 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
814 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 /* Compute true remainder: rem = num - (quo * den) */
827 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
828 neg_double (*lrem, *hrem, lrem, hrem);
829 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
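
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): the rounding modes differ for negative quotients; -7 divided by
   2 is -3 remainder -1 under TRUNC_DIV_EXPR but -4 remainder 1 under
   FLOOR_DIV_EXPR.  The helper name is hypothetical.  */
static int
example_floor_div (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;
  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        2, 0, &lquo, &hquo, &lrem, &hrem);
  return lquo == (unsigned HOST_WIDE_INT) -4 && hquo == -1
         && lrem == 1 && hrem == 0;
}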
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
838 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
840 unsigned HOST_WIDE_INT int1l, int2l;
841 HOST_WIDE_INT int1h, int2h;
842 unsigned HOST_WIDE_INT quol, reml;
843 HOST_WIDE_INT quoh, remh;
844 tree type = TREE_TYPE (arg1);
845 int uns = TYPE_UNSIGNED (type);
847 int1l = TREE_INT_CST_LOW (arg1);
848 int1h = TREE_INT_CST_HIGH (arg1);
849 int2l = TREE_INT_CST_LOW (arg2);
850 int2h = TREE_INT_CST_HIGH (arg2);
852 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853 &quol, &quoh, &reml, &remh);
854 if (remh != 0 || reml != 0)
857 return build_int_cst_wide (type, quol, quoh);
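
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): div_if_zero_remainder yields N / 4 only when 4 divides N
   exactly, and NULL_TREE otherwise.  Assumes N is an INTEGER_CST; the
   helper name is hypothetical.  */
static tree
example_exact_quarter (tree n)
{
  return div_if_zero_remainder (EXACT_DIV_EXPR, n,
                                build_int_cst (TREE_TYPE (n), 4));
}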
860 /* Return true if the built-in mathematical function specified by CODE
861 is odd, i.e. -f(x) == f(-x). */
864 negate_mathfn_p (enum built_in_function code)
868 CASE_FLT_FN (BUILT_IN_ASIN):
869 CASE_FLT_FN (BUILT_IN_ASINH):
870 CASE_FLT_FN (BUILT_IN_ATAN):
871 CASE_FLT_FN (BUILT_IN_ATANH):
872 CASE_FLT_FN (BUILT_IN_CBRT):
873 CASE_FLT_FN (BUILT_IN_SIN):
874 CASE_FLT_FN (BUILT_IN_SINH):
875 CASE_FLT_FN (BUILT_IN_TAN):
876 CASE_FLT_FN (BUILT_IN_TANH):
885 /* Check whether we may negate an integer constant T without causing
889 may_negate_without_overflow_p (tree t)
891 unsigned HOST_WIDE_INT val;
895 gcc_assert (TREE_CODE (t) == INTEGER_CST);
897 type = TREE_TYPE (t);
898 if (TYPE_UNSIGNED (type))
901 prec = TYPE_PRECISION (type);
902 if (prec > HOST_BITS_PER_WIDE_INT)
904 if (TREE_INT_CST_LOW (t) != 0)
906 prec -= HOST_BITS_PER_WIDE_INT;
907 val = TREE_INT_CST_HIGH (t);
910 val = TREE_INT_CST_LOW (t);
911 if (prec < HOST_BITS_PER_WIDE_INT)
912 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
913 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
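
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): for an 8-bit signed type the only value that fails the check
   above is -128, whose bit pattern equals 1 << (prec - 1).  The helper
   name is hypothetical.  */
static int
example_may_negate_8bit (unsigned HOST_WIDE_INT val)
{
  unsigned int prec = 8;
  val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}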
916 /* Determine whether an expression T can be cheaply negated using
917 the function negate_expr. */
920 negate_expr_p (tree t)
927 type = TREE_TYPE (t);
930 switch (TREE_CODE (t))
933 if (TYPE_UNSIGNED (type) || ! flag_trapv)
936 /* Check that -CST will not overflow type. */
937 return may_negate_without_overflow_p (t);
939 return INTEGRAL_TYPE_P (type);
946 return negate_expr_p (TREE_REALPART (t))
947 && negate_expr_p (TREE_IMAGPART (t));
950 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
952 /* -(A + B) -> (-B) - A. */
953 if (negate_expr_p (TREE_OPERAND (t, 1))
954 && reorder_operands_p (TREE_OPERAND (t, 0),
955 TREE_OPERAND (t, 1)))
957 /* -(A + B) -> (-A) - B. */
958 return negate_expr_p (TREE_OPERAND (t, 0));
961 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
962 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
963 && reorder_operands_p (TREE_OPERAND (t, 0),
964 TREE_OPERAND (t, 1));
967 if (TYPE_UNSIGNED (TREE_TYPE (t)))
973 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
974 return negate_expr_p (TREE_OPERAND (t, 1))
975 || negate_expr_p (TREE_OPERAND (t, 0));
983 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
985 return negate_expr_p (TREE_OPERAND (t, 1))
986 || negate_expr_p (TREE_OPERAND (t, 0));
989 /* Negate -((double)float) as (double)(-float). */
990 if (TREE_CODE (type) == REAL_TYPE)
992 tree tem = strip_float_extensions (t);
994 return negate_expr_p (tem);
999 /* Negate -f(x) as f(-x). */
1000 if (negate_mathfn_p (builtin_mathfn_code (t)))
1001 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1005 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1006 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1008 tree op1 = TREE_OPERAND (t, 1);
1009 if (TREE_INT_CST_HIGH (op1) == 0
1010 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1011 == TREE_INT_CST_LOW (op1))
1022 /* Given T, an expression, return the negation of T. Allow for T to be
1023 null, in which case return null. */
1026 negate_expr (tree t)
1034 type = TREE_TYPE (t);
1035 STRIP_SIGN_NOPS (t);
1037 switch (TREE_CODE (t))
1039 /* Convert - (~A) to A + 1. */
1041 if (INTEGRAL_TYPE_P (type))
1042 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1043 build_int_cst (type, 1));
1047 tem = fold_negate_const (t, type);
1048 if (! TREE_OVERFLOW (tem)
1049 || TYPE_UNSIGNED (type)
1055 tem = fold_negate_const (t, type);
1056 /* Two's complement FP formats, such as c4x, may overflow. */
1057 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1058 return fold_convert (type, tem);
1063 tree rpart = negate_expr (TREE_REALPART (t));
1064 tree ipart = negate_expr (TREE_IMAGPART (t));
1066 if ((TREE_CODE (rpart) == REAL_CST
1067 && TREE_CODE (ipart) == REAL_CST)
1068 || (TREE_CODE (rpart) == INTEGER_CST
1069 && TREE_CODE (ipart) == INTEGER_CST))
1070 return build_complex (type, rpart, ipart);
1075 return fold_convert (type, TREE_OPERAND (t, 0));
1078 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1080 /* -(A + B) -> (-B) - A. */
1081 if (negate_expr_p (TREE_OPERAND (t, 1))
1082 && reorder_operands_p (TREE_OPERAND (t, 0),
1083 TREE_OPERAND (t, 1)))
1085 tem = negate_expr (TREE_OPERAND (t, 1));
1086 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1087 tem, TREE_OPERAND (t, 0));
1088 return fold_convert (type, tem);
1091 /* -(A + B) -> (-A) - B. */
1092 if (negate_expr_p (TREE_OPERAND (t, 0)))
1094 tem = negate_expr (TREE_OPERAND (t, 0));
1095 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1096 tem, TREE_OPERAND (t, 1));
1097 return fold_convert (type, tem);
1103 /* - (A - B) -> B - A */
1104 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1105 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1106 return fold_convert (type,
1107 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1108 TREE_OPERAND (t, 1),
1109 TREE_OPERAND (t, 0)));
1113 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1119 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1121 tem = TREE_OPERAND (t, 1);
1122 if (negate_expr_p (tem))
1123 return fold_convert (type,
1124 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1125 TREE_OPERAND (t, 0),
1126 negate_expr (tem)));
1127 tem = TREE_OPERAND (t, 0);
1128 if (negate_expr_p (tem))
1129 return fold_convert (type,
1130 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1132 TREE_OPERAND (t, 1)));
1136 case TRUNC_DIV_EXPR:
1137 case ROUND_DIV_EXPR:
1138 case FLOOR_DIV_EXPR:
1140 case EXACT_DIV_EXPR:
1141 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1143 tem = TREE_OPERAND (t, 1);
1144 if (negate_expr_p (tem))
1145 return fold_convert (type,
1146 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1147 TREE_OPERAND (t, 0),
1148 negate_expr (tem)));
1149 tem = TREE_OPERAND (t, 0);
1150 if (negate_expr_p (tem))
1151 return fold_convert (type,
1152 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1154 TREE_OPERAND (t, 1)));
1159 /* Convert -((double)float) into (double)(-float). */
1160 if (TREE_CODE (type) == REAL_TYPE)
1162 tem = strip_float_extensions (t);
1163 if (tem != t && negate_expr_p (tem))
1164 return fold_convert (type, negate_expr (tem));
1169 /* Negate -f(x) as f(-x). */
1170 if (negate_mathfn_p (builtin_mathfn_code (t))
1171 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1173 tree fndecl, arg, arglist;
1175 fndecl = get_callee_fndecl (t);
1176 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1177 arglist = build_tree_list (NULL_TREE, arg);
1178 return build_function_call_expr (fndecl, arglist);
1183 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1184 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1186 tree op1 = TREE_OPERAND (t, 1);
1187 if (TREE_INT_CST_HIGH (op1) == 0
1188 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1189 == TREE_INT_CST_LOW (op1))
1191 tree ntype = TYPE_UNSIGNED (type)
1192 ? lang_hooks.types.signed_type (type)
1193 : lang_hooks.types.unsigned_type (type);
1194 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1195 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1196 return fold_convert (type, temp);
1205 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1206 return fold_convert (type, tem);
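
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): the "- (~A) to A + 1" case above means negating ~X yields
   X + 1.  Assumes X has an integral type; the helper name is
   hypothetical.  */
static tree
example_negate_bit_not (tree x)
{
  return negate_expr (build1 (BIT_NOT_EXPR, TREE_TYPE (x), x));
}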
1209 /* Split a tree IN into constant, literal and variable parts that could be
1210 combined with CODE to make IN. "constant" means an expression with
1211 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1212 commutative arithmetic operation. Store the constant part into *CONP,
1213 the literal in *LITP and return the variable part. If a part isn't
1214 present, set it to null. If the tree does not decompose in this way,
1215 return the entire tree as the variable part and the other parts as null.
1217 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1218 case, we negate an operand that was subtracted. Except if it is a
1219 literal for which we use *MINUS_LITP instead.
1221 If NEGATE_P is true, we are negating all of IN, again except a literal
1222 for which we use *MINUS_LITP instead.
1224 If IN is itself a literal or constant, return it as appropriate.
1226 Note that we do not guarantee that any of the three values will be the
1227 same type as IN, but they will have the same signedness and mode. */
1230 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1231 tree *minus_litp, int negate_p)
1239 /* Strip any conversions that don't change the machine mode or signedness. */
1240 STRIP_SIGN_NOPS (in);
1242 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1244 else if (TREE_CODE (in) == code
1245 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1246 /* We can associate addition and subtraction together (even
1247 though the C standard doesn't say so) for integers because
1248 the value is not affected. For reals, the value might be
1249 affected, so we can't. */
1250 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1251 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1253 tree op0 = TREE_OPERAND (in, 0);
1254 tree op1 = TREE_OPERAND (in, 1);
1255 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1256 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1258 /* First see if either of the operands is a literal, then a constant. */
1259 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1260 *litp = op0, op0 = 0;
1261 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1262 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1264 if (op0 != 0 && TREE_CONSTANT (op0))
1265 *conp = op0, op0 = 0;
1266 else if (op1 != 0 && TREE_CONSTANT (op1))
1267 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1269 /* If we haven't dealt with either operand, this is not a case we can
1270 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1271 if (op0 != 0 && op1 != 0)
1276 var = op1, neg_var_p = neg1_p;
1278 /* Now do any needed negations. */
1280 *minus_litp = *litp, *litp = 0;
1282 *conp = negate_expr (*conp);
1284 var = negate_expr (var);
1286 else if (TREE_CONSTANT (in))
1294 *minus_litp = *litp, *litp = 0;
1295 else if (*minus_litp)
1296 *litp = *minus_litp, *minus_litp = 0;
1297 *conp = negate_expr (*conp);
1298 var = negate_expr (var);
1304 /* Re-associate trees split by the above function. T1 and T2 are either
1305 expressions to associate or null. Return the new expression, if any. If
1306 we build an operation, do it in TYPE and with CODE. */
1309 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1316 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1317 try to fold this since we will have infinite recursion. But do
1318 deal with any NEGATE_EXPRs. */
1319 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1320 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1322 if (code == PLUS_EXPR)
1324 if (TREE_CODE (t1) == NEGATE_EXPR)
1325 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1326 fold_convert (type, TREE_OPERAND (t1, 0)));
1327 else if (TREE_CODE (t2) == NEGATE_EXPR)
1328 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1329 fold_convert (type, TREE_OPERAND (t2, 0)));
1330 else if (integer_zerop (t2))
1331 return fold_convert (type, t1);
1333 else if (code == MINUS_EXPR)
1335 if (integer_zerop (t2))
1336 return fold_convert (type, t1);
1339 return build2 (code, type, fold_convert (type, t1),
1340 fold_convert (type, t2));
1343 return fold_build2 (code, type, fold_convert (type, t1),
1344 fold_convert (type, t2));
1347 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1348 to produce a new constant. Return NULL_TREE if we don't know how
1349 to evaluate CODE at compile-time.
1351 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1354 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1356 unsigned HOST_WIDE_INT int1l, int2l;
1357 HOST_WIDE_INT int1h, int2h;
1358 unsigned HOST_WIDE_INT low;
1360 unsigned HOST_WIDE_INT garbagel;
1361 HOST_WIDE_INT garbageh;
1363 tree type = TREE_TYPE (arg1);
1364 int uns = TYPE_UNSIGNED (type);
1366 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1369 int1l = TREE_INT_CST_LOW (arg1);
1370 int1h = TREE_INT_CST_HIGH (arg1);
1371 int2l = TREE_INT_CST_LOW (arg2);
1372 int2h = TREE_INT_CST_HIGH (arg2);
1377 low = int1l | int2l, hi = int1h | int2h;
1381 low = int1l ^ int2l, hi = int1h ^ int2h;
1385 low = int1l & int2l, hi = int1h & int2h;
1391 /* It's unclear from the C standard whether shifts can overflow.
1392 The following code ignores overflow; perhaps a C standard
1393 interpretation ruling is needed. */
1394 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1401 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1406 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1410 neg_double (int2l, int2h, &low, &hi);
1411 add_double (int1l, int1h, low, hi, &low, &hi);
1412 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1416 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1419 case TRUNC_DIV_EXPR:
1420 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1421 case EXACT_DIV_EXPR:
1422 /* This is a shortcut for a common special case. */
1423 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1424 && ! TREE_CONSTANT_OVERFLOW (arg1)
1425 && ! TREE_CONSTANT_OVERFLOW (arg2)
1426 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1428 if (code == CEIL_DIV_EXPR)
1431 low = int1l / int2l, hi = 0;
1435 /* ... fall through ... */
1437 case ROUND_DIV_EXPR:
1438 if (int2h == 0 && int2l == 0)
1440 if (int2h == 0 && int2l == 1)
1442 low = int1l, hi = int1h;
1445 if (int1l == int2l && int1h == int2h
1446 && ! (int1l == 0 && int1h == 0))
1451 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1452 &low, &hi, &garbagel, &garbageh);
1455 case TRUNC_MOD_EXPR:
1456 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1457 /* This is a shortcut for a common special case. */
1458 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1459 && ! TREE_CONSTANT_OVERFLOW (arg1)
1460 && ! TREE_CONSTANT_OVERFLOW (arg2)
1461 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1463 if (code == CEIL_MOD_EXPR)
1465 low = int1l % int2l, hi = 0;
1469 /* ... fall through ... */
1471 case ROUND_MOD_EXPR:
1472 if (int2h == 0 && int2l == 0)
1474 overflow = div_and_round_double (code, uns,
1475 int1l, int1h, int2l, int2h,
1476 &garbagel, &garbageh, &low, &hi);
1482 low = (((unsigned HOST_WIDE_INT) int1h
1483 < (unsigned HOST_WIDE_INT) int2h)
1484 || (((unsigned HOST_WIDE_INT) int1h
1485 == (unsigned HOST_WIDE_INT) int2h)
1488 low = (int1h < int2h
1489 || (int1h == int2h && int1l < int2l));
1491 if (low == (code == MIN_EXPR))
1492 low = int1l, hi = int1h;
1494 low = int2l, hi = int2h;
1501 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1505 /* Propagate overflow flags ourselves. */
1506 if (((!uns || is_sizetype) && overflow)
1507 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1510 TREE_OVERFLOW (t) = 1;
1511 TREE_CONSTANT_OVERFLOW (t) = 1;
1513 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1516 TREE_CONSTANT_OVERFLOW (t) = 1;
1520 t = force_fit_type (t, 1,
1521 ((!uns || is_sizetype) && overflow)
1522 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1523 TREE_CONSTANT_OVERFLOW (arg1)
1524 | TREE_CONSTANT_OVERFLOW (arg2));
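
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): two INTEGER_CSTs fold directly, e.g. 6 * 7 yields the integer
   constant 42 with no overflow flags set.  The helper name is
   hypothetical.  */
static tree
example_int_const_binop (void)
{
  return int_const_binop (MULT_EXPR,
                          build_int_cst (integer_type_node, 6),
                          build_int_cst (integer_type_node, 7), 0);
}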
1529 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1530 constant. We assume ARG1 and ARG2 have the same data type, or at least
1531 are the same kind of constant and the same machine mode.
1533 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1536 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1541 if (TREE_CODE (arg1) == INTEGER_CST)
1542 return int_const_binop (code, arg1, arg2, notrunc);
1544 if (TREE_CODE (arg1) == REAL_CST)
1546 enum machine_mode mode;
1549 REAL_VALUE_TYPE value;
1550 REAL_VALUE_TYPE result;
1554 /* The following codes are handled by real_arithmetic. */
1569 d1 = TREE_REAL_CST (arg1);
1570 d2 = TREE_REAL_CST (arg2);
1572 type = TREE_TYPE (arg1);
1573 mode = TYPE_MODE (type);
1575 /* Don't perform operation if we honor signaling NaNs and
1576 either operand is a NaN. */
1577 if (HONOR_SNANS (mode)
1578 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1581 /* Don't perform operation if it would raise a division
1582 by zero exception. */
1583 if (code == RDIV_EXPR
1584 && REAL_VALUES_EQUAL (d2, dconst0)
1585 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1588 /* If either operand is a NaN, just return it. Otherwise, set up
1589 for floating-point trap; we return an overflow. */
1590 if (REAL_VALUE_ISNAN (d1))
1592 else if (REAL_VALUE_ISNAN (d2))
1595 inexact = real_arithmetic (&value, code, &d1, &d2);
1596 real_convert (&result, mode, &value);
1598 /* Don't constant fold this floating point operation if
1599 the result has overflowed and flag_trapping_math. */
1601 if (flag_trapping_math
1602 && MODE_HAS_INFINITIES (mode)
1603 && REAL_VALUE_ISINF (result)
1604 && !REAL_VALUE_ISINF (d1)
1605 && !REAL_VALUE_ISINF (d2))
1608 /* Don't constant fold this floating point operation if the
1609 result may dependent upon the run-time rounding mode and
1610 flag_rounding_math is set, or if GCC's software emulation
1611 is unable to accurately represent the result. */
1613 if ((flag_rounding_math
1614 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1615 && !flag_unsafe_math_optimizations))
1616 && (inexact || !real_identical (&result, &value)))
1619 t = build_real (type, result);
1621 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1622 TREE_CONSTANT_OVERFLOW (t)
1624 | TREE_CONSTANT_OVERFLOW (arg1)
1625 | TREE_CONSTANT_OVERFLOW (arg2);
1629 if (TREE_CODE (arg1) == COMPLEX_CST)
1631 tree type = TREE_TYPE (arg1);
1632 tree r1 = TREE_REALPART (arg1);
1633 tree i1 = TREE_IMAGPART (arg1);
1634 tree r2 = TREE_REALPART (arg2);
1635 tree i2 = TREE_IMAGPART (arg2);
1641 t = build_complex (type,
1642 const_binop (PLUS_EXPR, r1, r2, notrunc),
1643 const_binop (PLUS_EXPR, i1, i2, notrunc));
1647 t = build_complex (type,
1648 const_binop (MINUS_EXPR, r1, r2, notrunc),
1649 const_binop (MINUS_EXPR, i1, i2, notrunc));
1653 t = build_complex (type,
1654 const_binop (MINUS_EXPR,
1655 const_binop (MULT_EXPR,
1657 const_binop (MULT_EXPR,
1660 const_binop (PLUS_EXPR,
1661 const_binop (MULT_EXPR,
1663 const_binop (MULT_EXPR,
1670 tree t1, t2, real, imag;
1672 = const_binop (PLUS_EXPR,
1673 const_binop (MULT_EXPR, r2, r2, notrunc),
1674 const_binop (MULT_EXPR, i2, i2, notrunc),
1677 t1 = const_binop (PLUS_EXPR,
1678 const_binop (MULT_EXPR, r1, r2, notrunc),
1679 const_binop (MULT_EXPR, i1, i2, notrunc),
1681 t2 = const_binop (MINUS_EXPR,
1682 const_binop (MULT_EXPR, i1, r2, notrunc),
1683 const_binop (MULT_EXPR, r1, i2, notrunc),
1686 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1688 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1689 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1693 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1694 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1699 t = build_complex (type, real, imag);
1711 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1712 indicates which particular sizetype to create. */
1715 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1717 return build_int_cst (sizetype_tab[(int) kind], number);
1720 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1721 is a tree code. The type of the result is taken from the operands.
1722 Both must be the same integer type and it must be a size type.
1723 If the operands are constant, so is the result. */
1726 size_binop (enum tree_code code, tree arg0, tree arg1)
1728 tree type = TREE_TYPE (arg0);
1730 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1731 && type == TREE_TYPE (arg1));
1733 /* Handle the special case of two integer constants faster. */
1734 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1736 /* And some specific cases even faster than that. */
1737 if (code == PLUS_EXPR && integer_zerop (arg0))
1739 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1740 && integer_zerop (arg1))
1742 else if (code == MULT_EXPR && integer_onep (arg0))
1745 /* Handle general case of two integer constants. */
1746 return int_const_binop (code, arg0, arg1, 0);
1749 if (arg0 == error_mark_node || arg1 == error_mark_node)
1750 return error_mark_node;
1752 return fold_build2 (code, type, arg0, arg1);
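
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): size_binop on two sizetype constants folds immediately, so the
   call below yields the sizetype constant 12.  The helper name is
   hypothetical.  */
static tree
example_size_binop (void)
{
  return size_binop (MULT_EXPR, size_int (3), size_int (4));
}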
1755 /* Given two values, either both of sizetype or both of bitsizetype,
1756 compute the difference between the two values. Return the value
1757 in a signed type corresponding to the type of the operands. */
1760 size_diffop (tree arg0, tree arg1)
1762 tree type = TREE_TYPE (arg0);
1765 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1766 && type == TREE_TYPE (arg1));
1768 /* If the type is already signed, just do the simple thing. */
1769 if (!TYPE_UNSIGNED (type))
1770 return size_binop (MINUS_EXPR, arg0, arg1);
1772 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1774 /* If either operand is not a constant, do the conversions to the signed
1775 type and subtract. The hardware will do the right thing with any
1776 overflow in the subtraction. */
1777 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1778 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1779 fold_convert (ctype, arg1));
1781 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1782 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1783 overflow) and negate (which can't either). Special-case a result
1784 of zero while we're here. */
1785 if (tree_int_cst_equal (arg0, arg1))
1786 return build_int_cst (ctype, 0);
1787 else if (tree_int_cst_lt (arg1, arg0))
1788 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1790 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1791 fold_convert (ctype, size_binop (MINUS_EXPR,
1795 /* A subroutine of fold_convert_const handling conversions of an
1796 INTEGER_CST to another integer type. */
1799 fold_convert_const_int_from_int (tree type, tree arg1)
1803 /* Given an integer constant, make a new constant with the new type,
1804 appropriately sign-extended or truncated. */
1805 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1806 TREE_INT_CST_HIGH (arg1));
1808 t = force_fit_type (t,
1809 /* Don't set the overflow when
1810 converting a pointer */
1811 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1812 (TREE_INT_CST_HIGH (arg1) < 0
1813 && (TYPE_UNSIGNED (type)
1814 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1815 | TREE_OVERFLOW (arg1),
1816 TREE_CONSTANT_OVERFLOW (arg1));
1821 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1822 to an integer type. */
1825 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1830 /* The following code implements the floating point to integer
1831 conversion rules required by the Java Language Specification,
1832 that IEEE NaNs are mapped to zero and values that overflow
1833 the target precision saturate, i.e. values greater than
1834 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1835 are mapped to INT_MIN. These semantics are allowed by the
1836 C and C++ standards that simply state that the behavior of
1837 FP-to-integer conversion is unspecified upon overflow. */
1839 HOST_WIDE_INT high, low;
1841 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1845 case FIX_TRUNC_EXPR:
1846 real_trunc (&r, VOIDmode, &x);
1850 real_ceil (&r, VOIDmode, &x);
1853 case FIX_FLOOR_EXPR:
1854 real_floor (&r, VOIDmode, &x);
1857 case FIX_ROUND_EXPR:
1858 real_round (&r, VOIDmode, &x);
1865 /* If R is NaN, return zero and show we have an overflow. */
1866 if (REAL_VALUE_ISNAN (r))
1873 /* See if R is less than the lower bound or greater than the
1878 tree lt = TYPE_MIN_VALUE (type);
1879 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1880 if (REAL_VALUES_LESS (r, l))
1883 high = TREE_INT_CST_HIGH (lt);
1884 low = TREE_INT_CST_LOW (lt);
1890 tree ut = TYPE_MAX_VALUE (type);
1893 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1894 if (REAL_VALUES_LESS (u, r))
1897 high = TREE_INT_CST_HIGH (ut);
1898 low = TREE_INT_CST_LOW (ut);
1904 REAL_VALUE_TO_INT (&low, &high, r);
1906 t = build_int_cst_wide (type, low, high);
1908 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1909 TREE_CONSTANT_OVERFLOW (arg1));
1913 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1914 to another floating point type. */
1917 fold_convert_const_real_from_real (tree type, tree arg1)
1919 REAL_VALUE_TYPE value;
1922 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1923 t = build_real (type, value);
1925 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1926 TREE_CONSTANT_OVERFLOW (t)
1927 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1931 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1932 type TYPE. If no simplification can be done return NULL_TREE. */
1935 fold_convert_const (enum tree_code code, tree type, tree arg1)
1937 if (TREE_TYPE (arg1) == type)
1940 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1942 if (TREE_CODE (arg1) == INTEGER_CST)
1943 return fold_convert_const_int_from_int (type, arg1);
1944 else if (TREE_CODE (arg1) == REAL_CST)
1945 return fold_convert_const_int_from_real (code, type, arg1);
1947 else if (TREE_CODE (type) == REAL_TYPE)
1949 if (TREE_CODE (arg1) == INTEGER_CST)
1950 return build_real_from_int_cst (type, arg1);
1951 if (TREE_CODE (arg1) == REAL_CST)
1952 return fold_convert_const_real_from_real (type, arg1);
1957 /* Construct a vector of zero elements of vector type TYPE. */
1960 build_zero_vector (tree type)
1965 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1966 units = TYPE_VECTOR_SUBPARTS (type);
1969 for (i = 0; i < units; i++)
1970 list = tree_cons (NULL_TREE, elem, list);
1971 return build_vector (type, list);
1974 /* Convert expression ARG to type TYPE. Used by the middle-end for
1975 simple conversions in preference to calling the front-end's convert. */
1978 fold_convert (tree type, tree arg)
1980 tree orig = TREE_TYPE (arg);
1986 if (TREE_CODE (arg) == ERROR_MARK
1987 || TREE_CODE (type) == ERROR_MARK
1988 || TREE_CODE (orig) == ERROR_MARK)
1989 return error_mark_node;
1991 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1992 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1993 TYPE_MAIN_VARIANT (orig)))
1994 return fold_build1 (NOP_EXPR, type, arg);
1996 switch (TREE_CODE (type))
1998 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1999 case POINTER_TYPE: case REFERENCE_TYPE:
2001 if (TREE_CODE (arg) == INTEGER_CST)
2003 tem = fold_convert_const (NOP_EXPR, type, arg);
2004 if (tem != NULL_TREE)
2007 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2008 || TREE_CODE (orig) == OFFSET_TYPE)
2009 return fold_build1 (NOP_EXPR, type, arg);
2010 if (TREE_CODE (orig) == COMPLEX_TYPE)
2012 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2013 return fold_convert (type, tem);
2015 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2016 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2017 return fold_build1 (NOP_EXPR, type, arg);
2020 if (TREE_CODE (arg) == INTEGER_CST)
2022 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2023 if (tem != NULL_TREE)
2026 else if (TREE_CODE (arg) == REAL_CST)
2028 tem = fold_convert_const (NOP_EXPR, type, arg);
2029 if (tem != NULL_TREE)
2033 switch (TREE_CODE (orig))
2035 case INTEGER_TYPE: case CHAR_TYPE:
2036 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2037 case POINTER_TYPE: case REFERENCE_TYPE:
2038 return fold_build1 (FLOAT_EXPR, type, arg);
2041 return fold_build1 (NOP_EXPR, type, arg);
2044 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2045 return fold_convert (type, tem);
2052 switch (TREE_CODE (orig))
2054 case INTEGER_TYPE: case CHAR_TYPE:
2055 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2056 case POINTER_TYPE: case REFERENCE_TYPE:
2058 return build2 (COMPLEX_EXPR, type,
2059 fold_convert (TREE_TYPE (type), arg),
2060 fold_convert (TREE_TYPE (type), integer_zero_node));
2065 if (TREE_CODE (arg) == COMPLEX_EXPR)
2067 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2068 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2069 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2072 arg = save_expr (arg);
2073 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2074 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2075 rpart = fold_convert (TREE_TYPE (type), rpart);
2076 ipart = fold_convert (TREE_TYPE (type), ipart);
2077 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 if (integer_zerop (arg))
2086 return build_zero_vector (type);
2087 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2088 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2089 || TREE_CODE (orig) == VECTOR_TYPE);
2090 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2093 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
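
/* Illustrative sketch (editor's addition, not part of the original GCC
   source): converting an INTEGER_CST to a REAL_TYPE goes through
   fold_convert_const and produces a REAL_CST, here 42.0.  The helper name
   is hypothetical.  */
static tree
example_fold_convert (void)
{
  return fold_convert (double_type_node,
                       build_int_cst (integer_type_node, 42));
}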
2100 /* Return false if expr can be assumed not to be an lvalue, true
2104 maybe_lvalue_p (tree x)
2106 /* We only need to wrap lvalue tree codes. */
2107 switch (TREE_CODE (x))
2118 case ALIGN_INDIRECT_REF:
2119 case MISALIGNED_INDIRECT_REF:
2121 case ARRAY_RANGE_REF:
2127 case PREINCREMENT_EXPR:
2128 case PREDECREMENT_EXPR:
2130 case TRY_CATCH_EXPR:
2131 case WITH_CLEANUP_EXPR:
2142 /* Assume the worst for front-end tree codes. */
2143 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2151 /* Return an expr equal to X but certainly not valid as an lvalue. */
2156 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2161 if (! maybe_lvalue_p (x))
2163 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2166 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2167 Zero means allow extended lvalues. */
2169 int pedantic_lvalues;
2171 /* When pedantic, return an expr equal to X but certainly not valid as a
2172 pedantic lvalue. Otherwise, return X. */
2175 pedantic_non_lvalue (tree x)
2177 if (pedantic_lvalues)
2178 return non_lvalue (x);
2183 /* Given a tree comparison code, return the code that is the logical inverse
2184 of the given code. It is not safe to do this for floating-point
2185 comparisons, except for NE_EXPR and EQ_EXPR, so we receive the HONOR_NANS
2186 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2189 invert_tree_comparison (enum tree_code code, bool honor_nans)
2191 if (honor_nans && flag_trapping_math)
2201 return honor_nans ? UNLE_EXPR : LE_EXPR;
2203 return honor_nans ? UNLT_EXPR : LT_EXPR;
2205 return honor_nans ? UNGE_EXPR : GE_EXPR;
2207 return honor_nans ? UNGT_EXPR : GT_EXPR;
2221 return UNORDERED_EXPR;
2222 case UNORDERED_EXPR:
2223 return ORDERED_EXPR;
2229 /* Similar, but return the comparison that results if the operands are
2230 swapped. This is safe for floating-point. */
2233 swap_tree_comparison (enum tree_code code)
2240 case UNORDERED_EXPR:
2266 /* Convert a comparison tree code from an enum tree_code representation
2267 into a compcode bit-based encoding. This function is the inverse of
2268 compcode_to_comparison. */
2270 static enum comparison_code
2271 comparison_to_compcode (enum tree_code code)
2288 return COMPCODE_ORD;
2289 case UNORDERED_EXPR:
2290 return COMPCODE_UNORD;
2292 return COMPCODE_UNLT;
2294 return COMPCODE_UNEQ;
2296 return COMPCODE_UNLE;
2298 return COMPCODE_UNGT;
2300 return COMPCODE_LTGT;
2302 return COMPCODE_UNGE;
2308 /* Convert a compcode bit-based encoding of a comparison operator back
2309 to GCC's enum tree_code representation. This function is the
2310 inverse of comparison_to_compcode. */
2312 static enum tree_code
2313 compcode_to_comparison (enum comparison_code code)
2330 return ORDERED_EXPR;
2331 case COMPCODE_UNORD:
2332 return UNORDERED_EXPR;
2350 /* Return a tree for the comparison which is the combination of
2351 doing the AND or OR (depending on CODE) of the two operations LCODE
2352 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2353 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2354 if this makes the transformation invalid. */
2357 combine_comparisons (enum tree_code code, enum tree_code lcode,
2358 enum tree_code rcode, tree truth_type,
2359 tree ll_arg, tree lr_arg)
2361 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2362 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2363 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2364 enum comparison_code compcode;
2368 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2369 compcode = lcompcode & rcompcode;
2372 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2373 compcode = lcompcode | rcompcode;
2382 /* Eliminate unordered comparisons, as well as LTGT and ORD
2383 which are not used unless the mode has NaNs. */
2384 compcode &= ~COMPCODE_UNORD;
2385 if (compcode == COMPCODE_LTGT)
2386 compcode = COMPCODE_NE;
2387 else if (compcode == COMPCODE_ORD)
2388 compcode = COMPCODE_TRUE;
2390 else if (flag_trapping_math)
2392 /* Check that the original operation and the optimized ones will trap
2393 under the same condition. */
2394 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2395 && (lcompcode != COMPCODE_EQ)
2396 && (lcompcode != COMPCODE_ORD);
2397 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2398 && (rcompcode != COMPCODE_EQ)
2399 && (rcompcode != COMPCODE_ORD);
2400 bool trap = (compcode & COMPCODE_UNORD) == 0
2401 && (compcode != COMPCODE_EQ)
2402 && (compcode != COMPCODE_ORD);
2404 /* In a short-circuited boolean expression the LHS might be
2405 such that the RHS, if evaluated, will never trap. For
2406 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2407 if neither x nor y is NaN. (This is a mixed blessing: for
2408 example, the expression above will never trap, hence
2409 optimizing it to x < y would be invalid). */
2410 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2411 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2414 /* If the comparison was short-circuited, and only the RHS
2415 trapped, we may now generate a spurious trap. */
2417 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2420 /* If we changed the conditions that cause a trap, we lose. */
2421 if ((ltrap || rtrap) != trap)
2425 if (compcode == COMPCODE_TRUE)
2426 return constant_boolean_node (true, truth_type);
2427 else if (compcode == COMPCODE_FALSE)
2428 return constant_boolean_node (false, truth_type);
2430 return fold_build2 (compcode_to_comparison (compcode),
2431 truth_type, ll_arg, lr_arg);
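/* Illustrative sketch, not part of the original file: how the bit-based
   encoding turns combining comparisons into plain AND/OR, as done above.
   The enum values here are an assumption standing in for the elided
   comparison_code enum (one bit each for "less", "equal", "greater" and
   "unordered"); only the structure matters for the demo.  */

enum demo_compcode
{
  DEMO_LT = 1, DEMO_EQ = 2, DEMO_GT = 4, DEMO_UNORD = 8,
  DEMO_LE = DEMO_LT | DEMO_EQ,
  DEMO_GE = DEMO_GT | DEMO_EQ,
  DEMO_NE = DEMO_LT | DEMO_GT
};

static int
demo_combine_comparisons (void)
{
  /* (x <= y) && (x >= y)  -->  x == y  */
  int and_ok = (DEMO_LE & DEMO_GE) == DEMO_EQ;
  /* (x < y) || (x == y)   -->  x <= y  */
  int or_ok = (DEMO_LT | DEMO_EQ) == DEMO_LE;
  return and_ok && or_ok;   /* 1 */
}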
2434 /* Return nonzero if CODE is a tree code that represents a truth value. */
2437 truth_value_p (enum tree_code code)
2439 return (TREE_CODE_CLASS (code) == tcc_comparison
2440 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2441 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2442 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2445 /* Return nonzero if two operands (typically of the same tree node)
2446 are necessarily equal. If either argument has side-effects this
2447 function returns zero. FLAGS modifies behavior as follows:
2449 If OEP_ONLY_CONST is set, only return nonzero for constants.
2450 This function tests whether the operands are indistinguishable;
2451 it does not test whether they are equal using C's == operation.
2452 The distinction is important for IEEE floating point, because
2453 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2454 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2456 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2457 even though it may hold multiple values during a function.
2458 This is because a GCC tree node guarantees that nothing else is
2459 executed between the evaluation of its "operands" (which may often
2460 be evaluated in arbitrary order). Hence if the operands themselves
2461 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2462 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2463 unset means assuming isochronic (or instantaneous) tree equivalence.
2464 Unless comparing arbitrary expression trees, such as from different
2465 statements, this flag can usually be left unset.
2467 If OEP_PURE_SAME is set, then pure functions with identical arguments
2468 are considered the same. It is used when the caller has other ways
2469 to ensure that global memory is unchanged in between. */
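/* Illustrative sketch, not part of the original file: the IEEE distinction
   drawn above between "indistinguishable" and "compares equal with ==",
   in standalone C.  Hypothetical demo code, not GCC internals.  */

#include <string.h>

static int
demo_ieee_identity (void)
{
  double pz = 0.0, nz = -0.0;
  double nan1 = 0.0 / 0.0, nan2 = 0.0 / 0.0;

  int zeros_equal = (pz == nz);                              /* 1 */
  int zeros_identical = (memcmp (&pz, &nz, sizeof pz) == 0); /* 0: sign bit differs */
  int nans_equal = (nan1 == nan2);                           /* 0: NaN != NaN */

  /* Two NaNs produced the same way are typically bit-identical even
     though == calls them unequal -- the mirror image of the zeros.  */
  return zeros_equal && !zeros_identical && !nans_equal;     /* 1 under IEEE 754 */
}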
2472 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2474 /* If either is ERROR_MARK, they aren't equal. */
2475 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2478 /* If both types don't have the same signedness, then we can't consider
2479 them equal. We must check this before the STRIP_NOPS calls
2480 because they may change the signedness of the arguments. */
2481 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2487 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2488 /* This is needed for conversions and for COMPONENT_REF.
2489 Might as well play it safe and always test this. */
2490 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2491 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2492 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2495 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2496 We don't care about side effects in that case because the SAVE_EXPR
2497 takes care of that for us. In all other cases, two expressions are
2498 equal if they have no side effects. If we have two identical
2499 expressions with side effects that should be treated the same due
2500 to the only side effects being identical SAVE_EXPR's, that will
2501 be detected in the recursive calls below. */
2502 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2503 && (TREE_CODE (arg0) == SAVE_EXPR
2504 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2507 /* Next handle constant cases, those for which we can return 1 even
2508 if ONLY_CONST is set. */
2509 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2510 switch (TREE_CODE (arg0))
2513 return (! TREE_CONSTANT_OVERFLOW (arg0)
2514 && ! TREE_CONSTANT_OVERFLOW (arg1)
2515 && tree_int_cst_equal (arg0, arg1));
2518 return (! TREE_CONSTANT_OVERFLOW (arg0)
2519 && ! TREE_CONSTANT_OVERFLOW (arg1)
2520 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2521 TREE_REAL_CST (arg1)));
2527 if (TREE_CONSTANT_OVERFLOW (arg0)
2528 || TREE_CONSTANT_OVERFLOW (arg1))
2531 v1 = TREE_VECTOR_CST_ELTS (arg0);
2532 v2 = TREE_VECTOR_CST_ELTS (arg1);
2535 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2538 v1 = TREE_CHAIN (v1);
2539 v2 = TREE_CHAIN (v2);
2546 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2548 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2552 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2553 && ! memcmp (TREE_STRING_POINTER (arg0),
2554 TREE_STRING_POINTER (arg1),
2555 TREE_STRING_LENGTH (arg0)));
2558 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2564 if (flags & OEP_ONLY_CONST)
2567 /* Define macros to test an operand from arg0 and arg1 for equality and a
2568 variant that allows null and views null as being different from any
2569 non-null value. In the latter case, if either is null, they both
2570 must be; otherwise, do the normal comparison. */
2571 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2572 TREE_OPERAND (arg1, N), flags)
2574 #define OP_SAME_WITH_NULL(N) \
2575 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2576 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2578 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2581 /* Two conversions are equal only if signedness and modes match. */
2582 switch (TREE_CODE (arg0))
2587 case FIX_TRUNC_EXPR:
2588 case FIX_FLOOR_EXPR:
2589 case FIX_ROUND_EXPR:
2590 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2591 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2601 case tcc_comparison:
2603 if (OP_SAME (0) && OP_SAME (1))
2606 /* For commutative ops, allow the other order. */
2607 return (commutative_tree_code (TREE_CODE (arg0))
2608 && operand_equal_p (TREE_OPERAND (arg0, 0),
2609 TREE_OPERAND (arg1, 1), flags)
2610 && operand_equal_p (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 0), flags));
2614 /* If either of the pointer (or reference) expressions we are
2615 dereferencing contains a side effect, these cannot be equal. */
2616 if (TREE_SIDE_EFFECTS (arg0)
2617 || TREE_SIDE_EFFECTS (arg1))
2620 switch (TREE_CODE (arg0))
2623 case ALIGN_INDIRECT_REF:
2624 case MISALIGNED_INDIRECT_REF:
2630 case ARRAY_RANGE_REF:
2631 /* Operands 2 and 3 may be null. */
2634 && OP_SAME_WITH_NULL (2)
2635 && OP_SAME_WITH_NULL (3));
2638 /* Handle operand 2 the same as for ARRAY_REF. */
2639 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2642 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2648 case tcc_expression:
2649 switch (TREE_CODE (arg0))
2652 case TRUTH_NOT_EXPR:
2655 case TRUTH_ANDIF_EXPR:
2656 case TRUTH_ORIF_EXPR:
2657 return OP_SAME (0) && OP_SAME (1);
2659 case TRUTH_AND_EXPR:
2661 case TRUTH_XOR_EXPR:
2662 if (OP_SAME (0) && OP_SAME (1))
2665 /* Otherwise take into account this is a commutative operation. */
2666 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2667 TREE_OPERAND (arg1, 1), flags)
2668 && operand_equal_p (TREE_OPERAND (arg0, 1),
2669 TREE_OPERAND (arg1, 0), flags));
2672 /* If the CALL_EXPRs call different functions, then they
2673 clearly cannot be equal. */
2678 unsigned int cef = call_expr_flags (arg0);
2679 if (flags & OEP_PURE_SAME)
2680 cef &= ECF_CONST | ECF_PURE;
2687 /* Now see if all the arguments are the same. operand_equal_p
2688 does not handle TREE_LIST, so we walk the operands here
2689 feeding them to operand_equal_p. */
2690 arg0 = TREE_OPERAND (arg0, 1);
2691 arg1 = TREE_OPERAND (arg1, 1);
2692 while (arg0 && arg1)
2694 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2698 arg0 = TREE_CHAIN (arg0);
2699 arg1 = TREE_CHAIN (arg1);
2702 /* If we get here and both argument lists are exhausted
2703 then the CALL_EXPRs are equal. */
2704 return ! (arg0 || arg1);
2710 case tcc_declaration:
2711 /* Consider __builtin_sqrt equal to sqrt. */
2712 return (TREE_CODE (arg0) == FUNCTION_DECL
2713 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2714 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2715 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2722 #undef OP_SAME_WITH_NULL
2725 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2726 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2728 When in doubt, return 0. */
2731 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2733 int unsignedp1, unsignedpo;
2734 tree primarg0, primarg1, primother;
2735 unsigned int correct_width;
2737 if (operand_equal_p (arg0, arg1, 0))
2740 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2741 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2744 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2745 and see if the inner values are the same. This removes any
2746 signedness comparison, which doesn't matter here. */
2747 primarg0 = arg0, primarg1 = arg1;
2748 STRIP_NOPS (primarg0);
2749 STRIP_NOPS (primarg1);
2750 if (operand_equal_p (primarg0, primarg1, 0))
2753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2754 actual comparison operand, ARG0.
2756 First throw away any conversions to wider types
2757 already present in the operands. */
2759 primarg1 = get_narrower (arg1, &unsignedp1);
2760 primother = get_narrower (other, &unsignedpo);
2762 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2763 if (unsignedp1 == unsignedpo
2764 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2765 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2767 tree type = TREE_TYPE (arg0);
2769 /* Make sure shorter operand is extended the right way
2770 to match the longer operand. */
2771 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2772 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2774 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2781 /* See if ARG is an expression that is either a comparison or is performing
2782 arithmetic on comparisons. The comparisons must only be comparing
2783 two different values, which will be stored in *CVAL1 and *CVAL2; if
2784 they are nonzero it means that some operands have already been found.
2785 No variables may be used anywhere else in the expression except in the
2786 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2787 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2789 If this is true, return 1. Otherwise, return zero. */
2792 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2794 enum tree_code code = TREE_CODE (arg);
2795 enum tree_code_class class = TREE_CODE_CLASS (code);
2797 /* We can handle some of the tcc_expression cases here. */
2798 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2800 else if (class == tcc_expression
2801 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2802 || code == COMPOUND_EXPR))
2805 else if (class == tcc_expression && code == SAVE_EXPR
2806 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2808 /* If we've already found a CVAL1 or CVAL2, this expression is
2809 too complex to handle. */
2810 if (*cval1 || *cval2)
2820 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2823 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2824 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2825 cval1, cval2, save_p));
2830 case tcc_expression:
2831 if (code == COND_EXPR)
2832 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2833 cval1, cval2, save_p)
2834 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2835 cval1, cval2, save_p)
2836 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2837 cval1, cval2, save_p));
2840 case tcc_comparison:
2841 /* First see if we can handle the first operand, then the second. For
2842 the second operand, we know *CVAL1 can't be zero. It must be that
2843 one side of the comparison is each of the values; test for the
2844 case where this isn't true by failing if the two operands
2847 if (operand_equal_p (TREE_OPERAND (arg, 0),
2848 TREE_OPERAND (arg, 1), 0))
2852 *cval1 = TREE_OPERAND (arg, 0);
2853 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2855 else if (*cval2 == 0)
2856 *cval2 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2862 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2864 else if (*cval2 == 0)
2865 *cval2 = TREE_OPERAND (arg, 1);
2866 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2878 /* ARG is a tree that is known to contain just arithmetic operations and
2879 comparisons. Evaluate the operations in the tree substituting NEW0 for
2880 any occurrence of OLD0 as an operand of a comparison and likewise for
2884 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2886 tree type = TREE_TYPE (arg);
2887 enum tree_code code = TREE_CODE (arg);
2888 enum tree_code_class class = TREE_CODE_CLASS (code);
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2893 else if (class == tcc_expression
2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 return fold_build1 (code, type,
2901 eval_subst (TREE_OPERAND (arg, 0),
2902 old0, new0, old1, new1));
2905 return fold_build2 (code, type,
2906 eval_subst (TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1),
2908 eval_subst (TREE_OPERAND (arg, 1),
2909 old0, new0, old1, new1));
2911 case tcc_expression:
2915 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2918 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2921 return fold_build3 (code, type,
2922 eval_subst (TREE_OPERAND (arg, 0),
2923 old0, new0, old1, new1),
2924 eval_subst (TREE_OPERAND (arg, 1),
2925 old0, new0, old1, new1),
2926 eval_subst (TREE_OPERAND (arg, 2),
2927 old0, new0, old1, new1));
2931 /* Fall through - ??? */
2933 case tcc_comparison:
2935 tree arg0 = TREE_OPERAND (arg, 0);
2936 tree arg1 = TREE_OPERAND (arg, 1);
2938 /* We need to check both for exact equality and tree equality. The
2939 former will be true if the operand has a side-effect. In that
2940 case, we know the operand occurred exactly once. */
2942 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2944 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2947 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2949 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2952 return fold_build2 (code, type, arg0, arg1);
2960 /* Return a tree for the case when the result of an expression is RESULT
2961 converted to TYPE and OMITTED was previously an operand of the expression
2962 but is now not needed (e.g., we folded OMITTED * 0).
2964 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2965 the conversion of RESULT to TYPE. */
2968 omit_one_operand (tree type, tree result, tree omitted)
2970 tree t = fold_convert (type, result);
2972 if (TREE_SIDE_EFFECTS (omitted))
2973 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2975 return non_lvalue (t);
2978 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2981 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2983 tree t = fold_convert (type, result);
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2988 return pedantic_non_lvalue (t);
2991 /* Return a tree for the case when the result of an expression is RESULT
2992 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2993 of the expression but are now not needed.
2995 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2996 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2997 evaluated before OMITTED2. Otherwise, if neither has side effects,
2998 just do the conversion of RESULT to TYPE. */
3001 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3003 tree t = fold_convert (type, result);
3005 if (TREE_SIDE_EFFECTS (omitted2))
3006 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3007 if (TREE_SIDE_EFFECTS (omitted1))
3008 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3010 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3014 /* Return a simplified tree node for the truth-negation of ARG. This
3015 never alters ARG itself. We assume that ARG is an operation that
3016 returns a truth value (0 or 1).
3018 FIXME: one would think we would fold the result, but it causes
3019 problems with the dominator optimizer. */
3021 invert_truthvalue (tree arg)
3023 tree type = TREE_TYPE (arg);
3024 enum tree_code code = TREE_CODE (arg);
3026 if (code == ERROR_MARK)
3029 /* If this is a comparison, we can simply invert it, except for
3030 floating-point non-equality comparisons, in which case we just
3031 enclose a TRUTH_NOT_EXPR around what we have. */
3033 if (TREE_CODE_CLASS (code) == tcc_comparison)
3035 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3036 if (FLOAT_TYPE_P (op_type)
3037 && flag_trapping_math
3038 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3039 && code != NE_EXPR && code != EQ_EXPR)
3040 return build1 (TRUTH_NOT_EXPR, type, arg);
3043 code = invert_tree_comparison (code,
3044 HONOR_NANS (TYPE_MODE (op_type)));
3045 if (code == ERROR_MARK)
3046 return build1 (TRUTH_NOT_EXPR, type, arg);
3048 return build2 (code, type,
3049 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3056 return constant_boolean_node (integer_zerop (arg), type);
3058 case TRUTH_AND_EXPR:
3059 return build2 (TRUTH_OR_EXPR, type,
3060 invert_truthvalue (TREE_OPERAND (arg, 0)),
3061 invert_truthvalue (TREE_OPERAND (arg, 1)));
3064 return build2 (TRUTH_AND_EXPR, type,
3065 invert_truthvalue (TREE_OPERAND (arg, 0)),
3066 invert_truthvalue (TREE_OPERAND (arg, 1)));
3068 case TRUTH_XOR_EXPR:
3069 /* Here we can invert either operand. We invert the first operand
3070 unless the second operand is a TRUTH_NOT_EXPR in which case our
3071 result is the XOR of the first operand with the inside of the
3072 negation of the second operand. */
3074 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3075 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3076 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3078 return build2 (TRUTH_XOR_EXPR, type,
3079 invert_truthvalue (TREE_OPERAND (arg, 0)),
3080 TREE_OPERAND (arg, 1));
3082 case TRUTH_ANDIF_EXPR:
3083 return build2 (TRUTH_ORIF_EXPR, type,
3084 invert_truthvalue (TREE_OPERAND (arg, 0)),
3085 invert_truthvalue (TREE_OPERAND (arg, 1)));
3087 case TRUTH_ORIF_EXPR:
3088 return build2 (TRUTH_ANDIF_EXPR, type,
3089 invert_truthvalue (TREE_OPERAND (arg, 0)),
3090 invert_truthvalue (TREE_OPERAND (arg, 1)));
3092 case TRUTH_NOT_EXPR:
3093 return TREE_OPERAND (arg, 0);
3097 tree arg1 = TREE_OPERAND (arg, 1);
3098 tree arg2 = TREE_OPERAND (arg, 2);
3099 /* A COND_EXPR may have a throw as one operand, which
3100 then has void type. Just leave void operands
3102 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3103 VOID_TYPE_P (TREE_TYPE (arg1))
3104 ? arg1 : invert_truthvalue (arg1),
3105 VOID_TYPE_P (TREE_TYPE (arg2))
3106 ? arg2 : invert_truthvalue (arg2));
3110 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3111 invert_truthvalue (TREE_OPERAND (arg, 1)));
3113 case NON_LVALUE_EXPR:
3114 return invert_truthvalue (TREE_OPERAND (arg, 0));
3117 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3122 return build1 (TREE_CODE (arg), type,
3123 invert_truthvalue (TREE_OPERAND (arg, 0)));
3126 if (!integer_onep (TREE_OPERAND (arg, 1)))
3128 return build2 (EQ_EXPR, type, arg,
3129 build_int_cst (type, 0));
3132 return build1 (TRUTH_NOT_EXPR, type, arg);
3134 case CLEANUP_POINT_EXPR:
3135 return build1 (CLEANUP_POINT_EXPR, type,
3136 invert_truthvalue (TREE_OPERAND (arg, 0)));
3141 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3142 return build1 (TRUTH_NOT_EXPR, type, arg);
3145 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3146 operands are another bit-wise operation with a common input. If so,
3147 distribute the bit operations to save an operation and possibly two if
3148 constants are involved. For example, convert
3149 (A | B) & (A | C) into A | (B & C)
3150 Further simplification will occur if B and C are constants.
3152 If this optimization cannot be done, 0 will be returned. */
3155 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3160 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3161 || TREE_CODE (arg0) == code
3162 || (TREE_CODE (arg0) != BIT_AND_EXPR
3163 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3166 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3168 common = TREE_OPERAND (arg0, 0);
3169 left = TREE_OPERAND (arg0, 1);
3170 right = TREE_OPERAND (arg1, 1);
3172 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3174 common = TREE_OPERAND (arg0, 0);
3175 left = TREE_OPERAND (arg0, 1);
3176 right = TREE_OPERAND (arg1, 0);
3178 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3180 common = TREE_OPERAND (arg0, 1);
3181 left = TREE_OPERAND (arg0, 0);
3182 right = TREE_OPERAND (arg1, 1);
3184 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3186 common = TREE_OPERAND (arg0, 1);
3187 left = TREE_OPERAND (arg0, 0);
3188 right = TREE_OPERAND (arg1, 0);
3193 return fold_build2 (TREE_CODE (arg0), type, common,
3194 fold_build2 (code, type, left, right));
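/* Illustrative sketch, not part of the original file: the identity the
   function above exploits, checked on concrete unsigned values (and its
   dual for the other choice of CODE).  Hypothetical demo code.  */

static int
demo_distribute_bit_expr (unsigned a, unsigned b, unsigned c)
{
  int ior_form = (((a | b) & (a | c)) == (a | (b & c)));   /* 1 for all inputs */
  int and_form = (((a & b) | (a & c)) == (a & (b | c)));   /* 1 for all inputs */
  return ior_form && and_form;
}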
3197 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3198 with code CODE. This optimization is unsafe for IEEE floating point (merging the divisions can change rounding), so it is only valid when unsafe math optimizations are permitted. */
3200 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3202 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3203 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3205 /* (A / C) +- (B / C) -> (A +- B) / C. */
3207 && operand_equal_p (TREE_OPERAND (arg0, 1),
3208 TREE_OPERAND (arg1, 1), 0))
3209 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3210 fold_build2 (code, type,
3211 TREE_OPERAND (arg0, 0),
3212 TREE_OPERAND (arg1, 0)),
3213 TREE_OPERAND (arg0, 1));
3215 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3216 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3217 TREE_OPERAND (arg1, 0), 0)
3218 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3219 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3221 REAL_VALUE_TYPE r0, r1;
3222 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3223 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3225 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3227 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3228 real_arithmetic (&r0, code, &r0, &r1);
3229 return fold_build2 (MULT_EXPR, type,
3230 TREE_OPERAND (arg0, 0),
3231 build_real (type, r0));
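/* Illustrative sketch, not part of the original file: the source-level
   shape of the rewrites above.  They are algebraically exact but can
   change IEEE rounding (one division instead of two), which is why the
   transformation is flagged as unsafe.  Hypothetical demo code.  */

static double
demo_distribute_real_division (double a, double b, double c)
{
  double two_divs = a / c + b / c;   /* original form          */
  double one_div = (a + b) / c;      /* rewritten (A + B) / C  */

  /* The two results agree in exact arithmetic but may differ in the
     last bit under IEEE rounding, so this difference is usually, but
     not always, zero.  */
  return one_div - two_divs;
}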
3237 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3238 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3241 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3248 tree size = TYPE_SIZE (TREE_TYPE (inner));
3249 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3250 || POINTER_TYPE_P (TREE_TYPE (inner)))
3251 && host_integerp (size, 0)
3252 && tree_low_cst (size, 0) == bitsize)
3253 return fold_convert (type, inner);
3256 result = build3 (BIT_FIELD_REF, type, inner,
3257 size_int (bitsize), bitsize_int (bitpos));
3259 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3264 /* Optimize a bit-field compare.
3266 There are two cases: First is a compare against a constant and the
3267 second is a comparison of two items where the fields are at the same
3268 bit position relative to the start of a chunk (byte, halfword, word)
3269 large enough to contain it. In these cases we can avoid the shift
3270 implicit in bitfield extractions.
3272 For constants, we emit a compare of the shifted constant with the
3273 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3274 compared. For two fields at the same position, we do the ANDs with the
3275 similar mask and compare the result of the ANDs.
3277 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3278 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3279 are the left and right operands of the comparison, respectively.
3281 If the optimization described above can be done, we return the resulting
3282 tree. Otherwise we return zero. */
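/* Illustrative sketch, not part of the original file: the constant case
   above at the source level.  Instead of extracting the field (shift,
   then mask), mask the containing word and compare it with the constant
   shifted into place.  Hypothetical demo, assuming 0 < BITSIZE,
   BITPOS + BITSIZE < 32, and a constant that fits in the field.  */

static int
demo_bit_field_compare (unsigned word, unsigned bitpos, unsigned bitsize,
                        unsigned cst)
{
  unsigned field_mask = ((1u << bitsize) - 1) << bitpos;

  int via_extract = (((word >> bitpos) & ((1u << bitsize) - 1)) == cst);
  int via_mask = ((word & field_mask) == (cst << bitpos));

  return via_extract == via_mask;   /* 1 under the stated assumptions */
}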
3285 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3288 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3289 tree type = TREE_TYPE (lhs);
3290 tree signed_type, unsigned_type;
3291 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3292 enum machine_mode lmode, rmode, nmode;
3293 int lunsignedp, runsignedp;
3294 int lvolatilep = 0, rvolatilep = 0;
3295 tree linner, rinner = NULL_TREE;
3299 /* Get all the information about the extractions being done. If the bit size
3300 is the same as the size of the underlying object, we aren't doing an
3301 extraction at all and so can do nothing. We also don't want to
3302 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3303 then will no longer be able to replace it. */
3304 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3305 &lunsignedp, &lvolatilep, false);
3306 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3307 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3312 /* If this is not a constant, we can only do something if bit positions,
3313 sizes, and signedness are the same. */
3314 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3315 &runsignedp, &rvolatilep, false);
3317 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3318 || lunsignedp != runsignedp || offset != 0
3319 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3323 /* See if we can find a mode to refer to this field. We should be able to,
3324 but fail if we can't. */
3325 nmode = get_best_mode (lbitsize, lbitpos,
3326 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3327 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3328 TYPE_ALIGN (TREE_TYPE (rinner))),
3329 word_mode, lvolatilep || rvolatilep);
3330 if (nmode == VOIDmode)
3333 /* Set signed and unsigned types of the precision of this mode for the
3335 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3336 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3338 /* Compute the bit position and size for the new reference and our offset
3339 within it. If the new reference is the same size as the original, we
3340 won't optimize anything, so return zero. */
3341 nbitsize = GET_MODE_BITSIZE (nmode);
3342 nbitpos = lbitpos & ~ (nbitsize - 1);
3344 if (nbitsize == lbitsize)
3347 if (BYTES_BIG_ENDIAN)
3348 lbitpos = nbitsize - lbitsize - lbitpos;
3350 /* Make the mask to be used against the extracted field. */
3351 mask = build_int_cst (unsigned_type, -1);
3352 mask = force_fit_type (mask, 0, false, false);
3353 mask = fold_convert (unsigned_type, mask);
3354 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3355 mask = const_binop (RSHIFT_EXPR, mask,
3356 size_int (nbitsize - lbitsize - lbitpos), 0);
3359 /* If not comparing with constant, just rework the comparison
3361 return build2 (code, compare_type,
3362 build2 (BIT_AND_EXPR, unsigned_type,
3363 make_bit_field_ref (linner, unsigned_type,
3364 nbitsize, nbitpos, 1),
3366 build2 (BIT_AND_EXPR, unsigned_type,
3367 make_bit_field_ref (rinner, unsigned_type,
3368 nbitsize, nbitpos, 1),
3371 /* Otherwise, we are handling the constant case. See if the constant is too
3372 big for the field. Warn and return a tree for 0 (false) if so. We do
3373 this not only for its own sake, but to avoid having to test for this
3374 error case below. If we didn't, we might generate wrong code.
3376 For unsigned fields, the constant shifted right by the field length should
3377 be all zero. For signed fields, the high-order bits should agree with
3382 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3383 fold_convert (unsigned_type, rhs),
3384 size_int (lbitsize), 0)))
3386 warning (0, "comparison is always %d due to width of bit-field",
3388 return constant_boolean_node (code == NE_EXPR, compare_type);
3393 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3394 size_int (lbitsize - 1), 0);
3395 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3397 warning (0, "comparison is always %d due to width of bit-field",
3399 return constant_boolean_node (code == NE_EXPR, compare_type);
3403 /* Single-bit compares should always be against zero. */
3404 if (lbitsize == 1 && ! integer_zerop (rhs))
3406 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3407 rhs = build_int_cst (type, 0);
3410 /* Make a new bitfield reference, shift the constant over the
3411 appropriate number of bits and mask it with the computed mask
3412 (in case this was a signed field). If we changed it, make a new one. */
3413 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3416 TREE_SIDE_EFFECTS (lhs) = 1;
3417 TREE_THIS_VOLATILE (lhs) = 1;
3420 rhs = const_binop (BIT_AND_EXPR,
3421 const_binop (LSHIFT_EXPR,
3422 fold_convert (unsigned_type, rhs),
3423 size_int (lbitpos), 0),
3426 return build2 (code, compare_type,
3427 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3431 /* Subroutine for fold_truthop: decode a field reference.
3433 If EXP is a comparison reference, we return the innermost reference.
3435 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3436 set to the starting bit number.
3438 If the innermost field can be completely contained in a mode-sized
3439 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3441 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3442 otherwise it is not changed.
3444 *PUNSIGNEDP is set to the signedness of the field.
3446 *PMASK is set to the mask used. This is either contained in a
3447 BIT_AND_EXPR or derived from the width of the field.
3449 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3451 Return 0 if this is not a component reference or is one that we can't
3452 do anything with. */
3455 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3456 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3457 int *punsignedp, int *pvolatilep,
3458 tree *pmask, tree *pand_mask)
3460 tree outer_type = 0;
3462 tree mask, inner, offset;
3464 unsigned int precision;
3466 /* All the optimizations using this function assume integer fields.
3467 There are problems with FP fields since the type_for_size call
3468 below can fail for, e.g., XFmode. */
3469 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3472 /* We are interested in the bare arrangement of bits, so strip everything
3473 that doesn't affect the machine mode. However, record the type of the
3474 outermost expression if it may matter below. */
3475 if (TREE_CODE (exp) == NOP_EXPR
3476 || TREE_CODE (exp) == CONVERT_EXPR
3477 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3478 outer_type = TREE_TYPE (exp);
3481 if (TREE_CODE (exp) == BIT_AND_EXPR)
3483 and_mask = TREE_OPERAND (exp, 1);
3484 exp = TREE_OPERAND (exp, 0);
3485 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3486 if (TREE_CODE (and_mask) != INTEGER_CST)
3490 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3491 punsignedp, pvolatilep, false);
3492 if ((inner == exp && and_mask == 0)
3493 || *pbitsize < 0 || offset != 0
3494 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3497 /* If the number of bits in the reference is the same as the bitsize of
3498 the outer type, then the outer type gives the signedness. Otherwise
3499 (in case of a small bitfield) the signedness is unchanged. */
3500 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3501 *punsignedp = TYPE_UNSIGNED (outer_type);
3503 /* Compute the mask to access the bitfield. */
3504 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3505 precision = TYPE_PRECISION (unsigned_type);
3507 mask = build_int_cst (unsigned_type, -1);
3508 mask = force_fit_type (mask, 0, false, false);
3510 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3511 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3513 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3515 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3516 fold_convert (unsigned_type, and_mask), mask);
3519 *pand_mask = and_mask;
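/* Illustrative sketch, not part of the original file: the mask computed
   just above, in ordinary C -- start from all ones and shift left then
   right by (precision - bitsize) so only the low-order BITSIZE bits
   survive.  Hypothetical demo, assuming a 32-bit unsigned type and
   0 < bitsize <= 32.  */

static unsigned
demo_field_mask (unsigned bitsize)
{
  unsigned precision = 32;           /* width of the demo type            */
  unsigned mask = ~0u;               /* build_int_cst (unsigned_type, -1) */

  mask <<= precision - bitsize;      /* LSHIFT_EXPR                       */
  mask >>= precision - bitsize;      /* RSHIFT_EXPR                       */
  return mask;                       /* e.g. bitsize == 5 gives 0x1f      */
}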
3523 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3527 all_ones_mask_p (tree mask, int size)
3529 tree type = TREE_TYPE (mask);
3530 unsigned int precision = TYPE_PRECISION (type);
3533 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3534 tmask = force_fit_type (tmask, 0, false, false);
3537 tree_int_cst_equal (mask,
3538 const_binop (RSHIFT_EXPR,
3539 const_binop (LSHIFT_EXPR, tmask,
3540 size_int (precision - size),
3542 size_int (precision - size), 0));
3545 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3546 represents the sign bit of EXP's type. If EXP represents a sign
3547 or zero extension, also test VAL against the unextended type.
3548 The return value is the (sub)expression whose sign bit is VAL,
3549 or NULL_TREE otherwise. */
3552 sign_bit_p (tree exp, tree val)
3554 unsigned HOST_WIDE_INT mask_lo, lo;
3555 HOST_WIDE_INT mask_hi, hi;
3559 /* Tree EXP must have an integral type. */
3560 t = TREE_TYPE (exp);
3561 if (! INTEGRAL_TYPE_P (t))
3564 /* Tree VAL must be an integer constant. */
3565 if (TREE_CODE (val) != INTEGER_CST
3566 || TREE_CONSTANT_OVERFLOW (val))
3569 width = TYPE_PRECISION (t);
3570 if (width > HOST_BITS_PER_WIDE_INT)
3572 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3575 mask_hi = ((unsigned HOST_WIDE_INT) -1
3576 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3582 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3585 mask_lo = ((unsigned HOST_WIDE_INT) -1
3586 >> (HOST_BITS_PER_WIDE_INT - width));
3589 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3590 treat VAL as if it were unsigned. */
3591 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3592 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3595 /* Handle extension from a narrower type. */
3596 if (TREE_CODE (exp) == NOP_EXPR
3597 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3598 return sign_bit_p (TREE_OPERAND (exp, 0), val);
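/* Illustrative sketch, not part of the original file: the constant the
   function above is matching.  For a type of width W the sign bit is the
   value 1 << (W - 1); when W exceeds one host word that bit lives in the
   high word and the low word is zero, which is why separate HI/LO masks
   are computed.  Hypothetical demo, assuming 0 < width <= 64.  */

static unsigned long long
demo_sign_bit_value (unsigned width)
{
  return 1ull << (width - 1);   /* width 8 -> 0x80, width 32 -> 0x80000000 */
}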
3603 /* Subroutine for fold_truthop: determine if an operand is simple enough
3604 to be evaluated unconditionally. */
3607 simple_operand_p (tree exp)
3609 /* Strip any conversions that don't change the machine mode. */
3612 return (CONSTANT_CLASS_P (exp)
3613 || TREE_CODE (exp) == SSA_NAME
3615 && ! TREE_ADDRESSABLE (exp)
3616 && ! TREE_THIS_VOLATILE (exp)
3617 && ! DECL_NONLOCAL (exp)
3618 /* Don't regard global variables as simple. They may be
3619 allocated in ways unknown to the compiler (shared memory,
3620 #pragma weak, etc). */
3621 && ! TREE_PUBLIC (exp)
3622 && ! DECL_EXTERNAL (exp)
3623 /* Loading a static variable is unduly expensive, but global
3624 registers aren't expensive. */
3625 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3628 /* The following functions are subroutines to fold_range_test and allow it to
3629 try to change a logical combination of comparisons into a range test.
3632 X == 2 || X == 3 || X == 4 || X == 5
3636 (unsigned) (X - 2) <= 3
3638 We describe each set of comparisons as being either inside or outside
3639 a range, using a variable named like IN_P, and then describe the
3640 range with a lower and upper bound. If one of the bounds is omitted,
3641 it represents either the highest or lowest value of the type.
3643 In the comments below, we represent a range by two numbers in brackets
3644 preceded by a "+" to designate being inside that range, or a "-" to
3645 designate being outside that range, so the condition can be inverted by
3646 flipping the prefix. An omitted bound is represented by a "-". For
3647 example, "- [-, 10]" means being outside the range starting at the lowest
3648 possible value and ending at 10, in other words, being greater than 10.
3649 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3652 We set up things so that the missing bounds are handled in a consistent
3653 manner so neither a missing bound nor "true" and "false" need to be
3654 handled using a special case. */
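/* Illustrative sketch, not part of the original file: the transformation
   described above at the source level.  The subtraction is done in
   unsigned arithmetic so it cannot overflow.  Hypothetical demo code.  */

static int
demo_range_test (int x)
{
  int by_cases = (x == 2 || x == 3 || x == 4 || x == 5);
  int by_range = (((unsigned) x - 2u) <= 3u);

  return by_cases == by_range;   /* 1 for every int x */
}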
3656 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3657 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3658 and UPPER1_P are nonzero if the respective argument is an upper bound
3659 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3660 must be specified for a comparison. ARG1 will be converted to ARG0's
3661 type if both are specified. */
3664 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3665 tree arg1, int upper1_p)
3671 /* If neither arg represents infinity, do the normal operation.
3672 Else, if not a comparison, return infinity. Else handle the special
3673 comparison rules. Note that most of the cases below won't occur, but
3674 are handled for consistency. */
3676 if (arg0 != 0 && arg1 != 0)
3678 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3679 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3681 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3684 if (TREE_CODE_CLASS (code) != tcc_comparison)
3687 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3688 for neither. In real maths, we cannot assume open ended ranges are
3689 the same. But, this is computer arithmetic, where numbers are finite.
3690 We can therefore make the transformation of any unbounded range with
3691 the value Z, Z being greater than any representable number. This permits
3692 us to treat unbounded ranges as equal. */
3693 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3694 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3698 result = sgn0 == sgn1;
3701 result = sgn0 != sgn1;
3704 result = sgn0 < sgn1;
3707 result = sgn0 <= sgn1;
3710 result = sgn0 > sgn1;
3713 result = sgn0 >= sgn1;
3719 return constant_boolean_node (result, type);
3722 /* Given EXP, a logical expression, set the range it is testing into
3723 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3724 actually being tested. *PLOW and *PHIGH will be made of the same type
3725 as the returned expression. If EXP is not a comparison, we will most
3726 likely not be returning a useful value and range. */
3729 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3731 enum tree_code code;
3732 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3733 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3735 tree low, high, n_low, n_high;
3737 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3738 and see if we can refine the range. Some of the cases below may not
3739 happen, but it doesn't seem worth worrying about this. We "continue"
3740 the outer loop when we've changed something; otherwise we "break"
3741 the switch, which will "break" the while. */
3744 low = high = build_int_cst (TREE_TYPE (exp), 0);
3748 code = TREE_CODE (exp);
3749 exp_type = TREE_TYPE (exp);
3751 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3753 if (TREE_CODE_LENGTH (code) > 0)
3754 arg0 = TREE_OPERAND (exp, 0);
3755 if (TREE_CODE_CLASS (code) == tcc_comparison
3756 || TREE_CODE_CLASS (code) == tcc_unary
3757 || TREE_CODE_CLASS (code) == tcc_binary)
3758 arg0_type = TREE_TYPE (arg0);
3759 if (TREE_CODE_CLASS (code) == tcc_binary
3760 || TREE_CODE_CLASS (code) == tcc_comparison
3761 || (TREE_CODE_CLASS (code) == tcc_expression
3762 && TREE_CODE_LENGTH (code) > 1))
3763 arg1 = TREE_OPERAND (exp, 1);
3768 case TRUTH_NOT_EXPR:
3769 in_p = ! in_p, exp = arg0;
3772 case EQ_EXPR: case NE_EXPR:
3773 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3774 /* We can only do something if the range is testing for zero
3775 and if the second operand is an integer constant. Note that
3776 saying something is "in" the range we make is done by
3777 complementing IN_P since it will set in the initial case of
3778 being not equal to zero; "out" is leaving it alone. */
3779 if (low == 0 || high == 0
3780 || ! integer_zerop (low) || ! integer_zerop (high)
3781 || TREE_CODE (arg1) != INTEGER_CST)
3786 case NE_EXPR: /* - [c, c] */
3789 case EQ_EXPR: /* + [c, c] */
3790 in_p = ! in_p, low = high = arg1;
3792 case GT_EXPR: /* - [-, c] */
3793 low = 0, high = arg1;
3795 case GE_EXPR: /* + [c, -] */
3796 in_p = ! in_p, low = arg1, high = 0;
3798 case LT_EXPR: /* - [c, -] */
3799 low = arg1, high = 0;
3801 case LE_EXPR: /* + [-, c] */
3802 in_p = ! in_p, low = 0, high = arg1;
3808 /* If this is an unsigned comparison, we also know that EXP is
3809 greater than or equal to zero. We base the range tests we make
3810 on that fact, so we record it here so we can parse existing
3811 range tests. We test arg0_type since often the return type
3812 of, e.g. EQ_EXPR, is boolean. */
3813 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3815 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3817 build_int_cst (arg0_type, 0),
3821 in_p = n_in_p, low = n_low, high = n_high;
3823 /* If the high bound is missing, but we have a nonzero low
3824 bound, reverse the range so it goes from zero to the low bound
3826 if (high == 0 && low && ! integer_zerop (low))
3829 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3830 integer_one_node, 0);
3831 low = build_int_cst (arg0_type, 0);
3839 /* (-x) IN [a,b] -> x in [-b, -a] */
3840 n_low = range_binop (MINUS_EXPR, exp_type,
3841 build_int_cst (exp_type, 0),
3843 n_high = range_binop (MINUS_EXPR, exp_type,
3844 build_int_cst (exp_type, 0),
3846 low = n_low, high = n_high;
3852 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3853 build_int_cst (exp_type, 1));
3856 case PLUS_EXPR: case MINUS_EXPR:
3857 if (TREE_CODE (arg1) != INTEGER_CST)
3860 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3861 move a constant to the other side. */
3862 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3865 /* If EXP is signed, any overflow in the computation is undefined,
3866 so we don't worry about it so long as our computations on
3867 the bounds don't overflow. For unsigned, overflow is defined
3868 and this is exactly the right thing. */
3869 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3870 arg0_type, low, 0, arg1, 0);
3871 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3872 arg0_type, high, 1, arg1, 0);
3873 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3874 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3877 /* Check for an unsigned range which has wrapped around the maximum
3878 value thus making n_high < n_low, and normalize it. */
3879 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3881 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3882 integer_one_node, 0);
3883 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3884 integer_one_node, 0);
3886 /* If the range is of the form +/- [ x+1, x ], we won't
3887 be able to normalize it. But then, it represents the
3888 whole range or the empty set, so make it
3890 if (tree_int_cst_equal (n_low, low)
3891 && tree_int_cst_equal (n_high, high))
3897 low = n_low, high = n_high;
3902 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3903 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3906 if (! INTEGRAL_TYPE_P (arg0_type)
3907 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3908 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3911 n_low = low, n_high = high;
3914 n_low = fold_convert (arg0_type, n_low);
3917 n_high = fold_convert (arg0_type, n_high);
3920 /* If we're converting arg0 from an unsigned type, to exp,
3921 a signed type, we will be doing the comparison as unsigned.
3922 The tests above have already verified that LOW and HIGH
3925 So we have to ensure that we will handle large unsigned
3926 values the same way that the current signed bounds treat
3929 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3932 tree equiv_type = lang_hooks.types.type_for_mode
3933 (TYPE_MODE (arg0_type), 1);
3935 /* A range without an upper bound is, naturally, unbounded.
3936 Since convert would have cropped a very large value, use
3937 the max value for the destination type. */
3939 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3940 : TYPE_MAX_VALUE (arg0_type);
3942 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3943 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3944 fold_convert (arg0_type,
3946 fold_convert (arg0_type,
3949 /* If the low bound is specified, "and" the range with the
3950 range for which the original unsigned value will be
3954 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3955 1, n_low, n_high, 1,
3956 fold_convert (arg0_type,
3961 in_p = (n_in_p == in_p);
3965 /* Otherwise, "or" the range with the range of the input
3966 that will be interpreted as negative. */
3967 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3968 0, n_low, n_high, 1,
3969 fold_convert (arg0_type,
3974 in_p = (in_p != n_in_p);
3979 low = n_low, high = n_high;
3989 /* If EXP is a constant, we can evaluate whether this is true or false. */
3990 if (TREE_CODE (exp) == INTEGER_CST)
3992 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3994 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4000 *pin_p = in_p, *plow = low, *phigh = high;
4004 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4005 type, TYPE, return an expression to test if EXP is in (or out of, depending
4006 on IN_P) the range. Return 0 if the test couldn't be created. */
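/* Illustrative sketch, not part of the original file: the arithmetic a
   range check folds to -- subtract the low bound (in unsigned arithmetic,
   so it cannot overflow) and do a single unsigned comparison against the
   width of the range.  The bounds are hard-coded only to keep this
   hypothetical demo short.  */

static int
demo_build_range_check (int x)
{
  int low = 10, high = 20;

  int by_two_compares = (low <= x && x <= high);
  int by_one_compare = (((unsigned) x - (unsigned) low)
                        <= (unsigned) (high - low));

  return by_two_compares == by_one_compare;   /* 1 for every int x */
}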
4009 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4011 tree etype = TREE_TYPE (exp);
4014 #ifdef HAVE_canonicalize_funcptr_for_compare
4015 /* Disable this optimization for function pointer expressions
4016 on targets that require function pointer canonicalization. */
4017 if (HAVE_canonicalize_funcptr_for_compare
4018 && TREE_CODE (etype) == POINTER_TYPE
4019 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4025 value = build_range_check (type, exp, 1, low, high);
4027 return invert_truthvalue (value);
4032 if (low == 0 && high == 0)
4033 return build_int_cst (type, 1);
4036 return fold_build2 (LE_EXPR, type, exp,
4037 fold_convert (etype, high));
4040 return fold_build2 (GE_EXPR, type, exp,
4041 fold_convert (etype, low));
4043 if (operand_equal_p (low, high, 0))
4044 return fold_build2 (EQ_EXPR, type, exp,
4045 fold_convert (etype, low));
4047 if (integer_zerop (low))
4049 if (! TYPE_UNSIGNED (etype))
4051 etype = lang_hooks.types.unsigned_type (etype);
4052 high = fold_convert (etype, high);
4053 exp = fold_convert (etype, exp);
4055 return build_range_check (type, exp, 1, 0, high);
4058 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4059 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4061 unsigned HOST_WIDE_INT lo;
4065 prec = TYPE_PRECISION (etype);
4066 if (prec <= HOST_BITS_PER_WIDE_INT)
4069 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4073 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4074 lo = (unsigned HOST_WIDE_INT) -1;
4077 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4079 if (TYPE_UNSIGNED (etype))
4081 etype = lang_hooks.types.signed_type (etype);
4082 exp = fold_convert (etype, exp);
4084 return fold_build2 (GT_EXPR, type, exp,
4085 build_int_cst (etype, 0));
4089 value = const_binop (MINUS_EXPR, high, low, 0);
4090 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4091 && ! TYPE_UNSIGNED (etype))
4093 tree utype, minv, maxv;
4095 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4096 for the type in question, as we rely on this here. */
4097 switch (TREE_CODE (etype))
4102 /* There is no requirement that LOW be within the range of ETYPE
4103 if the latter is a subtype. It must, however, be within the base
4104 type of ETYPE. So be sure we do the subtraction in that type. */
4105 if (TREE_TYPE (etype))
4106 etype = TREE_TYPE (etype);
4107 utype = lang_hooks.types.unsigned_type (etype);
4108 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4109 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4110 integer_one_node, 1);
4111 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4112 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4116 high = fold_convert (etype, high);
4117 low = fold_convert (etype, low);
4118 exp = fold_convert (etype, exp);
4119 value = const_binop (MINUS_EXPR, high, low, 0);
4127 if (value != 0 && ! TREE_OVERFLOW (value))
4129 /* There is no requirement that LOW be within the range of ETYPE
4130 if the latter is a subtype. It must, however, be within the base
4131 type of ETYPE. So be sure we do the subtraction in that type. */
4132 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4134 etype = TREE_TYPE (etype);
4135 exp = fold_convert (etype, exp);
4136 low = fold_convert (etype, low);
4137 value = fold_convert (etype, value);
4140 return build_range_check (type,
4141 fold_build2 (MINUS_EXPR, etype, exp, low),
4142 1, build_int_cst (etype, 0), value);
4148 /* Given two ranges, see if we can merge them into one. Return 1 if we
4149 can, 0 if we can't. Set the output range into the specified parameters. */
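/* Illustrative sketch, not part of the original file: the kind of merge
   performed here, checked on concrete bounds.  For the overlapping ranges
   [2, 5] and [4, 10], membership in one OR the other is membership in
   [2, 10], and membership in both is membership in [4, 5].  Hypothetical
   demo code.  */

static int
demo_merge_ranges (int x)
{
  int in_first = (2 <= x && x <= 5);
  int in_second = (4 <= x && x <= 10);

  int or_merged = (2 <= x && x <= 10);
  int and_merged = (4 <= x && x <= 5);

  return ((in_first || in_second) == or_merged)
         && ((in_first && in_second) == and_merged);   /* 1 for every x */
}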
4152 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4153 tree high0, int in1_p, tree low1, tree high1)
4161 int lowequal = ((low0 == 0 && low1 == 0)
4162 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4163 low0, 0, low1, 0)));
4164 int highequal = ((high0 == 0 && high1 == 0)
4165 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4166 high0, 1, high1, 1)));
4168 /* Make range 0 be the range that starts first, or ends last if they
4169 start at the same value. Swap them if it isn't. */
4170 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4173 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4174 high1, 1, high0, 1))))
4176 temp = in0_p, in0_p = in1_p, in1_p = temp;
4177 tem = low0, low0 = low1, low1 = tem;
4178 tem = high0, high0 = high1, high1 = tem;
4181 /* Now flag two cases, whether the ranges are disjoint or whether the
4182 second range is totally subsumed in the first. Note that the tests
4183 below are simplified by the ones above. */
4184 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4185 high0, 1, low1, 0));
4186 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4187 high1, 1, high0, 1));
4189 /* We now have four cases, depending on whether we are including or
4190 excluding the two ranges. */
4193 /* If they don't overlap, the result is false. If the second range
4194 is a subset it is the result. Otherwise, the range is from the start
4195 of the second to the end of the first. */
4197 in_p = 0, low = high = 0;
4199 in_p = 1, low = low1, high = high1;
4201 in_p = 1, low = low1, high = high0;
4204 else if (in0_p && ! in1_p)
4206 /* If they don't overlap, the result is the first range. If they are
4207 equal, the result is false. If the second range is a subset of the
4208 first, and the ranges begin at the same place, we go from just after
4209 the end of the first range to the end of the second. If the second
4210 range is not a subset of the first, or if it is a subset and both
4211 ranges end at the same place, the range starts at the start of the
4212 first range and ends just before the second range.
4213 Otherwise, we can't describe this as a single range. */
4215 in_p = 1, low = low0, high = high0;
4216 else if (lowequal && highequal)
4217 in_p = 0, low = high = 0;
4218 else if (subset && lowequal)
4220 in_p = 1, high = high0;
4221 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4222 integer_one_node, 0);
4224 else if (! subset || highequal)
4226 in_p = 1, low = low0;
4227 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4228 integer_one_node, 0);
4234 else if (! in0_p && in1_p)
4236 /* If they don't overlap, the result is the second range. If the second
4237 is a subset of the first, the result is false. Otherwise,
4238 the range starts just after the first range and ends at the
4239 end of the second. */
4241 in_p = 1, low = low1, high = high1;
4242 else if (subset || highequal)
4243 in_p = 0, low = high = 0;
4246 in_p = 1, high = high1;
4247 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4248 integer_one_node, 0);
4254 /* The case where we are excluding both ranges. Here the complex case
4255 is if they don't overlap. In that case, the only time we have a
4256 range is if they are adjacent. If the second is a subset of the
4257 first, the result is the first. Otherwise, the range to exclude
4258 starts at the beginning of the first range and ends at the end of the
4262 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4263 range_binop (PLUS_EXPR, NULL_TREE,
4265 integer_one_node, 1),
4267 in_p = 0, low = low0, high = high1;
4270 /* Canonicalize - [min, x] into - [-, x]. */
4271 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4272 switch (TREE_CODE (TREE_TYPE (low0)))
4275 if (TYPE_PRECISION (TREE_TYPE (low0))
4276 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4281 if (tree_int_cst_equal (low0,
4282 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4286 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4287 && integer_zerop (low0))
4294 /* Canonicalize - [x, max] into - [x, -]. */
4295 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4296 switch (TREE_CODE (TREE_TYPE (high1)))
4299 if (TYPE_PRECISION (TREE_TYPE (high1))
4300 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4305 if (tree_int_cst_equal (high1,
4306 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4310 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4311 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4313 integer_one_node, 1)))
4320 /* The ranges might be also adjacent between the maximum and
4321 minimum values of the given type. For
4322 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4323 return + [x + 1, y - 1]. */
4324 if (low0 == 0 && high1 == 0)
4326 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4327 integer_one_node, 1);
4328 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4329 integer_one_node, 0);
4330 if (low == 0 || high == 0)
4340 in_p = 0, low = low0, high = high0;
4342 in_p = 0, low = low0, high = high1;
4345 *pin_p = in_p, *plow = low, *phigh = high;
4350 /* Subroutine of fold, looking inside expressions of the form
4351 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4352 of the COND_EXPR. This function is being used also to optimize
4353 A op B ? C : A, by reversing the comparison first.
4355 Return a folded expression whose code is not a COND_EXPR
4356 anymore, or NULL_TREE if no folding opportunity is found. */
4359 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4361 enum tree_code comp_code = TREE_CODE (arg0);
4362 tree arg00 = TREE_OPERAND (arg0, 0);
4363 tree arg01 = TREE_OPERAND (arg0, 1);
4364 tree arg1_type = TREE_TYPE (arg1);
4370 /* If we have A op 0 ? A : -A, consider applying the following
4373 A == 0? A : -A same as -A
4374 A != 0? A : -A same as A
4375 A >= 0? A : -A same as abs (A)
4376 A > 0? A : -A same as abs (A)
4377 A <= 0? A : -A same as -abs (A)
4378 A < 0? A : -A same as -abs (A)
4380 None of these transformations work for modes with signed
4381 zeros. If A is +/-0, the first two transformations will
4382 change the sign of the result (from +0 to -0, or vice
4383 versa). The last four will fix the sign of the result,
4384 even though the original expressions could be positive or
4385 negative, depending on the sign of A.
4387 Note that all these transformations are correct if A is
4388 NaN, since the two alternatives (A and -A) are also NaNs. */
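  /* Worked example (illustrative note, not in the original source):
     with A = -0.0 the rewrite "A == 0 ? A : -A  ==>  -A" is the problem
     case -- the test -0.0 == 0 is true, so the original expression yields
     -0.0, while the rewritten form yields -(-0.0), which is +0.0.  Hence
     these folds must not be used when signed zeros are honored.  */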
4389 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4390 ? real_zerop (arg01)
4391 : integer_zerop (arg01))
4392 && ((TREE_CODE (arg2) == NEGATE_EXPR
4393 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4394 /* In the case that A is of the form X-Y, '-A' (arg2) may
4395 have already been folded to Y-X, check for that. */
4396 || (TREE_CODE (arg1) == MINUS_EXPR
4397 && TREE_CODE (arg2) == MINUS_EXPR
4398 && operand_equal_p (TREE_OPERAND (arg1, 0),
4399 TREE_OPERAND (arg2, 1), 0)
4400 && operand_equal_p (TREE_OPERAND (arg1, 1),
4401 TREE_OPERAND (arg2, 0), 0))))
4406 tem = fold_convert (arg1_type, arg1);
4407 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4410 return pedantic_non_lvalue (fold_convert (type, arg1));
4413 if (flag_trapping_math)
4418 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4419 arg1 = fold_convert (lang_hooks.types.signed_type
4420 (TREE_TYPE (arg1)), arg1);
4421 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4422 return pedantic_non_lvalue (fold_convert (type, tem));
4425 if (flag_trapping_math)
4429 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4430 arg1 = fold_convert (lang_hooks.types.signed_type
4431 (TREE_TYPE (arg1)), arg1);
4432 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4433 return negate_expr (fold_convert (type, tem));
4435 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4439 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4440 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4441 both transformations are correct when A is NaN: A != 0
4442 is then true, and A == 0 is false. */
4444 if (integer_zerop (arg01) && integer_zerop (arg2))
4446 if (comp_code == NE_EXPR)
4447 return pedantic_non_lvalue (fold_convert (type, arg1));
4448 else if (comp_code == EQ_EXPR)
4449 return build_int_cst (type, 0);
4452 /* Try some transformations of A op B ? A : B.
4454 A == B? A : B same as B
4455 A != B? A : B same as A
4456 A >= B? A : B same as max (A, B)
4457 A > B? A : B same as max (B, A)
4458 A <= B? A : B same as min (A, B)
4459 A < B? A : B same as min (B, A)
4461 As above, these transformations don't work in the presence
4462 of signed zeros. For example, if A and B are zeros of
4463 opposite sign, the first two transformations will change
4464 the sign of the result. In the last four, the original
4465 expressions give different results for (A=+0, B=-0) and
4466 (A=-0, B=+0), but the transformed expressions do not.
4468 The first two transformations are correct if either A or B
4469 is a NaN. In the first transformation, the condition will
4470 be false, and B will indeed be chosen. In the case of the
4471 second transformation, the condition A != B will be true,
4472 and A will be chosen.
4474 The conversions to max() and min() are not correct if B is
4475 a number and A is not. The conditions in the original
4476 expressions will be false, so all four give B. The min()
4477 and max() versions would give a NaN instead. */
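  /* Concrete illustration: with A = NaN and B = 1.0, each of the four
     ordered forms above selects B (the comparison is false), but
     MIN (A, B) or MAX (A, B) may yield NaN, which is why the min/max
     rewrites below are restricted to !HONOR_NANS.  */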
4478 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4479 /* Avoid these transformations if the COND_EXPR may be used
4480 as an lvalue in the C++ front-end. PR c++/19199. */
4482 || strcmp (lang_hooks.name, "GNU C++") != 0
4483 || ! maybe_lvalue_p (arg1)
4484 || ! maybe_lvalue_p (arg2)))
4486 tree comp_op0 = arg00;
4487 tree comp_op1 = arg01;
4488 tree comp_type = TREE_TYPE (comp_op0);
4490 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4491 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4501 return pedantic_non_lvalue (fold_convert (type, arg2));
4503 return pedantic_non_lvalue (fold_convert (type, arg1));
4508 /* In C++ a ?: expression can be an lvalue, so put the
4509 operand which will be used if they are equal first
4510 so that we can convert this back to the
4511 corresponding COND_EXPR. */
4512 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4514 comp_op0 = fold_convert (comp_type, comp_op0);
4515 comp_op1 = fold_convert (comp_type, comp_op1);
4516 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4517 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4518 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4519 return pedantic_non_lvalue (fold_convert (type, tem));
4526 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4528 comp_op0 = fold_convert (comp_type, comp_op0);
4529 comp_op1 = fold_convert (comp_type, comp_op1);
4530 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4531 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4532 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4533 return pedantic_non_lvalue (fold_convert (type, tem));
4537 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4538 return pedantic_non_lvalue (fold_convert (type, arg2));
4541 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4542 return pedantic_non_lvalue (fold_convert (type, arg1));
4545 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4550 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4551 we might still be able to simplify this. For example,
4552 if C1 is one less or one more than C2, this might have started
4553 out as a MIN or MAX and been transformed by this function.
4554 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
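  /* As an illustration: "A < 5 ? A : 4" may originally have been
     MIN (A, 4); since C1 (5) is C2 (4) + 1, one of the cases below
     reconstructs the MIN_EXPR, and the mirrored C2 +/- 1 cases
     reconstruct a MAX_EXPR.  */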
4556 if (INTEGRAL_TYPE_P (type)
4557 && TREE_CODE (arg01) == INTEGER_CST
4558 && TREE_CODE (arg2) == INTEGER_CST)
4562 /* We can replace A with C1 in this case. */
4563 arg1 = fold_convert (type, arg01);
4564 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4567 /* If C1 is C2 + 1, this is min(A, C2). */
4568 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4570 && operand_equal_p (arg01,
4571 const_binop (PLUS_EXPR, arg2,
4572 integer_one_node, 0),
4574 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4579 /* If C1 is C2 - 1, this is min(A, C2). */
4580 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4582 && operand_equal_p (arg01,
4583 const_binop (MINUS_EXPR, arg2,
4584 integer_one_node, 0),
4586 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4591 /* If C1 is C2 - 1, this is max(A, C2). */
4592 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4594 && operand_equal_p (arg01,
4595 const_binop (MINUS_EXPR, arg2,
4596 integer_one_node, 0),
4598 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4603 /* If C1 is C2 + 1, this is max(A, C2). */
4604 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4606 && operand_equal_p (arg01,
4607 const_binop (PLUS_EXPR, arg2,
4608 integer_one_node, 0),
4610 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4624 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4625 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4628 /* EXP is some logical combination of boolean tests. See if we can
4629 merge it into some range test. Return the new tree if so. */
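/* For example, "ch >= '0' && ch <= '9'" can be expressed as a single range
   check, conceptually "(unsigned) (ch - '0') <= 9", so that only one test
   and branch remain.  */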
4632 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4634 int or_op = (code == TRUTH_ORIF_EXPR
4635 || code == TRUTH_OR_EXPR);
4636 int in0_p, in1_p, in_p;
4637 tree low0, low1, low, high0, high1, high;
4638 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4639 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4642 /* If this is an OR operation, invert both sides; we will invert
4643 again at the end. */
4645 in0_p = ! in0_p, in1_p = ! in1_p;
4647 /* If both expressions are the same, if we can merge the ranges, and we
4648 can build the range test, return it or its inversion. If one of the
4649 ranges is always true or always false, consider it to be the same
4650 expression as the other. */
4651 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4652 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4654 && 0 != (tem = (build_range_check (type,
4656 : rhs != 0 ? rhs : integer_zero_node,
4658 return or_op ? invert_truthvalue (tem) : tem;
4660 /* On machines where branches are expensive, if this is a
4661 short-circuited branch and the underlying object on both sides
4662 is the same, make a non-short-circuit operation. */
4663 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4664 && lhs != 0 && rhs != 0
4665 && (code == TRUTH_ANDIF_EXPR
4666 || code == TRUTH_ORIF_EXPR)
4667 && operand_equal_p (lhs, rhs, 0))
4669 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4670 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4671 which cases we can't do this. */
4672 if (simple_operand_p (lhs))
4673 return build2 (code == TRUTH_ANDIF_EXPR
4674 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4677 else if (lang_hooks.decls.global_bindings_p () == 0
4678 && ! CONTAINS_PLACEHOLDER_P (lhs))
4680 tree common = save_expr (lhs);
4682 if (0 != (lhs = build_range_check (type, common,
4683 or_op ? ! in0_p : in0_p,
4685 && (0 != (rhs = build_range_check (type, common,
4686 or_op ? ! in1_p : in1_p,
4688 return build2 (code == TRUTH_ANDIF_EXPR
4689 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4697 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4698 bit value. Arrange things so the extra bits will be set to zero if and
4699 only if C is sign-extended to its full width. If MASK is nonzero,
4700 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4703 unextend (tree c, int p, int unsignedp, tree mask)
4705 tree type = TREE_TYPE (c);
4706 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4709 if (p == modesize || unsignedp)
4712 /* We work by getting just the sign bit into the low-order bit, then
4713 into the high-order bit, then sign-extend. We then XOR that value with C. */
4715 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4716 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4718 /* We must use a signed type in order to get an arithmetic right shift.
4719 However, we must also avoid introducing accidental overflows, so that
4720 a subsequent call to integer_zerop will work. Hence we must
4721 do the type conversion here. At this point, the constant is either
4722 zero or one, and the conversion to a signed type can never overflow.
4723 We could get an overflow if this conversion is done anywhere else. */
4724 if (TYPE_UNSIGNED (type))
4725 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4727 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4728 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4730 temp = const_binop (BIT_AND_EXPR, temp,
4731 fold_convert (TREE_TYPE (c), mask), 0);
4732 /* If necessary, convert the type back to match the type of C. */
4733 if (TYPE_UNSIGNED (type))
4734 temp = fold_convert (type, temp);
4736 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4739 /* Find ways of folding logical expressions of LHS and RHS:
4740 Try to merge two comparisons to the same innermost item.
4741 Look for range tests like "ch >= '0' && ch <= '9'".
4742 Look for combinations of simple terms on machines with expensive branches
4743 and evaluate the RHS unconditionally.
4745 For example, if we have p->a == 2 && p->b == 4 and we can make an
4746 object large enough to span both A and B, we can do this with a comparison
4747 against the object ANDed with a mask.
4749 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4750 operations to do this with one comparison.
4752 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4753 function and the one above.
4755 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4756 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4758 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4761 We return the simplified tree or 0 if no optimization is possible. */
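/* A sketch of the field-merging idea: when p->a and p->b are narrow fields
   sharing a word, "p->a == 2 && p->b == 4" can be evaluated by loading that
   word once, masking off unrelated bits, and comparing against the constant
   built from 2 and 4 shifted into their bit positions.  */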
4764 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4766 /* If this is the "or" of two comparisons, we can do something if
4767 the comparisons are NE_EXPR. If this is the "and", we can do something
4768 if the comparisons are EQ_EXPR. I.e.,
4769 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4771 WANTED_CODE is this operation code. For single bit fields, we can
4772 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4773 comparison for one-bit fields. */
4775 enum tree_code wanted_code;
4776 enum tree_code lcode, rcode;
4777 tree ll_arg, lr_arg, rl_arg, rr_arg;
4778 tree ll_inner, lr_inner, rl_inner, rr_inner;
4779 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4780 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4781 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4782 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4783 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4784 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4785 enum machine_mode lnmode, rnmode;
4786 tree ll_mask, lr_mask, rl_mask, rr_mask;
4787 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4788 tree l_const, r_const;
4789 tree lntype, rntype, result;
4790 int first_bit, end_bit;
4793 /* Start by getting the comparison codes. Fail if anything is volatile.
4794 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4795 it were surrounded with a NE_EXPR. */
4797 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4800 lcode = TREE_CODE (lhs);
4801 rcode = TREE_CODE (rhs);
4803 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4805 lhs = build2 (NE_EXPR, truth_type, lhs,
4806 build_int_cst (TREE_TYPE (lhs), 0));
4810 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4812 rhs = build2 (NE_EXPR, truth_type, rhs,
4813 build_int_cst (TREE_TYPE (rhs), 0));
4817 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4818 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4821 ll_arg = TREE_OPERAND (lhs, 0);
4822 lr_arg = TREE_OPERAND (lhs, 1);
4823 rl_arg = TREE_OPERAND (rhs, 0);
4824 rr_arg = TREE_OPERAND (rhs, 1);
4826 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4827 if (simple_operand_p (ll_arg)
4828 && simple_operand_p (lr_arg))
4831 if (operand_equal_p (ll_arg, rl_arg, 0)
4832 && operand_equal_p (lr_arg, rr_arg, 0))
4834 result = combine_comparisons (code, lcode, rcode,
4835 truth_type, ll_arg, lr_arg);
4839 else if (operand_equal_p (ll_arg, rr_arg, 0)
4840 && operand_equal_p (lr_arg, rl_arg, 0))
4842 result = combine_comparisons (code, lcode,
4843 swap_tree_comparison (rcode),
4844 truth_type, ll_arg, lr_arg);
4850 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4851 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4853 /* If the RHS can be evaluated unconditionally and its operands are
4854 simple, it wins to evaluate the RHS unconditionally on machines
4855 with expensive branches. In this case, this isn't a comparison
4856 that can be merged. Avoid doing this if the RHS is a floating-point
4857 comparison since those can trap. */
4859 if (BRANCH_COST >= 2
4860 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4861 && simple_operand_p (rl_arg)
4862 && simple_operand_p (rr_arg))
4864 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4865 if (code == TRUTH_OR_EXPR
4866 && lcode == NE_EXPR && integer_zerop (lr_arg)
4867 && rcode == NE_EXPR && integer_zerop (rr_arg)
4868 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4869 return build2 (NE_EXPR, truth_type,
4870 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4872 build_int_cst (TREE_TYPE (ll_arg), 0));
4874 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4875 if (code == TRUTH_AND_EXPR
4876 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4877 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4878 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4879 return build2 (EQ_EXPR, truth_type,
4880 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4882 build_int_cst (TREE_TYPE (ll_arg), 0));
4884 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4885 return build2 (code, truth_type, lhs, rhs);
4888 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4891 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4892 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4896 ll_inner = decode_field_reference (ll_arg,
4897 &ll_bitsize, &ll_bitpos, &ll_mode,
4898 &ll_unsignedp, &volatilep, &ll_mask,
4900 lr_inner = decode_field_reference (lr_arg,
4901 &lr_bitsize, &lr_bitpos, &lr_mode,
4902 &lr_unsignedp, &volatilep, &lr_mask,
4904 rl_inner = decode_field_reference (rl_arg,
4905 &rl_bitsize, &rl_bitpos, &rl_mode,
4906 &rl_unsignedp, &volatilep, &rl_mask,
4908 rr_inner = decode_field_reference (rr_arg,
4909 &rr_bitsize, &rr_bitpos, &rr_mode,
4910 &rr_unsignedp, &volatilep, &rr_mask,
4913 /* The inner operation on the lhs of each comparison must be the
4914 same if we are to be able to do anything. Then see if we have
4915 constants. If not, the same must be true for the rhs operands. */
4917 if (volatilep || ll_inner == 0 || rl_inner == 0
4918 || ! operand_equal_p (ll_inner, rl_inner, 0))
4921 if (TREE_CODE (lr_arg) == INTEGER_CST
4922 && TREE_CODE (rr_arg) == INTEGER_CST)
4923 l_const = lr_arg, r_const = rr_arg;
4924 else if (lr_inner == 0 || rr_inner == 0
4925 || ! operand_equal_p (lr_inner, rr_inner, 0))
4928 l_const = r_const = 0;
4930 /* If either comparison code is not correct for our logical operation,
4931 fail. However, we can convert a one-bit comparison against zero into
4932 the opposite comparison against that bit being set in the field. */
4934 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4935 if (lcode != wanted_code)
4937 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4939 /* Make the left operand unsigned, since we are only interested
4940 in the value of one bit. Otherwise we are doing the wrong thing below. */
4949 /* This is analogous to the code for l_const above. */
4950 if (rcode != wanted_code)
4952 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4961 /* After this point all optimizations will generate bit-field
4962 references, which we might not want. */
4963 if (! lang_hooks.can_use_bit_fields_p ())
4966 /* See if we can find a mode that contains both fields being compared on
4967 the left. If we can't, fail. Otherwise, update all constants and masks
4968 to be relative to a field of that size. */
4969 first_bit = MIN (ll_bitpos, rl_bitpos);
4970 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4971 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4972 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4974 if (lnmode == VOIDmode)
4977 lnbitsize = GET_MODE_BITSIZE (lnmode);
4978 lnbitpos = first_bit & ~ (lnbitsize - 1);
4979 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4980 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4982 if (BYTES_BIG_ENDIAN)
4984 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4985 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4988 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4989 size_int (xll_bitpos), 0);
4990 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4991 size_int (xrl_bitpos), 0);
4995 l_const = fold_convert (lntype, l_const);
4996 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4997 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4998 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4999 fold_build1 (BIT_NOT_EXPR,
5003 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5005 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5010 r_const = fold_convert (lntype, r_const);
5011 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5012 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5013 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5014 fold_build1 (BIT_NOT_EXPR,
5018 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5020 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5024 /* If the right sides are not constant, do the same for them. Also,
5025 disallow this optimization if a size or signedness mismatch occurs
5026 between the left and right sides. */
5029 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5030 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5031 /* Make sure the two fields on the right
5032 correspond to the left without being swapped. */
5033 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5036 first_bit = MIN (lr_bitpos, rr_bitpos);
5037 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5038 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5039 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5041 if (rnmode == VOIDmode)
5044 rnbitsize = GET_MODE_BITSIZE (rnmode);
5045 rnbitpos = first_bit & ~ (rnbitsize - 1);
5046 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5047 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5049 if (BYTES_BIG_ENDIAN)
5051 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5052 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5055 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5056 size_int (xlr_bitpos), 0);
5057 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5058 size_int (xrr_bitpos), 0);
5060 /* Make a mask that corresponds to both fields being compared.
5061 Do this for both items being compared. If the operands are the
5062 same size and the bits being compared are in the same position
5063 then we can do this by masking both and comparing the masked results. */
5065 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5066 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5067 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5069 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5070 ll_unsignedp || rl_unsignedp);
5071 if (! all_ones_mask_p (ll_mask, lnbitsize))
5072 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5074 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5075 lr_unsignedp || rr_unsignedp);
5076 if (! all_ones_mask_p (lr_mask, rnbitsize))
5077 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5079 return build2 (wanted_code, truth_type, lhs, rhs);
5082 /* There is still another way we can do something: If both pairs of
5083 fields being compared are adjacent, we may be able to make a wider
5084 field containing them both.
5086 Note that we still must mask the lhs/rhs expressions. Furthermore,
5087 the mask must be shifted to account for the shift done by
5088 make_bit_field_ref. */
5089 if ((ll_bitsize + ll_bitpos == rl_bitpos
5090 && lr_bitsize + lr_bitpos == rr_bitpos)
5091 || (ll_bitpos == rl_bitpos + rl_bitsize
5092 && lr_bitpos == rr_bitpos + rr_bitsize))
5096 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5097 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5098 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5099 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5101 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5102 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5103 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5104 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5106 /* Convert to the smaller type before masking out unwanted bits. */
5108 if (lntype != rntype)
5110 if (lnbitsize > rnbitsize)
5112 lhs = fold_convert (rntype, lhs);
5113 ll_mask = fold_convert (rntype, ll_mask);
5116 else if (lnbitsize < rnbitsize)
5118 rhs = fold_convert (lntype, rhs);
5119 lr_mask = fold_convert (lntype, lr_mask);
5124 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5125 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5127 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5128 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5130 return build2 (wanted_code, truth_type, lhs, rhs);
5136 /* Handle the case of comparisons with constants. If there is something in
5137 common between the masks, those bits of the constants must be the same.
5138 If not, the condition is always false. Test for this to avoid generating
5139 incorrect code below. */
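  /* For example, "(x & 3) == 1 && (x & 1) == 0" shares bit 0 between the two
     masks, but the constants disagree on that bit, so the conjunction is
     always false (and the dual OR of not-equal tests is always true).  */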
5140 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5141 if (! integer_zerop (result)
5142 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5143 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5145 if (wanted_code == NE_EXPR)
5147 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5148 return constant_boolean_node (true, truth_type);
5152 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5153 return constant_boolean_node (false, truth_type);
5157 /* Construct the expression we will return. First get the component
5158 reference we will make. Unless the mask is all ones the width of
5159 that field, perform the mask operation. Then compare with the merged constant. */
5161 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5162 ll_unsignedp || rl_unsignedp);
5164 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5165 if (! all_ones_mask_p (ll_mask, lnbitsize))
5166 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5168 return build2 (wanted_code, truth_type, result,
5169 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5172 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5176 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5179 enum tree_code op_code;
5180 tree comp_const = op1;
5182 int consts_equal, consts_lt;
5185 STRIP_SIGN_NOPS (arg0);
5187 op_code = TREE_CODE (arg0);
5188 minmax_const = TREE_OPERAND (arg0, 1);
5189 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5190 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5191 inner = TREE_OPERAND (arg0, 0);
5193 /* If something does not permit us to optimize, return the original tree. */
5194 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5195 || TREE_CODE (comp_const) != INTEGER_CST
5196 || TREE_CONSTANT_OVERFLOW (comp_const)
5197 || TREE_CODE (minmax_const) != INTEGER_CST
5198 || TREE_CONSTANT_OVERFLOW (minmax_const))
5201 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5202 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5206 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5208 /* FIXME: We should be able to invert code without building a
5209 scratch tree node, but doing so would require us to
5210 duplicate a part of invert_truthvalue here. */
5211 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5212 tem = optimize_minmax_comparison (TREE_CODE (tem),
5214 TREE_OPERAND (tem, 0),
5215 TREE_OPERAND (tem, 1));
5216 return invert_truthvalue (tem);
5221 fold_build2 (TRUTH_ORIF_EXPR, type,
5222 optimize_minmax_comparison
5223 (EQ_EXPR, type, arg0, comp_const),
5224 optimize_minmax_comparison
5225 (GT_EXPR, type, arg0, comp_const));
5228 if (op_code == MAX_EXPR && consts_equal)
5229 /* MAX (X, 0) == 0 -> X <= 0 */
5230 return fold_build2 (LE_EXPR, type, inner, comp_const);
5232 else if (op_code == MAX_EXPR && consts_lt)
5233 /* MAX (X, 0) == 5 -> X == 5 */
5234 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5236 else if (op_code == MAX_EXPR)
5237 /* MAX (X, 0) == -1 -> false */
5238 return omit_one_operand (type, integer_zero_node, inner);
5240 else if (consts_equal)
5241 /* MIN (X, 0) == 0 -> X >= 0 */
5242 return fold_build2 (GE_EXPR, type, inner, comp_const);
5245 /* MIN (X, 0) == 5 -> false */
5246 return omit_one_operand (type, integer_zero_node, inner);
5249 /* MIN (X, 0) == -1 -> X == -1 */
5250 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5253 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5254 /* MAX (X, 0) > 0 -> X > 0
5255 MAX (X, 0) > 5 -> X > 5 */
5256 return fold_build2 (GT_EXPR, type, inner, comp_const);
5258 else if (op_code == MAX_EXPR)
5259 /* MAX (X, 0) > -1 -> true */
5260 return omit_one_operand (type, integer_one_node, inner);
5262 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5263 /* MIN (X, 0) > 0 -> false
5264 MIN (X, 0) > 5 -> false */
5265 return omit_one_operand (type, integer_zero_node, inner);
5268 /* MIN (X, 0) > -1 -> X > -1 */
5269 return fold_build2 (GT_EXPR, type, inner, comp_const);
5276 /* T is an integer expression that is being multiplied or divided by, or
5277 taken modulo (CODE says which operation and what kind of divide or modulus), a
5278 constant C. See if we can eliminate that operation by folding it with
5279 other operations already in T. WIDE_TYPE, if non-null, is a type that
5280 should be used for the computation if wider than our type.
5282 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5283 (X * 2) + (Y * 4). We must, however, be assured that either the original
5284 expression would not overflow or that overflow is undefined for the type
5285 in the language in question.
5287 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5288 the machine has a multiply-accumulate insn or that this is part of an
5289 addressing calculation.
5291 If we return a non-null expression, it is an equivalent form of the
5292 original computation, but need not be in the original type. */
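/* A further illustration of the "cancel" cases handled below: dividing
   X * 12 by 4 yields X * 3, and dividing X * 4 by 12 yields X / 3, because
   one constant is a multiple of the other (subject to the signedness and
   overflow restrictions spelled out in the code).  */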
5295 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5297 /* To avoid exponential search depth, refuse to allow recursion past
5298 three levels. Beyond that (1) it's highly unlikely that we'll find
5299 something interesting and (2) we've probably processed it before
5300 when we built the inner expression. */
5309 ret = extract_muldiv_1 (t, c, code, wide_type);
5316 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5318 tree type = TREE_TYPE (t);
5319 enum tree_code tcode = TREE_CODE (t);
5320 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5321 > GET_MODE_SIZE (TYPE_MODE (type)))
5322 ? wide_type : type);
5324 int same_p = tcode == code;
5325 tree op0 = NULL_TREE, op1 = NULL_TREE;
5327 /* Don't deal with constants of zero here; they confuse the code below. */
5328 if (integer_zerop (c))
5331 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5332 op0 = TREE_OPERAND (t, 0);
5334 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5335 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5337 /* Note that we need not handle conditional operations here since fold
5338 already handles those cases. So just do arithmetic here. */
5342 /* For a constant, we can always simplify if we are a multiply
5343 or (for divide and modulus) if it is a multiple of our constant. */
5344 if (code == MULT_EXPR
5345 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5346 return const_binop (code, fold_convert (ctype, t),
5347 fold_convert (ctype, c), 0);
5350 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5351 /* If op0 is an expression ... */
5352 if ((COMPARISON_CLASS_P (op0)
5353 || UNARY_CLASS_P (op0)
5354 || BINARY_CLASS_P (op0)
5355 || EXPRESSION_CLASS_P (op0))
5356 /* ... and is unsigned, and its type is smaller than ctype,
5357 then we cannot pass through as widening. */
5358 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5359 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5360 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5361 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5362 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5363 /* ... or this is a truncation (t is narrower than op0),
5364 then we cannot pass through this narrowing. */
5365 || (GET_MODE_SIZE (TYPE_MODE (type))
5366 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5367 /* ... or signedness changes for division or modulus,
5368 then we cannot pass through this conversion. */
5369 || (code != MULT_EXPR
5370 && (TYPE_UNSIGNED (ctype)
5371 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5374 /* Pass the constant down and see if we can make a simplification. If
5375 we can, replace this expression with the inner simplification for
5376 possible later conversion to our or some other type. */
5377 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5378 && TREE_CODE (t2) == INTEGER_CST
5379 && ! TREE_CONSTANT_OVERFLOW (t2)
5380 && (0 != (t1 = extract_muldiv (op0, t2, code,
5382 ? ctype : NULL_TREE))))
5387 /* If widening the type changes it from signed to unsigned, then we
5388 must avoid building ABS_EXPR itself as unsigned. */
5389 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5391 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5392 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5394 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5395 return fold_convert (ctype, t1);
5401 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5402 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5405 case MIN_EXPR: case MAX_EXPR:
5406 /* If widening the type changes the signedness, then we can't perform
5407 this optimization as that changes the result. */
5408 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5411 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5412 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5413 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5415 if (tree_int_cst_sgn (c) < 0)
5416 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5418 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5419 fold_convert (ctype, t2));
5423 case LSHIFT_EXPR: case RSHIFT_EXPR:
5424 /* If the second operand is constant, this is a multiplication
5425 or floor division, by a power of two, so we can treat it that
5426 way unless the multiplier or divisor overflows. Signed
5427 left-shift overflow is implementation-defined rather than
5428 undefined in C90, so do not convert signed left shift into multiplication. */
5430 if (TREE_CODE (op1) == INTEGER_CST
5431 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5432 /* const_binop may not detect overflow correctly,
5433 so check for it explicitly here. */
5434 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5435 && TREE_INT_CST_HIGH (op1) == 0
5436 && 0 != (t1 = fold_convert (ctype,
5437 const_binop (LSHIFT_EXPR,
5440 && ! TREE_OVERFLOW (t1))
5441 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5442 ? MULT_EXPR : FLOOR_DIV_EXPR,
5443 ctype, fold_convert (ctype, op0), t1),
5444 c, code, wide_type);
5447 case PLUS_EXPR: case MINUS_EXPR:
5448 /* See if we can eliminate the operation on both sides. If we can, we
5449 can return a new PLUS or MINUS. If we can't, the only remaining
5450 cases where we can do anything are if the second operand is a constant. */
5452 t1 = extract_muldiv (op0, c, code, wide_type);
5453 t2 = extract_muldiv (op1, c, code, wide_type);
5454 if (t1 != 0 && t2 != 0
5455 && (code == MULT_EXPR
5456 /* If not multiplication, we can only do this if both operands
5457 are divisible by c. */
5458 || (multiple_of_p (ctype, op0, c)
5459 && multiple_of_p (ctype, op1, c))))
5460 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5461 fold_convert (ctype, t2));
5463 /* If this was a subtraction, negate OP1 and set it to be an addition.
5464 This simplifies the logic below. */
5465 if (tcode == MINUS_EXPR)
5466 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5468 if (TREE_CODE (op1) != INTEGER_CST)
5471 /* If either OP1 or C are negative, this optimization is not safe for
5472 some of the division and remainder types while for others we need
5473 to change the code. */
5474 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5476 if (code == CEIL_DIV_EXPR)
5477 code = FLOOR_DIV_EXPR;
5478 else if (code == FLOOR_DIV_EXPR)
5479 code = CEIL_DIV_EXPR;
5480 else if (code != MULT_EXPR
5481 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5485 /* If it's a multiply or a division/modulus operation of a multiple
5486 of our constant, do the operation and verify it doesn't overflow. */
5487 if (code == MULT_EXPR
5488 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5490 op1 = const_binop (code, fold_convert (ctype, op1),
5491 fold_convert (ctype, c), 0);
5492 /* We allow the constant to overflow with wrapping semantics. */
5494 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5500 /* If we have an unsigned type that is not a sizetype, we cannot widen
5501 the operation since it will change the result if the original
5502 computation overflowed. */
5503 if (TYPE_UNSIGNED (ctype)
5504 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5508 /* If we were able to eliminate our operation from the first side,
5509 apply our operation to the second side and reform the PLUS. */
5510 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5511 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5513 /* The last case is if we are a multiply. In that case, we can
5514 apply the distributive law to commute the multiply and addition
5515 if the multiplication of the constants doesn't overflow. */
5516 if (code == MULT_EXPR)
5517 return fold_build2 (tcode, ctype,
5518 fold_build2 (code, ctype,
5519 fold_convert (ctype, op0),
5520 fold_convert (ctype, c)),
5526 /* We have a special case here if we are doing something like
5527 (C * 8) % 4 since we know that's zero. */
5528 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5529 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5530 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5531 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5532 return omit_one_operand (type, integer_zero_node, op0);
5534 /* ... fall through ... */
5536 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5537 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5538 /* If we can extract our operation from the LHS, do so and return a
5539 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5540 do something only if the second operand is a constant. */
5542 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5543 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5544 fold_convert (ctype, op1));
5545 else if (tcode == MULT_EXPR && code == MULT_EXPR
5546 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5547 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5548 fold_convert (ctype, t1));
5549 else if (TREE_CODE (op1) != INTEGER_CST)
5552 /* If these are the same operation types, we can associate them
5553 assuming no overflow. */
5555 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5556 fold_convert (ctype, c), 0))
5557 && ! TREE_OVERFLOW (t1))
5558 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5560 /* If these operations "cancel" each other, we have the main
5561 optimizations of this pass, which occur when either constant is a
5562 multiple of the other, in which case we replace this with an
5563 operation of either CODE or TCODE.
5565 If we have an unsigned type that is not a sizetype, we cannot do
5566 this since it will change the result if the original computation overflowed. */
5568 if ((! TYPE_UNSIGNED (ctype)
5569 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5571 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5572 || (tcode == MULT_EXPR
5573 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5574 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5576 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5577 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5578 fold_convert (ctype,
5579 const_binop (TRUNC_DIV_EXPR,
5581 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5582 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5583 fold_convert (ctype,
5584 const_binop (TRUNC_DIV_EXPR,
5596 /* Return a node which has the indicated constant VALUE (either 0 or
5597 1), and is of the indicated TYPE. */
5600 constant_boolean_node (int value, tree type)
5602 if (type == integer_type_node)
5603 return value ? integer_one_node : integer_zero_node;
5604 else if (type == boolean_type_node)
5605 return value ? boolean_true_node : boolean_false_node;
5607 return build_int_cst (type, value);
5611 /* Return true if expr looks like an ARRAY_REF and set base and
5612 offset to the appropriate trees. If there is no offset,
5613 offset is set to NULL_TREE. Base will be canonicalized to
5614 something you can get the element type from using
5615 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5616 in bytes to the base. */
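/* For instance, "&a[i]" yields base "a" and offset "i * sizeof (*a)", while
   a plain pointer variable "p" yields base "p" with a NULL_TREE offset.  */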
5619 extract_array_ref (tree expr, tree *base, tree *offset)
5621 /* One canonical form is a PLUS_EXPR with the first
5622 argument being an ADDR_EXPR with a possible NOP_EXPR
5624 if (TREE_CODE (expr) == PLUS_EXPR)
5626 tree op0 = TREE_OPERAND (expr, 0);
5627 tree inner_base, dummy1;
5628 /* Strip NOP_EXPRs here because the C frontends and/or
5629 folders may present us with (int *)&x.a + 4B. */
5631 if (extract_array_ref (op0, &inner_base, &dummy1))
5634 if (dummy1 == NULL_TREE)
5635 *offset = TREE_OPERAND (expr, 1);
5637 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5638 dummy1, TREE_OPERAND (expr, 1));
5642 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5643 which we transform into an ADDR_EXPR with appropriate
5644 offset. For other arguments to the ADDR_EXPR we assume
5645 zero offset and as such do not care about the ADDR_EXPR
5646 type and strip possible nops from it. */
5647 else if (TREE_CODE (expr) == ADDR_EXPR)
5649 tree op0 = TREE_OPERAND (expr, 0);
5650 if (TREE_CODE (op0) == ARRAY_REF)
5652 tree idx = TREE_OPERAND (op0, 1);
5653 *base = TREE_OPERAND (op0, 0);
5654 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5655 array_ref_element_size (op0));
5659 /* Handle array-to-pointer decay as &a. */
5660 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5661 *base = TREE_OPERAND (expr, 0);
5664 *offset = NULL_TREE;
5668 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5669 else if (SSA_VAR_P (expr)
5670 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5673 *offset = NULL_TREE;
5681 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5682 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5683 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5684 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5685 COND is the first argument to CODE; otherwise (as in the example
5686 given here), it is the second argument. TYPE is the type of the
5687 original expression. Return NULL_TREE if no simplification is possible. */
5691 fold_binary_op_with_conditional_arg (enum tree_code code,
5692 tree type, tree op0, tree op1,
5693 tree cond, tree arg, int cond_first_p)
5695 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5696 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5697 tree test, true_value, false_value;
5698 tree lhs = NULL_TREE;
5699 tree rhs = NULL_TREE;
5701 /* This transformation is only worthwhile if we don't have to wrap
5702 arg in a SAVE_EXPR, and the operation can be simplified on at least
5703 one of the branches once it's pushed inside the COND_EXPR. */
5704 if (!TREE_CONSTANT (arg))
5707 if (TREE_CODE (cond) == COND_EXPR)
5709 test = TREE_OPERAND (cond, 0);
5710 true_value = TREE_OPERAND (cond, 1);
5711 false_value = TREE_OPERAND (cond, 2);
5712 /* If this operand is an expression that throws (and hence has void
5713 type), it does not make sense to perform a logical or arithmetic operation on it. */
5715 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5717 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5722 tree testtype = TREE_TYPE (cond);
5724 true_value = constant_boolean_node (true, testtype);
5725 false_value = constant_boolean_node (false, testtype);
5728 arg = fold_convert (arg_type, arg);
5731 true_value = fold_convert (cond_type, true_value);
5733 lhs = fold_build2 (code, type, true_value, arg);
5735 lhs = fold_build2 (code, type, arg, true_value);
5739 false_value = fold_convert (cond_type, false_value);
5741 rhs = fold_build2 (code, type, false_value, arg);
5743 rhs = fold_build2 (code, type, arg, false_value);
5746 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5747 return fold_convert (type, test);
5751 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5753 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5754 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5755 ADDEND is the same as X.
5757 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5758 and finite. The problematic cases are when X is zero, and its mode
5759 has signed zeros. In the case of rounding towards -infinity,
5760 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5761 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5764 fold_real_zero_addition_p (tree type, tree addend, int negate)
5766 if (!real_zerop (addend))
5769 /* Don't allow the fold with -fsignaling-nans. */
5770 if (HONOR_SNANS (TYPE_MODE (type)))
5773 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5774 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5777 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5778 if (TREE_CODE (addend) == REAL_CST
5779 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5782 /* The mode has signed zeros, and we have to honor their sign.
5783 In this situation, there is only one case we can return true for.
5784 X - 0 is the same as X unless rounding towards -infinity is
5786 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5789 /* Subroutine of fold() that checks comparisons of built-in math
5790 functions against real constants.
5792 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5793 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5794 is the type of the result and ARG0 and ARG1 are the operands of the
5795 comparison. ARG1 must be a TREE_REAL_CST.
5797 The function returns the constant folded tree if a simplification
5798 can be made, and NULL_TREE otherwise. */
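/* For example, "sqrt (x) > 2.0" can be folded to "x > 4.0" (both sides are
   false when x is negative or NaN), and "sqrt (x) < -1.0" is always false.  */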
5801 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5802 tree type, tree arg0, tree arg1)
5806 if (BUILTIN_SQRT_P (fcode))
5808 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5809 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5811 c = TREE_REAL_CST (arg1);
5812 if (REAL_VALUE_NEGATIVE (c))
5814 /* sqrt(x) < y is always false, if y is negative. */
5815 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5816 return omit_one_operand (type, integer_zero_node, arg);
5818 /* sqrt(x) > y is always true, if y is negative and we
5819 don't care about NaNs, i.e. negative values of x. */
5820 if (code == NE_EXPR || !HONOR_NANS (mode))
5821 return omit_one_operand (type, integer_one_node, arg);
5823 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5824 return fold_build2 (GE_EXPR, type, arg,
5825 build_real (TREE_TYPE (arg), dconst0));
5827 else if (code == GT_EXPR || code == GE_EXPR)
5831 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5832 real_convert (&c2, mode, &c2);
5834 if (REAL_VALUE_ISINF (c2))
5836 /* sqrt(x) > y is x == +Inf, when y is very large. */
5837 if (HONOR_INFINITIES (mode))
5838 return fold_build2 (EQ_EXPR, type, arg,
5839 build_real (TREE_TYPE (arg), c2));
5841 /* sqrt(x) > y is always false, when y is very large
5842 and we don't care about infinities. */
5843 return omit_one_operand (type, integer_zero_node, arg);
5846 /* sqrt(x) > c is the same as x > c*c. */
5847 return fold_build2 (code, type, arg,
5848 build_real (TREE_TYPE (arg), c2));
5850 else if (code == LT_EXPR || code == LE_EXPR)
5854 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5855 real_convert (&c2, mode, &c2);
5857 if (REAL_VALUE_ISINF (c2))
5859 /* sqrt(x) < y is always true, when y is a very large
5860 value and we don't care about NaNs or Infinities. */
5861 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5862 return omit_one_operand (type, integer_one_node, arg);
5864 /* sqrt(x) < y is x != +Inf when y is very large and we
5865 don't care about NaNs. */
5866 if (! HONOR_NANS (mode))
5867 return fold_build2 (NE_EXPR, type, arg,
5868 build_real (TREE_TYPE (arg), c2));
5870 /* sqrt(x) < y is x >= 0 when y is very large and we
5871 don't care about Infinities. */
5872 if (! HONOR_INFINITIES (mode))
5873 return fold_build2 (GE_EXPR, type, arg,
5874 build_real (TREE_TYPE (arg), dconst0));
5876 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5877 if (lang_hooks.decls.global_bindings_p () != 0
5878 || CONTAINS_PLACEHOLDER_P (arg))
5881 arg = save_expr (arg);
5882 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5883 fold_build2 (GE_EXPR, type, arg,
5884 build_real (TREE_TYPE (arg),
5886 fold_build2 (NE_EXPR, type, arg,
5887 build_real (TREE_TYPE (arg),
5891 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5892 if (! HONOR_NANS (mode))
5893 return fold_build2 (code, type, arg,
5894 build_real (TREE_TYPE (arg), c2));
5896 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5897 if (lang_hooks.decls.global_bindings_p () == 0
5898 && ! CONTAINS_PLACEHOLDER_P (arg))
5900 arg = save_expr (arg);
5901 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5902 fold_build2 (GE_EXPR, type, arg,
5903 build_real (TREE_TYPE (arg),
5905 fold_build2 (code, type, arg,
5906 build_real (TREE_TYPE (arg),
5915 /* Subroutine of fold() that optimizes comparisons against Infinities,
5916 either +Inf or -Inf.
5918 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5919 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5920 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5922 The function returns the constant folded tree if a simplification
5923 can be made, and NULL_TREE otherwise. */
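/* For example, with doubles "x < +Inf" becomes "x <= DBL_MAX", and
   "x > +Inf" folds to false once sNaNs need not be honored.  */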
5926 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5928 enum machine_mode mode;
5929 REAL_VALUE_TYPE max;
5933 mode = TYPE_MODE (TREE_TYPE (arg0));
5935 /* For negative infinity swap the sense of the comparison. */
5936 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5938 code = swap_tree_comparison (code);
5943 /* x > +Inf is always false, if we ignore sNaNs. */
5944 if (HONOR_SNANS (mode))
5946 return omit_one_operand (type, integer_zero_node, arg0);
5949 /* x <= +Inf is always true, if we don't care about NaNs. */
5950 if (! HONOR_NANS (mode))
5951 return omit_one_operand (type, integer_one_node, arg0);
5953 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5954 if (lang_hooks.decls.global_bindings_p () == 0
5955 && ! CONTAINS_PLACEHOLDER_P (arg0))
5957 arg0 = save_expr (arg0);
5958 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5964 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5965 real_maxval (&max, neg, mode);
5966 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5967 arg0, build_real (TREE_TYPE (arg0), max));
5970 /* x < +Inf is always equal to x <= DBL_MAX. */
5971 real_maxval (&max, neg, mode);
5972 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5973 arg0, build_real (TREE_TYPE (arg0), max));
5976 /* x != +Inf is always equal to !(x > DBL_MAX). */
5977 real_maxval (&max, neg, mode);
5978 if (! HONOR_NANS (mode))
5979 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5980 arg0, build_real (TREE_TYPE (arg0), max));
5982 /* The transformation below creates non-gimple code and thus is
5983 not appropriate if we are in gimple form. */
5987 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5988 arg0, build_real (TREE_TYPE (arg0), max));
5989 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5998 /* Subroutine of fold() that optimizes comparisons of a division by
5999 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
6002 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6003 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6004 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6006 The function returns the constant folded tree if a simplification
6007 can be made, and NULL_TREE otherwise. */
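/* For example, with unsigned X the test "X / 4 == 3" holds exactly when X is
   in [12, 15], so it can be folded into a single range check on X.  */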
6010 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6012 tree prod, tmp, hi, lo;
6013 tree arg00 = TREE_OPERAND (arg0, 0);
6014 tree arg01 = TREE_OPERAND (arg0, 1);
6015 unsigned HOST_WIDE_INT lpart;
6016 HOST_WIDE_INT hpart;
6019 /* We have to do this the hard way to detect unsigned overflow.
6020 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6021 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6022 TREE_INT_CST_HIGH (arg01),
6023 TREE_INT_CST_LOW (arg1),
6024 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6025 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6026 prod = force_fit_type (prod, -1, overflow, false);
6028 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6030 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6033 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6034 overflow = add_double (TREE_INT_CST_LOW (prod),
6035 TREE_INT_CST_HIGH (prod),
6036 TREE_INT_CST_LOW (tmp),
6037 TREE_INT_CST_HIGH (tmp),
6039 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6040 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6041 TREE_CONSTANT_OVERFLOW (prod));
6043 else if (tree_int_cst_sgn (arg01) >= 0)
6045 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6046 switch (tree_int_cst_sgn (arg1))
6049 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6054 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6059 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6069 /* A negative divisor reverses the relational operators. */
6070 code = swap_tree_comparison (code);
6072 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6073 switch (tree_int_cst_sgn (arg1))
6076 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6081 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6086 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6098 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6099 return omit_one_operand (type, integer_zero_node, arg00);
6100 if (TREE_OVERFLOW (hi))
6101 return fold_build2 (GE_EXPR, type, arg00, lo);
6102 if (TREE_OVERFLOW (lo))
6103 return fold_build2 (LE_EXPR, type, arg00, hi);
6104 return build_range_check (type, arg00, 1, lo, hi);
6107 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6108 return omit_one_operand (type, integer_one_node, arg00);
6109 if (TREE_OVERFLOW (hi))
6110 return fold_build2 (LT_EXPR, type, arg00, lo);
6111 if (TREE_OVERFLOW (lo))
6112 return fold_build2 (GT_EXPR, type, arg00, hi);
6113 return build_range_check (type, arg00, 0, lo, hi);
6116 if (TREE_OVERFLOW (lo))
6117 return omit_one_operand (type, integer_zero_node, arg00);
6118 return fold_build2 (LT_EXPR, type, arg00, lo);
6121 if (TREE_OVERFLOW (hi))
6122 return omit_one_operand (type, integer_one_node, arg00);
6123 return fold_build2 (LE_EXPR, type, arg00, hi);
6126 if (TREE_OVERFLOW (hi))
6127 return omit_one_operand (type, integer_zero_node, arg00);
6128 return fold_build2 (GT_EXPR, type, arg00, hi);
6131 if (TREE_OVERFLOW (lo))
6132 return omit_one_operand (type, integer_one_node, arg00);
6133 return fold_build2 (GE_EXPR, type, arg00, lo);
6143 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6144 equality/inequality test, then return a simplified form of the test
6145 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
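/* For example (assuming a 32-bit int x), "(x & 0x80000000) != 0" tests the
   sign bit and becomes "x < 0", and the "== 0" form becomes "x >= 0".  */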
6149 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6152 /* If this is testing a single bit, we can optimize the test. */
6153 if ((code == NE_EXPR || code == EQ_EXPR)
6154 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6155 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6157 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6158 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6159 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6161 if (arg00 != NULL_TREE
6162 /* This is only a win if casting to a signed type is cheap,
6163 i.e. when arg00's type is not a partial mode. */
6164 && TYPE_PRECISION (TREE_TYPE (arg00))
6165 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6167 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6168 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6169 result_type, fold_convert (stype, arg00),
6170 build_int_cst (stype, 0));
6177 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6178 equality/inequality test, then return a simplified form of
6179 the test using shifts and logical operations. Otherwise return
6180 NULL. TYPE is the desired result type. */
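/* For example, "(x & 8) != 0" can be rewritten as "(x >> 3) & 1", and
   "(x & 8) == 0" as "((x >> 3) ^ 1) & 1".  */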
6183 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6186 /* If this is testing a single bit, we can optimize the test. */
6187 if ((code == NE_EXPR || code == EQ_EXPR)
6188 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6189 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6191 tree inner = TREE_OPERAND (arg0, 0);
6192 tree type = TREE_TYPE (arg0);
6193 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6194 enum machine_mode operand_mode = TYPE_MODE (type);
6196 tree signed_type, unsigned_type, intermediate_type;
6199 /* First, see if we can fold the single bit test into a sign-bit test. */
6201 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6206 /* Otherwise we have (A & C) != 0 where C is a single bit,
6207 convert that into ((A >> C2) & 1), where C2 = log2(C).
6208 Similarly for (A & C) == 0. */
6210 /* If INNER is a right shift of a constant and it plus BITNUM does
6211 not overflow, adjust BITNUM and INNER. */
6212 if (TREE_CODE (inner) == RSHIFT_EXPR
6213 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6214 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6215 && bitnum < TYPE_PRECISION (type)
6216 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6217 bitnum - TYPE_PRECISION (type)))
6219 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6220 inner = TREE_OPERAND (inner, 0);
6223 /* If we are going to be able to omit the AND below, we must do our
6224 operations as unsigned. If we must use the AND, we have a choice.
6225 Normally unsigned is faster, but for some machines signed is. */
6226 #ifdef LOAD_EXTEND_OP
6227 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6228 && !flag_syntax_only) ? 0 : 1;
6233 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6234 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6235 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6236 inner = fold_convert (intermediate_type, inner);
6239 inner = build2 (RSHIFT_EXPR, intermediate_type,
6240 inner, size_int (bitnum));
6242 if (code == EQ_EXPR)
6243 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6244 inner, integer_one_node);
6246 /* Put the AND last so it can combine with more things. */
6247 inner = build2 (BIT_AND_EXPR, intermediate_type,
6248 inner, integer_one_node);
6250 /* Make sure to return the proper type. */
6251 inner = fold_convert (result_type, inner);
6258 /* Check whether we are allowed to reorder operands arg0 and arg1,
6259 such that the evaluation of arg1 occurs before arg0. */
6262 reorder_operands_p (tree arg0, tree arg1)
6264 if (! flag_evaluation_order)
6266 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6268 return ! TREE_SIDE_EFFECTS (arg0)
6269 && ! TREE_SIDE_EFFECTS (arg1);
6272 /* Test whether it is preferable to swap two operands, ARG0 and
6273 ARG1, for example because ARG0 is an integer constant and ARG1
6274 isn't. If REORDER is true, only recommend swapping if we can
6275 evaluate the operands in reverse order. */
6278 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6280 STRIP_SIGN_NOPS (arg0);
6281 STRIP_SIGN_NOPS (arg1);
6283 if (TREE_CODE (arg1) == INTEGER_CST)
6285 if (TREE_CODE (arg0) == INTEGER_CST)
6288 if (TREE_CODE (arg1) == REAL_CST)
6290 if (TREE_CODE (arg0) == REAL_CST)
6293 if (TREE_CODE (arg1) == COMPLEX_CST)
6295 if (TREE_CODE (arg0) == COMPLEX_CST)
6298 if (TREE_CONSTANT (arg1))
6300 if (TREE_CONSTANT (arg0))
6306 if (reorder && flag_evaluation_order
6307 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6315 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6316 for commutative and comparison operators. Ensuring a canonical
6317 form allows the optimizers to find additional redundancies without
6318 having to explicitly check for both orderings. */
6319 if (TREE_CODE (arg0) == SSA_NAME
6320 && TREE_CODE (arg1) == SSA_NAME
6321 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
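/* Illustrative annotation (not in the original source): for a commutative
   operation, fold_binary uses this predicate to move constants to the
   second operand, so 5 + x is rebuilt as x + 5; likewise two SSA names are
   ordered by version number so both operand orders reach the same
   canonical tree.  */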
6327 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6328 ARG0 is extended to a wider type. */
6331 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6333 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6335 tree shorter_type, outer_type;
6339 if (arg0_unw == arg0)
6341 shorter_type = TREE_TYPE (arg0_unw);
6343 #ifdef HAVE_canonicalize_funcptr_for_compare
6344 /* Disable this optimization if we're casting a function pointer
6345 type on targets that require function pointer canonicalization. */
6346 if (HAVE_canonicalize_funcptr_for_compare
6347 && TREE_CODE (shorter_type) == POINTER_TYPE
6348 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6352 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6355 arg1_unw = get_unwidened (arg1, shorter_type);
6357 /* If possible, express the comparison in the shorter mode. */
6358 if ((code == EQ_EXPR || code == NE_EXPR
6359 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6360 && (TREE_TYPE (arg1_unw) == shorter_type
6361 || (TREE_CODE (arg1_unw) == INTEGER_CST
6362 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6363 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6364 && int_fits_type_p (arg1_unw, shorter_type))))
6365 return fold_build2 (code, type, arg0_unw,
6366 fold_convert (shorter_type, arg1_unw));
6368 if (TREE_CODE (arg1_unw) != INTEGER_CST
6369 || TREE_CODE (shorter_type) != INTEGER_TYPE
6370 || !int_fits_type_p (arg1_unw, shorter_type))
6373 /* If we are comparing with an integer that does not fit into the range
6374 of the shorter type, the result is known. */
6375 outer_type = TREE_TYPE (arg1_unw);
6376 min = lower_bound_in_type (outer_type, shorter_type);
6377 max = upper_bound_in_type (outer_type, shorter_type);
6379 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6381 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6388 return omit_one_operand (type, integer_zero_node, arg0);
6393 return omit_one_operand (type, integer_one_node, arg0);
6399 return omit_one_operand (type, integer_one_node, arg0);
6401 return omit_one_operand (type, integer_zero_node, arg0);
6406 return omit_one_operand (type, integer_zero_node, arg0);
6408 return omit_one_operand (type, integer_one_node, arg0);
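/* Illustrative annotation (not in the original source): with an unsigned
   char C widened for the comparison, (int) c > 300 can never hold, so it
   folds to constant zero (keeping any side effects of the operand), while
   (int) c < 300 folds to constant one.  */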
6417 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6418 conversion on ARG0 changes only its signedness. */
6421 fold_sign_changed_comparison (enum tree_code code, tree type,
6422 tree arg0, tree arg1)
6424 tree arg0_inner, tmp;
6425 tree inner_type, outer_type;
6427 if (TREE_CODE (arg0) != NOP_EXPR
6428 && TREE_CODE (arg0) != CONVERT_EXPR)
6431 outer_type = TREE_TYPE (arg0);
6432 arg0_inner = TREE_OPERAND (arg0, 0);
6433 inner_type = TREE_TYPE (arg0_inner);
6435 #ifdef HAVE_canonicalize_funcptr_for_compare
6436 /* Disable this optimization if we're casting a function pointer
6437 type on targets that require function pointer canonicalization. */
6438 if (HAVE_canonicalize_funcptr_for_compare
6439 && TREE_CODE (inner_type) == POINTER_TYPE
6440 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6444 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6447 if (TREE_CODE (arg1) != INTEGER_CST
6448 && !((TREE_CODE (arg1) == NOP_EXPR
6449 || TREE_CODE (arg1) == CONVERT_EXPR)
6450 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6453 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6458 if (TREE_CODE (arg1) == INTEGER_CST)
6460 tmp = build_int_cst_wide (inner_type,
6461 TREE_INT_CST_LOW (arg1),
6462 TREE_INT_CST_HIGH (arg1));
6463 arg1 = force_fit_type (tmp, 0,
6464 TREE_OVERFLOW (arg1),
6465 TREE_CONSTANT_OVERFLOW (arg1));
6468 arg1 = fold_convert (inner_type, arg1);
6470 return fold_build2 (code, type, arg0_inner, arg1);
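/* Illustrative annotation (not in the original source): if I is an int,
   a comparison such as (unsigned int) i == 5u is rebuilt as i == 5, with
   the constant re-expressed in the inner (signed) type.  */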
6473 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6474 the step of the array. Reconstructs s and delta when s * delta is
6475 an integer constant (and thus already folded).
6476 ADDR is the address. OP1 is the multiplicative expression.
6477 If the function succeeds, the new address expression is returned. Otherwise
6478 NULL_TREE is returned. */
6481 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6483 tree s, delta, step;
6484 tree ref = TREE_OPERAND (addr, 0), pref;
6488 /* Canonicalize op1 into a possibly non-constant delta
6489 and an INTEGER_CST s. */
6490 if (TREE_CODE (op1) == MULT_EXPR)
6492 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6497 if (TREE_CODE (arg0) == INTEGER_CST)
6502 else if (TREE_CODE (arg1) == INTEGER_CST)
6510 else if (TREE_CODE (op1) == INTEGER_CST)
6517 /* Pretend that OP1 is delta * 1. */
6519 s = integer_one_node;
6522 for (;; ref = TREE_OPERAND (ref, 0))
6524 if (TREE_CODE (ref) == ARRAY_REF)
6526 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6530 step = array_ref_element_size (ref);
6531 if (TREE_CODE (step) != INTEGER_CST)
6536 if (! tree_int_cst_equal (step, s))
6541 /* Check whether delta is a multiple of step. */
6542 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6551 if (!handled_component_p (ref))
6555 /* We found a suitable array reference. So copy everything up to it,
6556 and replace the index. */
6558 pref = TREE_OPERAND (addr, 0);
6559 ret = copy_node (pref);
6564 pref = TREE_OPERAND (pref, 0);
6565 TREE_OPERAND (pos, 0) = copy_node (pref);
6566 pos = TREE_OPERAND (pos, 0);
6569 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6570 fold_convert (itype,
6571 TREE_OPERAND (pos, 1)),
6572 fold_convert (itype, delta));
6574 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
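/* Illustrative annotation (not in the original source): for an array A of
   4-byte elements, &a[i] + j * 4 is rewritten as &a[i + j], and a constant
   offset such as &a[i] + 8 becomes &a[i + 2].  */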
6578 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6579 means A >= Y && A != MAX, but in this case we know that
6580 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6583 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6585 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6587 if (TREE_CODE (bound) == LT_EXPR)
6588 a = TREE_OPERAND (bound, 0);
6589 else if (TREE_CODE (bound) == GT_EXPR)
6590 a = TREE_OPERAND (bound, 1);
6594 typea = TREE_TYPE (a);
6595 if (!INTEGRAL_TYPE_P (typea)
6596 && !POINTER_TYPE_P (typea))
6599 if (TREE_CODE (ineq) == LT_EXPR)
6601 a1 = TREE_OPERAND (ineq, 1);
6602 y = TREE_OPERAND (ineq, 0);
6604 else if (TREE_CODE (ineq) == GT_EXPR)
6606 a1 = TREE_OPERAND (ineq, 0);
6607 y = TREE_OPERAND (ineq, 1);
6612 if (TREE_TYPE (a1) != typea)
6615 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6616 if (!integer_onep (diff))
6619 return fold_build2 (GE_EXPR, type, a, y);
6622 /* Fold a sum or difference in which at least one operand is a multiplication.
6623 Returns the folded tree or NULL if no simplification could be made. */
6626 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6628 tree arg00, arg01, arg10, arg11;
6629 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6631 /* (A * C) +- (B * C) -> (A+-B) * C.
6632 (A * C) +- A -> A * (C+-1).
6633 We are most concerned about the case where C is a constant,
6634 but other combinations show up during loop reduction. Since
6635 it is not difficult, try all four possibilities. */
6637 if (TREE_CODE (arg0) == MULT_EXPR)
6639 arg00 = TREE_OPERAND (arg0, 0);
6640 arg01 = TREE_OPERAND (arg0, 1);
6645 if (!FLOAT_TYPE_P (type))
6646 arg01 = build_int_cst (type, 1);
6648 arg01 = build_real (type, dconst1);
6650 if (TREE_CODE (arg1) == MULT_EXPR)
6652 arg10 = TREE_OPERAND (arg1, 0);
6653 arg11 = TREE_OPERAND (arg1, 1);
6658 if (!FLOAT_TYPE_P (type))
6659 arg11 = build_int_cst (type, 1);
6661 arg11 = build_real (type, dconst1);
6665 if (operand_equal_p (arg01, arg11, 0))
6666 same = arg01, alt0 = arg00, alt1 = arg10;
6667 else if (operand_equal_p (arg00, arg10, 0))
6668 same = arg00, alt0 = arg01, alt1 = arg11;
6669 else if (operand_equal_p (arg00, arg11, 0))
6670 same = arg00, alt0 = arg01, alt1 = arg10;
6671 else if (operand_equal_p (arg01, arg10, 0))
6672 same = arg01, alt0 = arg00, alt1 = arg11;
6674 /* No identical multiplicands; see if we can find a common
6675 power-of-two factor in non-power-of-two multiplies. This
6676 can help in multi-dimensional array access. */
6677 else if (host_integerp (arg01, 0)
6678 && host_integerp (arg11, 0))
6680 HOST_WIDE_INT int01, int11, tmp;
6683 int01 = TREE_INT_CST_LOW (arg01);
6684 int11 = TREE_INT_CST_LOW (arg11);
6686 /* Move min of absolute values to int11. */
6687 if ((int01 >= 0 ? int01 : -int01)
6688 < (int11 >= 0 ? int11 : -int11))
6690 tmp = int01, int01 = int11, int11 = tmp;
6691 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6698 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6700 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6701 build_int_cst (TREE_TYPE (arg00),
6706 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6711 return fold_build2 (MULT_EXPR, type,
6712 fold_build2 (code, type,
6713 fold_convert (type, alt0),
6714 fold_convert (type, alt1)),
6715 fold_convert (type, same));
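/* Illustrative annotation (not in the original source): x*3 + y*3 becomes
   (x + y) * 3, x*5 + x becomes x * 6, and the power-of-two case turns
   i*12 + j*4 into (i*3 + j) * 4.  */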
6720 /* Fold a unary expression of code CODE and type TYPE with operand
6721 OP0. Return the folded expression if folding is successful.
6722 Otherwise, return NULL_TREE. */
6725 fold_unary (enum tree_code code, tree type, tree op0)
6729 enum tree_code_class kind = TREE_CODE_CLASS (code);
6731 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6732 && TREE_CODE_LENGTH (code) == 1);
6737 if (code == NOP_EXPR || code == CONVERT_EXPR
6738 || code == FLOAT_EXPR || code == ABS_EXPR)
6740 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6742 STRIP_SIGN_NOPS (arg0);
6746 /* Strip any conversions that don't change the mode. This
6747 is safe for every expression, except for a comparison
6748 expression because its signedness is derived from its operands.
6751 Note that this is done as an internal manipulation within
6752 the constant folder, in order to find the simplest
6753 representation of the arguments so that their form can be
6754 studied. In any cases, the appropriate type conversions
6755 should be put back in the tree that will get out of the
6761 if (TREE_CODE_CLASS (code) == tcc_unary)
6763 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6764 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6765 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6766 else if (TREE_CODE (arg0) == COND_EXPR)
6768 tree arg01 = TREE_OPERAND (arg0, 1);
6769 tree arg02 = TREE_OPERAND (arg0, 2);
6770 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6771 arg01 = fold_build1 (code, type, arg01);
6772 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6773 arg02 = fold_build1 (code, type, arg02);
6774 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6777 /* If this was a conversion, and all we did was to move it
6778 inside the COND_EXPR, bring it back out. But leave it if
6779 it is a conversion from integer to integer and the
6780 result precision is no wider than a word since such a
6781 conversion is cheap and may be optimized away by combine,
6782 while it couldn't if it were outside the COND_EXPR. Then return
6783 so we don't get into an infinite recursion loop taking the
6784 conversion out and then back in. */
6786 if ((code == NOP_EXPR || code == CONVERT_EXPR
6787 || code == NON_LVALUE_EXPR)
6788 && TREE_CODE (tem) == COND_EXPR
6789 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6790 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6791 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6792 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6793 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6794 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6795 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6797 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6798 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6799 || flag_syntax_only))
6800 tem = build1 (code, type,
6802 TREE_TYPE (TREE_OPERAND
6803 (TREE_OPERAND (tem, 1), 0)),
6804 TREE_OPERAND (tem, 0),
6805 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6806 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6809 else if (COMPARISON_CLASS_P (arg0))
6811 if (TREE_CODE (type) == BOOLEAN_TYPE)
6813 arg0 = copy_node (arg0);
6814 TREE_TYPE (arg0) = type;
6817 else if (TREE_CODE (type) != INTEGER_TYPE)
6818 return fold_build3 (COND_EXPR, type, arg0,
6819 fold_build1 (code, type,
6821 fold_build1 (code, type,
6822 integer_zero_node));
6831 case FIX_TRUNC_EXPR:
6833 case FIX_FLOOR_EXPR:
6834 case FIX_ROUND_EXPR:
6835 if (TREE_TYPE (op0) == type)
6838 /* If we have (type) (a CMP b) and type is an integral type, return
6839 new expression involving the new type. */
6840 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6841 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6842 TREE_OPERAND (op0, 1));
6844 /* Handle cases of two conversions in a row. */
6845 if (TREE_CODE (op0) == NOP_EXPR
6846 || TREE_CODE (op0) == CONVERT_EXPR)
6848 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6849 tree inter_type = TREE_TYPE (op0);
6850 int inside_int = INTEGRAL_TYPE_P (inside_type);
6851 int inside_ptr = POINTER_TYPE_P (inside_type);
6852 int inside_float = FLOAT_TYPE_P (inside_type);
6853 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6854 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6855 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6856 int inter_int = INTEGRAL_TYPE_P (inter_type);
6857 int inter_ptr = POINTER_TYPE_P (inter_type);
6858 int inter_float = FLOAT_TYPE_P (inter_type);
6859 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6860 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6861 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6862 int final_int = INTEGRAL_TYPE_P (type);
6863 int final_ptr = POINTER_TYPE_P (type);
6864 int final_float = FLOAT_TYPE_P (type);
6865 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6866 unsigned int final_prec = TYPE_PRECISION (type);
6867 int final_unsignedp = TYPE_UNSIGNED (type);
6869 /* In addition to the cases of two conversions in a row
6870 handled below, if we are converting something to its own
6871 type via an object of identical or wider precision, neither
6872 conversion is needed. */
6873 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6874 && ((inter_int && final_int) || (inter_float && final_float))
6875 && inter_prec >= final_prec)
6876 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6878 /* Likewise, if the intermediate and final types are either both
6879 float or both integer, we don't need the middle conversion if
6880 it is wider than the final type and doesn't change the signedness
6881 (for integers). Avoid this if the final type is a pointer
6882 since then we sometimes need the inner conversion. Likewise if
6883 the outer has a precision not equal to the size of its mode. */
6884 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6885 || (inter_float && inside_float)
6886 || (inter_vec && inside_vec))
6887 && inter_prec >= inside_prec
6888 && (inter_float || inter_vec
6889 || inter_unsignedp == inside_unsignedp)
6890 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6891 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6893 && (! final_vec || inter_prec == inside_prec))
6894 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6896 /* If we have a sign-extension of a zero-extended value, we can
6897 replace that by a single zero-extension. */
6898 if (inside_int && inter_int && final_int
6899 && inside_prec < inter_prec && inter_prec < final_prec
6900 && inside_unsignedp && !inter_unsignedp)
6901 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
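/* Illustrative annotation (not in the original source): for an unsigned
   char C, (long long) (int) c first zero-extends and then sign-extends;
   the intermediate cast is dropped, leaving the single zero-extension
   (long long) c.  */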
6903 /* Two conversions in a row are not needed unless:
6904 - some conversion is floating-point (overstrict for now), or
6905 - some conversion is a vector (overstrict for now), or
6906 - the intermediate type is narrower than both initial and final types, or
6908 - the intermediate type and innermost type differ in signedness,
6909 and the outermost type is wider than the intermediate, or
6910 - the initial type is a pointer type and the precisions of the
6911 intermediate and final types differ, or
6912 - the final type is a pointer type and the precisions of the
6913 initial and intermediate types differ. */
6914 if (! inside_float && ! inter_float && ! final_float
6915 && ! inside_vec && ! inter_vec && ! final_vec
6916 && (inter_prec > inside_prec || inter_prec > final_prec)
6917 && ! (inside_int && inter_int
6918 && inter_unsignedp != inside_unsignedp
6919 && inter_prec < final_prec)
6920 && ((inter_unsignedp && inter_prec > inside_prec)
6921 == (final_unsignedp && final_prec > inter_prec))
6922 && ! (inside_ptr && inter_prec != final_prec)
6923 && ! (final_ptr && inside_prec != inter_prec)
6924 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6925 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6927 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6930 /* Handle (T *)&A.B.C for A being of type T and B and C
6931 living at offset zero. This occurs frequently in
6932 C++ upcasting and then accessing the base. */
6933 if (TREE_CODE (op0) == ADDR_EXPR
6934 && POINTER_TYPE_P (type)
6935 && handled_component_p (TREE_OPERAND (op0, 0)))
6937 HOST_WIDE_INT bitsize, bitpos;
6939 enum machine_mode mode;
6940 int unsignedp, volatilep;
6941 tree base = TREE_OPERAND (op0, 0);
6942 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6943 &mode, &unsignedp, &volatilep, false);
6944 /* If the reference was to a (constant) zero offset, we can use
6945 the address of the base if it has the same base type
6946 as the result type. */
6947 if (! offset && bitpos == 0
6948 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6949 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6950 return fold_convert (type, build_fold_addr_expr (base));
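/* Illustrative annotation (not in the original source): given
   struct D { struct B b; } d;, the cast (struct D *) &d.b refers to the
   same object at offset zero, so it is folded back to &d.  */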
6953 if (TREE_CODE (op0) == MODIFY_EXPR
6954 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6955 /* Detect assigning a bitfield. */
6956 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6957 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6959 /* Don't leave an assignment inside a conversion
6960 unless assigning a bitfield. */
6961 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6962 /* First do the assignment, then return converted constant. */
6963 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6964 TREE_NO_WARNING (tem) = 1;
6965 TREE_USED (tem) = 1;
6969 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6970 constant (if x has signed type, the sign bit cannot be set
6971 in c). This folds extension into the BIT_AND_EXPR. */
6972 if (INTEGRAL_TYPE_P (type)
6973 && TREE_CODE (type) != BOOLEAN_TYPE
6974 && TREE_CODE (op0) == BIT_AND_EXPR
6975 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6978 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6981 if (TYPE_UNSIGNED (TREE_TYPE (and))
6982 || (TYPE_PRECISION (type)
6983 <= TYPE_PRECISION (TREE_TYPE (and))))
6985 else if (TYPE_PRECISION (TREE_TYPE (and1))
6986 <= HOST_BITS_PER_WIDE_INT
6987 && host_integerp (and1, 1))
6989 unsigned HOST_WIDE_INT cst;
6991 cst = tree_low_cst (and1, 1);
6992 cst &= (HOST_WIDE_INT) -1
6993 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6994 change = (cst == 0);
6995 #ifdef LOAD_EXTEND_OP
6997 && !flag_syntax_only
6998 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7001 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7002 and0 = fold_convert (uns, and0);
7003 and1 = fold_convert (uns, and1);
7009 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7010 TREE_INT_CST_HIGH (and1));
7011 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7012 TREE_CONSTANT_OVERFLOW (and1));
7013 return fold_build2 (BIT_AND_EXPR, type,
7014 fold_convert (type, and0), tem);
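/* Illustrative annotation (not in the original source): if the AND is
   computed in unsigned char, widening it as in (unsigned int) (c & 0x3f)
   is rebuilt as (unsigned int) c & 0x3f, folding the extension into the
   mask.  */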
7018 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7019 T2 being pointers to types of the same size. */
7020 if (POINTER_TYPE_P (type)
7021 && BINARY_CLASS_P (arg0)
7022 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7023 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7025 tree arg00 = TREE_OPERAND (arg0, 0);
7027 tree t1 = TREE_TYPE (arg00);
7028 tree tt0 = TREE_TYPE (t0);
7029 tree tt1 = TREE_TYPE (t1);
7030 tree s0 = TYPE_SIZE (tt0);
7031 tree s1 = TYPE_SIZE (tt1);
7033 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7034 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7035 TREE_OPERAND (arg0, 1));
7038 tem = fold_convert_const (code, type, arg0);
7039 return tem ? tem : NULL_TREE;
7041 case VIEW_CONVERT_EXPR:
7042 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7043 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7047 if (negate_expr_p (arg0))
7048 return fold_convert (type, negate_expr (arg0));
7052 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7053 return fold_abs_const (arg0, type);
7054 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7055 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7056 /* Convert fabs((double)float) into (double)fabsf(float). */
7057 else if (TREE_CODE (arg0) == NOP_EXPR
7058 && TREE_CODE (type) == REAL_TYPE)
7060 tree targ0 = strip_float_extensions (arg0);
7062 return fold_convert (type, fold_build1 (ABS_EXPR,
7066 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7067 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7070 /* Strip sign ops from argument. */
7071 if (TREE_CODE (type) == REAL_TYPE)
7073 tem = fold_strip_sign_ops (arg0);
7075 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7080 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7081 return fold_convert (type, arg0);
7082 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7083 return build2 (COMPLEX_EXPR, type,
7084 TREE_OPERAND (arg0, 0),
7085 negate_expr (TREE_OPERAND (arg0, 1)));
7086 else if (TREE_CODE (arg0) == COMPLEX_CST)
7087 return build_complex (type, TREE_REALPART (arg0),
7088 negate_expr (TREE_IMAGPART (arg0)));
7089 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7090 return fold_build2 (TREE_CODE (arg0), type,
7091 fold_build1 (CONJ_EXPR, type,
7092 TREE_OPERAND (arg0, 0)),
7093 fold_build1 (CONJ_EXPR, type,
7094 TREE_OPERAND (arg0, 1)));
7095 else if (TREE_CODE (arg0) == CONJ_EXPR)
7096 return TREE_OPERAND (arg0, 0);
7100 if (TREE_CODE (arg0) == INTEGER_CST)
7101 return fold_not_const (arg0, type);
7102 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7103 return TREE_OPERAND (arg0, 0);
7104 /* Convert ~ (-A) to A - 1. */
7105 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7106 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7107 build_int_cst (type, 1));
7108 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7109 else if (INTEGRAL_TYPE_P (type)
7110 && ((TREE_CODE (arg0) == MINUS_EXPR
7111 && integer_onep (TREE_OPERAND (arg0, 1)))
7112 || (TREE_CODE (arg0) == PLUS_EXPR
7113 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7114 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7115 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7116 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7117 && (tem = fold_unary (BIT_NOT_EXPR, type,
7119 TREE_OPERAND (arg0, 0)))))
7120 return fold_build2 (BIT_XOR_EXPR, type, tem,
7121 fold_convert (type, TREE_OPERAND (arg0, 1)));
7122 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7123 && (tem = fold_unary (BIT_NOT_EXPR, type,
7125 TREE_OPERAND (arg0, 1)))))
7126 return fold_build2 (BIT_XOR_EXPR, type,
7127 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
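/* Illustrative annotation (not in the original source): for an integral
   X, ~(-x) folds to x - 1, ~(x - 1) folds to -x, and ~(x ^ ~y) folds to
   x ^ y by simplifying the inner BIT_NOT.  */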
7131 case TRUTH_NOT_EXPR:
7132 /* The argument to invert_truthvalue must have Boolean type. */
7133 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7134 arg0 = fold_convert (boolean_type_node, arg0);
7136 /* Note that the operand of this must be an int
7137 and its values must be 0 or 1.
7138 ("true" is a fixed value perhaps depending on the language,
7139 but we don't handle values other than 1 correctly yet.) */
7140 tem = invert_truthvalue (arg0);
7141 /* Avoid infinite recursion. */
7142 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7144 return fold_convert (type, tem);
7147 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7149 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7150 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7151 TREE_OPERAND (arg0, 1));
7152 else if (TREE_CODE (arg0) == COMPLEX_CST)
7153 return TREE_REALPART (arg0);
7154 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7155 return fold_build2 (TREE_CODE (arg0), type,
7156 fold_build1 (REALPART_EXPR, type,
7157 TREE_OPERAND (arg0, 0)),
7158 fold_build1 (REALPART_EXPR, type,
7159 TREE_OPERAND (arg0, 1)));
7163 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7164 return fold_convert (type, integer_zero_node);
7165 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7166 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7167 TREE_OPERAND (arg0, 0));
7168 else if (TREE_CODE (arg0) == COMPLEX_CST)
7169 return TREE_IMAGPART (arg0);
7170 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7171 return fold_build2 (TREE_CODE (arg0), type,
7172 fold_build1 (IMAGPART_EXPR, type,
7173 TREE_OPERAND (arg0, 0)),
7174 fold_build1 (IMAGPART_EXPR, type,
7175 TREE_OPERAND (arg0, 1)));
7180 } /* switch (code) */
7183 /* Fold a binary expression of code CODE and type TYPE with operands
7184 OP0 and OP1. Return the folded expression if folding is
7185 successful. Otherwise, return NULL_TREE. */
7188 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7190 tree t1 = NULL_TREE;
7192 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7193 enum tree_code_class kind = TREE_CODE_CLASS (code);
7195 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7196 && TREE_CODE_LENGTH (code) == 2
7198 && op1 != NULL_TREE);
7203 /* Strip any conversions that don't change the mode. This is
7204 safe for every expression, except for a comparison expression
7205 because its signedness is derived from its operands. So, in
7206 the latter case, only strip conversions that don't change the signedness.
7209 Note that this is done as an internal manipulation within the
7210 constant folder, in order to find the simplest representation
7211 of the arguments so that their form can be studied. In any
7212 case, the appropriate type conversions should be put back in
7213 the tree that will get out of the constant folder. */
7215 if (kind == tcc_comparison)
7217 STRIP_SIGN_NOPS (arg0);
7218 STRIP_SIGN_NOPS (arg1);
7226 /* Note that TREE_CONSTANT isn't enough: static var addresses are
7227 constant but we can't do arithmetic on them. */
7228 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7229 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7230 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
7231 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
7233 if (kind == tcc_binary)
7234 tem = const_binop (code, arg0, arg1, 0);
7235 else if (kind == tcc_comparison)
7236 tem = fold_relational_const (code, type, arg0, arg1);
7240 if (tem != NULL_TREE)
7242 if (TREE_TYPE (tem) != type)
7243 tem = fold_convert (type, tem);
7248 /* If this is a commutative operation, and ARG0 is a constant, move it
7249 to ARG1 to reduce the number of tests below. */
7250 if (commutative_tree_code (code)
7251 && tree_swap_operands_p (arg0, arg1, true))
7252 return fold_build2 (code, type, op1, op0);
7254 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
7256 First check for cases where an arithmetic operation is applied to a
7257 compound, conditional, or comparison operation. Push the arithmetic
7258 operation inside the compound or conditional to see if any folding
7259 can then be done. Convert comparison to conditional for this purpose.
7260 This also optimizes non-constant cases that used to be done in
7263 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7264 one of the operands is a comparison and the other is a comparison, a
7265 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7266 code below would make the expression more complex. Change it to a
7267 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7268 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7270 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7271 || code == EQ_EXPR || code == NE_EXPR)
7272 && ((truth_value_p (TREE_CODE (arg0))
7273 && (truth_value_p (TREE_CODE (arg1))
7274 || (TREE_CODE (arg1) == BIT_AND_EXPR
7275 && integer_onep (TREE_OPERAND (arg1, 1)))))
7276 || (truth_value_p (TREE_CODE (arg1))
7277 && (truth_value_p (TREE_CODE (arg0))
7278 || (TREE_CODE (arg0) == BIT_AND_EXPR
7279 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7281 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7282 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7285 fold_convert (boolean_type_node, arg0),
7286 fold_convert (boolean_type_node, arg1));
7288 if (code == EQ_EXPR)
7289 tem = invert_truthvalue (tem);
7291 return fold_convert (type, tem);
7294 if (TREE_CODE_CLASS (code) == tcc_binary
7295 || TREE_CODE_CLASS (code) == tcc_comparison)
7297 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7298 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7299 fold_build2 (code, type,
7300 TREE_OPERAND (arg0, 1), op1));
7301 if (TREE_CODE (arg1) == COMPOUND_EXPR
7302 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7303 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7304 fold_build2 (code, type,
7305 op0, TREE_OPERAND (arg1, 1)));
7307 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7309 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7311 /*cond_first_p=*/1);
7312 if (tem != NULL_TREE)
7316 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7318 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7320 /*cond_first_p=*/0);
7321 if (tem != NULL_TREE)
7329 /* A + (-B) -> A - B */
7330 if (TREE_CODE (arg1) == NEGATE_EXPR)
7331 return fold_build2 (MINUS_EXPR, type,
7332 fold_convert (type, arg0),
7333 fold_convert (type, TREE_OPERAND (arg1, 0)));
7334 /* (-A) + B -> B - A */
7335 if (TREE_CODE (arg0) == NEGATE_EXPR
7336 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7337 return fold_build2 (MINUS_EXPR, type,
7338 fold_convert (type, arg1),
7339 fold_convert (type, TREE_OPERAND (arg0, 0)));
7340 /* Convert ~A + 1 to -A. */
7341 if (INTEGRAL_TYPE_P (type)
7342 && TREE_CODE (arg0) == BIT_NOT_EXPR
7343 && integer_onep (arg1))
7344 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
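/* Illustrative annotation (not in the original source): a + (-b) is
   rewritten as a - b, (-a) + b as b - a, and for integral types ~a + 1
   folds to -a.  */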
7346 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
7348 if ((TREE_CODE (arg0) == MULT_EXPR
7349 || TREE_CODE (arg1) == MULT_EXPR)
7350 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7352 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7357 if (! FLOAT_TYPE_P (type))
7359 if (integer_zerop (arg1))
7360 return non_lvalue (fold_convert (type, arg0));
7362 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7363 with a constant, and the two constants have no bits in common,
7364 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
7366 if (TREE_CODE (arg0) == BIT_AND_EXPR
7367 && TREE_CODE (arg1) == BIT_AND_EXPR
7368 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7369 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7370 && integer_zerop (const_binop (BIT_AND_EXPR,
7371 TREE_OPERAND (arg0, 1),
7372 TREE_OPERAND (arg1, 1), 0)))
7374 code = BIT_IOR_EXPR;
7378 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7379 (plus (plus (mult) (mult)) (foo)) so that we can
7380 take advantage of the factoring cases below. */
7381 if (((TREE_CODE (arg0) == PLUS_EXPR
7382 || TREE_CODE (arg0) == MINUS_EXPR)
7383 && TREE_CODE (arg1) == MULT_EXPR)
7384 || ((TREE_CODE (arg1) == PLUS_EXPR
7385 || TREE_CODE (arg1) == MINUS_EXPR)
7386 && TREE_CODE (arg0) == MULT_EXPR))
7388 tree parg0, parg1, parg, marg;
7389 enum tree_code pcode;
7391 if (TREE_CODE (arg1) == MULT_EXPR)
7392 parg = arg0, marg = arg1;
7394 parg = arg1, marg = arg0;
7395 pcode = TREE_CODE (parg);
7396 parg0 = TREE_OPERAND (parg, 0);
7397 parg1 = TREE_OPERAND (parg, 1);
7401 if (TREE_CODE (parg0) == MULT_EXPR
7402 && TREE_CODE (parg1) != MULT_EXPR)
7403 return fold_build2 (pcode, type,
7404 fold_build2 (PLUS_EXPR, type,
7405 fold_convert (type, parg0),
7406 fold_convert (type, marg)),
7407 fold_convert (type, parg1));
7408 if (TREE_CODE (parg0) != MULT_EXPR
7409 && TREE_CODE (parg1) == MULT_EXPR)
7410 return fold_build2 (PLUS_EXPR, type,
7411 fold_convert (type, parg0),
7412 fold_build2 (pcode, type,
7413 fold_convert (type, marg),
7418 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7419 of the array. The loop optimizer sometimes produces this kind of expression. */
7421 if (TREE_CODE (arg0) == ADDR_EXPR)
7423 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7425 return fold_convert (type, tem);
7427 else if (TREE_CODE (arg1) == ADDR_EXPR)
7429 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7431 return fold_convert (type, tem);
7436 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7437 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7438 return non_lvalue (fold_convert (type, arg0));
7440 /* Likewise if the operands are reversed. */
7441 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7442 return non_lvalue (fold_convert (type, arg1));
7444 /* Convert X + -C into X - C. */
7445 if (TREE_CODE (arg1) == REAL_CST
7446 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7448 tem = fold_negate_const (arg1, type);
7449 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7450 return fold_build2 (MINUS_EXPR, type,
7451 fold_convert (type, arg0),
7452 fold_convert (type, tem));
7455 if (flag_unsafe_math_optimizations
7456 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7457 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7458 && (tem = distribute_real_division (code, type, arg0, arg1)))
7461 /* Convert x+x into x*2.0. */
7462 if (operand_equal_p (arg0, arg1, 0)
7463 && SCALAR_FLOAT_TYPE_P (type))
7464 return fold_build2 (MULT_EXPR, type, arg0,
7465 build_real (type, dconst2));
7467 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7468 if (flag_unsafe_math_optimizations
7469 && TREE_CODE (arg1) == PLUS_EXPR
7470 && TREE_CODE (arg0) != MULT_EXPR)
7472 tree tree10 = TREE_OPERAND (arg1, 0);
7473 tree tree11 = TREE_OPERAND (arg1, 1);
7474 if (TREE_CODE (tree11) == MULT_EXPR
7475 && TREE_CODE (tree10) == MULT_EXPR)
7478 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7479 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7482 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7483 if (flag_unsafe_math_optimizations
7484 && TREE_CODE (arg0) == PLUS_EXPR
7485 && TREE_CODE (arg1) != MULT_EXPR)
7487 tree tree00 = TREE_OPERAND (arg0, 0);
7488 tree tree01 = TREE_OPERAND (arg0, 1);
7489 if (TREE_CODE (tree01) == MULT_EXPR
7490 && TREE_CODE (tree00) == MULT_EXPR)
7493 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7494 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7500 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7501 is a rotate of A by C1 bits. */
7502 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7503 is a rotate of A by B bits. */
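/* Illustrative annotation (not in the original source): assuming a 32-bit
   unsigned int X, (x << 3) + (x >> 29) is recognized as a left rotate of
   X by 3, and (x << n) + (x >> (32 - n)) as a rotate by N.  */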
7505 enum tree_code code0, code1;
7506 code0 = TREE_CODE (arg0);
7507 code1 = TREE_CODE (arg1);
7508 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7509 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7510 && operand_equal_p (TREE_OPERAND (arg0, 0),
7511 TREE_OPERAND (arg1, 0), 0)
7512 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7514 tree tree01, tree11;
7515 enum tree_code code01, code11;
7517 tree01 = TREE_OPERAND (arg0, 1);
7518 tree11 = TREE_OPERAND (arg1, 1);
7519 STRIP_NOPS (tree01);
7520 STRIP_NOPS (tree11);
7521 code01 = TREE_CODE (tree01);
7522 code11 = TREE_CODE (tree11);
7523 if (code01 == INTEGER_CST
7524 && code11 == INTEGER_CST
7525 && TREE_INT_CST_HIGH (tree01) == 0
7526 && TREE_INT_CST_HIGH (tree11) == 0
7527 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7528 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7529 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7530 code0 == LSHIFT_EXPR ? tree01 : tree11);
7531 else if (code11 == MINUS_EXPR)
7533 tree tree110, tree111;
7534 tree110 = TREE_OPERAND (tree11, 0);
7535 tree111 = TREE_OPERAND (tree11, 1);
7536 STRIP_NOPS (tree110);
7537 STRIP_NOPS (tree111);
7538 if (TREE_CODE (tree110) == INTEGER_CST
7539 && 0 == compare_tree_int (tree110,
7541 (TREE_TYPE (TREE_OPERAND
7543 && operand_equal_p (tree01, tree111, 0))
7544 return build2 ((code0 == LSHIFT_EXPR
7547 type, TREE_OPERAND (arg0, 0), tree01);
7549 else if (code01 == MINUS_EXPR)
7551 tree tree010, tree011;
7552 tree010 = TREE_OPERAND (tree01, 0);
7553 tree011 = TREE_OPERAND (tree01, 1);
7554 STRIP_NOPS (tree010);
7555 STRIP_NOPS (tree011);
7556 if (TREE_CODE (tree010) == INTEGER_CST
7557 && 0 == compare_tree_int (tree010,
7559 (TREE_TYPE (TREE_OPERAND
7561 && operand_equal_p (tree11, tree011, 0))
7562 return build2 ((code0 != LSHIFT_EXPR
7565 type, TREE_OPERAND (arg0, 0), tree11);
7571 /* In most languages, we can't reassociate operations on floats across
7572 parentheses. Rather than remember where the parentheses were, we
7573 don't associate floats at all, unless the user has specified
7574 -funsafe-math-optimizations. */
7576 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7578 tree var0, con0, lit0, minus_lit0;
7579 tree var1, con1, lit1, minus_lit1;
7581 /* Split both trees into variables, constants, and literals. Then
7582 associate each group together, the constants with literals,
7583 then the result with variables. This increases the chances of
7584 literals being recombined later and of generating relocatable
7585 expressions for the sum of a constant and literal. */
7586 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7587 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7588 code == MINUS_EXPR);
7590 /* Only do something if we found more than two objects. Otherwise,
7591 nothing has changed and we risk infinite recursion. */
7592 if (2 < ((var0 != 0) + (var1 != 0)
7593 + (con0 != 0) + (con1 != 0)
7594 + (lit0 != 0) + (lit1 != 0)
7595 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7597 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7598 if (code == MINUS_EXPR)
7601 var0 = associate_trees (var0, var1, code, type);
7602 con0 = associate_trees (con0, con1, code, type);
7603 lit0 = associate_trees (lit0, lit1, code, type);
7604 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7606 /* Preserve the MINUS_EXPR if the negative part of the literal is
7607 greater than the positive part. Otherwise, the multiplicative
7608 folding code (i.e. extract_muldiv) may be fooled when
7609 unsigned constants are subtracted, as in the following
7610 example: ((X*2 + 4) - 8U)/2. */
7611 if (minus_lit0 && lit0)
7613 if (TREE_CODE (lit0) == INTEGER_CST
7614 && TREE_CODE (minus_lit0) == INTEGER_CST
7615 && tree_int_cst_lt (lit0, minus_lit0))
7617 minus_lit0 = associate_trees (minus_lit0, lit0,
7623 lit0 = associate_trees (lit0, minus_lit0,
7631 return fold_convert (type,
7632 associate_trees (var0, minus_lit0,
7636 con0 = associate_trees (con0, minus_lit0,
7638 return fold_convert (type,
7639 associate_trees (var0, con0,
7644 con0 = associate_trees (con0, lit0, code, type);
7645 return fold_convert (type, associate_trees (var0, con0,
7653 /* A - (-B) -> A + B */
7654 if (TREE_CODE (arg1) == NEGATE_EXPR)
7655 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7656 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7657 if (TREE_CODE (arg0) == NEGATE_EXPR
7658 && (FLOAT_TYPE_P (type)
7659 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7660 && negate_expr_p (arg1)
7661 && reorder_operands_p (arg0, arg1))
7662 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7663 TREE_OPERAND (arg0, 0));
7664 /* Convert -A - 1 to ~A. */
7665 if (INTEGRAL_TYPE_P (type)
7666 && TREE_CODE (arg0) == NEGATE_EXPR
7667 && integer_onep (arg1))
7668 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7670 /* Convert -1 - A to ~A. */
7671 if (INTEGRAL_TYPE_P (type)
7672 && integer_all_onesp (arg0))
7673 return fold_build1 (BIT_NOT_EXPR, type, arg1);
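/* Illustrative annotation (not in the original source): for integral
   types, -a - 1 folds to ~a and -1 - a folds to ~a.  */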
7675 if (! FLOAT_TYPE_P (type))
7677 if (integer_zerop (arg0))
7678 return negate_expr (fold_convert (type, arg1));
7679 if (integer_zerop (arg1))
7680 return non_lvalue (fold_convert (type, arg0));
7682 /* Fold A - (A & B) into ~B & A. */
7683 if (!TREE_SIDE_EFFECTS (arg0)
7684 && TREE_CODE (arg1) == BIT_AND_EXPR)
7686 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7687 return fold_build2 (BIT_AND_EXPR, type,
7688 fold_build1 (BIT_NOT_EXPR, type,
7689 TREE_OPERAND (arg1, 0)),
7691 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7692 return fold_build2 (BIT_AND_EXPR, type,
7693 fold_build1 (BIT_NOT_EXPR, type,
7694 TREE_OPERAND (arg1, 1)),
7698 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7699 any power of 2 minus 1. */
7700 if (TREE_CODE (arg0) == BIT_AND_EXPR
7701 && TREE_CODE (arg1) == BIT_AND_EXPR
7702 && operand_equal_p (TREE_OPERAND (arg0, 0),
7703 TREE_OPERAND (arg1, 0), 0))
7705 tree mask0 = TREE_OPERAND (arg0, 1);
7706 tree mask1 = TREE_OPERAND (arg1, 1);
7707 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7709 if (operand_equal_p (tem, mask1, 0))
7711 tem = fold_build2 (BIT_XOR_EXPR, type,
7712 TREE_OPERAND (arg0, 0), mask1);
7713 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7718 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7719 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7720 return non_lvalue (fold_convert (type, arg0));
7722 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7723 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7724 (-ARG1 + ARG0) reduces to -ARG1. */
7725 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7726 return negate_expr (fold_convert (type, arg1));
7728 /* Fold &x - &x. This can happen from &x.foo - &x.
7729 This is unsafe for certain floats even in non-IEEE formats.
7730 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7731 Also note that operand_equal_p is always false if an operand is volatile. */
7734 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7735 && operand_equal_p (arg0, arg1, 0))
7736 return fold_convert (type, integer_zero_node);
7738 /* A - B -> A + (-B) if B is easily negatable. */
7739 if (negate_expr_p (arg1)
7740 && ((FLOAT_TYPE_P (type)
7741 /* Avoid this transformation if B is a positive REAL_CST. */
7742 && (TREE_CODE (arg1) != REAL_CST
7743 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7744 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7745 return fold_build2 (PLUS_EXPR, type,
7746 fold_convert (type, arg0),
7747 fold_convert (type, negate_expr (arg1)));
7749 /* Try folding difference of addresses. */
7753 if ((TREE_CODE (arg0) == ADDR_EXPR
7754 || TREE_CODE (arg1) == ADDR_EXPR)
7755 && ptr_difference_const (arg0, arg1, &diff))
7756 return build_int_cst_type (type, diff);
7759 /* Fold &a[i] - &a[j] to i-j. */
7760 if (TREE_CODE (arg0) == ADDR_EXPR
7761 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7762 && TREE_CODE (arg1) == ADDR_EXPR
7763 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7765 tree aref0 = TREE_OPERAND (arg0, 0);
7766 tree aref1 = TREE_OPERAND (arg1, 0);
7767 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7768 TREE_OPERAND (aref1, 0), 0))
7770 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7771 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7772 tree esz = array_ref_element_size (aref0);
7773 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7774 return fold_build2 (MULT_EXPR, type, diff,
7775 fold_convert (type, esz));
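/* Illustrative annotation (not in the original source): the address
   difference &a[i] - &a[j] is rebuilt here as (i - j) * sizeof (a[0]);
   when this came from C pointer subtraction, the surrounding division by
   the element size can then cancel, leaving i - j.  */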
7780 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7781 of the array. The loop optimizer sometimes produces this kind of expression. */
7783 if (TREE_CODE (arg0) == ADDR_EXPR)
7785 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7787 return fold_convert (type, tem);
7790 if (flag_unsafe_math_optimizations
7791 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7792 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7793 && (tem = distribute_real_division (code, type, arg0, arg1)))
7796 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
7798 if ((TREE_CODE (arg0) == MULT_EXPR
7799 || TREE_CODE (arg1) == MULT_EXPR)
7800 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7802 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7810 /* (-A) * (-B) -> A * B */
7811 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7812 return fold_build2 (MULT_EXPR, type,
7813 TREE_OPERAND (arg0, 0),
7814 negate_expr (arg1));
7815 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7816 return fold_build2 (MULT_EXPR, type,
7818 TREE_OPERAND (arg1, 0));
7820 if (! FLOAT_TYPE_P (type))
7822 if (integer_zerop (arg1))
7823 return omit_one_operand (type, arg1, arg0);
7824 if (integer_onep (arg1))
7825 return non_lvalue (fold_convert (type, arg0));
7826 /* Transform x * -1 into -x. */
7827 if (integer_all_onesp (arg1))
7828 return fold_convert (type, negate_expr (arg0));
7830 /* (a * (1 << b)) is (a << b) */
7831 if (TREE_CODE (arg1) == LSHIFT_EXPR
7832 && integer_onep (TREE_OPERAND (arg1, 0)))
7833 return fold_build2 (LSHIFT_EXPR, type, arg0,
7834 TREE_OPERAND (arg1, 1));
7835 if (TREE_CODE (arg0) == LSHIFT_EXPR
7836 && integer_onep (TREE_OPERAND (arg0, 0)))
7837 return fold_build2 (LSHIFT_EXPR, type, arg1,
7838 TREE_OPERAND (arg0, 1));
7840 if (TREE_CODE (arg1) == INTEGER_CST
7841 && 0 != (tem = extract_muldiv (op0,
7842 fold_convert (type, arg1),
7844 return fold_convert (type, tem);
7849 /* Maybe fold x * 0 to 0. The expressions aren't the same
7850 when x is NaN, since x * 0 is also NaN. Nor are they the
7851 same in modes with signed zeros, since multiplying a
7852 negative value by 0 gives -0, not +0. */
7853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7854 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7855 && real_zerop (arg1))
7856 return omit_one_operand (type, arg1, arg0);
7857 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7858 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7859 && real_onep (arg1))
7860 return non_lvalue (fold_convert (type, arg0));
7862 /* Transform x * -1.0 into -x. */
7863 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7864 && real_minus_onep (arg1))
7865 return fold_convert (type, negate_expr (arg0));
7867 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7868 if (flag_unsafe_math_optimizations
7869 && TREE_CODE (arg0) == RDIV_EXPR
7870 && TREE_CODE (arg1) == REAL_CST
7871 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7873 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7876 return fold_build2 (RDIV_EXPR, type, tem,
7877 TREE_OPERAND (arg0, 1));
7880 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7881 if (operand_equal_p (arg0, arg1, 0))
7883 tree tem = fold_strip_sign_ops (arg0);
7884 if (tem != NULL_TREE)
7886 tem = fold_convert (type, tem);
7887 return fold_build2 (MULT_EXPR, type, tem, tem);
7891 if (flag_unsafe_math_optimizations)
7893 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7894 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7896 /* Optimizations of root(...)*root(...). */
7897 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7899 tree rootfn, arg, arglist;
7900 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7901 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7903 /* Optimize sqrt(x)*sqrt(x) as x. */
7904 if (BUILTIN_SQRT_P (fcode0)
7905 && operand_equal_p (arg00, arg10, 0)
7906 && ! HONOR_SNANS (TYPE_MODE (type)))
7909 /* Optimize root(x)*root(y) as root(x*y). */
7910 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7911 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7912 arglist = build_tree_list (NULL_TREE, arg);
7913 return build_function_call_expr (rootfn, arglist);
7916 /* Optimize expN(x)*expN(y) as expN(x+y). */
7917 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7919 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7920 tree arg = fold_build2 (PLUS_EXPR, type,
7921 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7922 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7923 tree arglist = build_tree_list (NULL_TREE, arg);
7924 return build_function_call_expr (expfn, arglist);
7927 /* Optimizations of pow(...)*pow(...). */
7928 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7929 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7930 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7932 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7933 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7935 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7936 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7939 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7940 if (operand_equal_p (arg01, arg11, 0))
7942 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7943 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7944 tree arglist = tree_cons (NULL_TREE, arg,
7945 build_tree_list (NULL_TREE,
7947 return build_function_call_expr (powfn, arglist);
7950 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7951 if (operand_equal_p (arg00, arg10, 0))
7953 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7954 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7955 tree arglist = tree_cons (NULL_TREE, arg00,
7956 build_tree_list (NULL_TREE,
7958 return build_function_call_expr (powfn, arglist);
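/* Illustrative annotation (not in the original source): under
   -funsafe-math-optimizations, sqrt(x)*sqrt(y) becomes sqrt(x*y),
   exp(x)*exp(y) becomes exp(x+y), pow(x,c)*pow(y,c) becomes pow(x*y,c),
   and pow(x,a)*pow(x,b) becomes pow(x,a+b).  */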
7962 /* Optimize tan(x)*cos(x) as sin(x). */
7963 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7964 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7965 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7966 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7967 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7968 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7969 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7970 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7972 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7974 if (sinfn != NULL_TREE)
7975 return build_function_call_expr (sinfn,
7976 TREE_OPERAND (arg0, 1));
7979 /* Optimize x*pow(x,c) as pow(x,c+1). */
7980 if (fcode1 == BUILT_IN_POW
7981 || fcode1 == BUILT_IN_POWF
7982 || fcode1 == BUILT_IN_POWL)
7984 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7985 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7987 if (TREE_CODE (arg11) == REAL_CST
7988 && ! TREE_CONSTANT_OVERFLOW (arg11)
7989 && operand_equal_p (arg0, arg10, 0))
7991 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7995 c = TREE_REAL_CST (arg11);
7996 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7997 arg = build_real (type, c);
7998 arglist = build_tree_list (NULL_TREE, arg);
7999 arglist = tree_cons (NULL_TREE, arg0, arglist);
8000 return build_function_call_expr (powfn, arglist);
8004 /* Optimize pow(x,c)*x as pow(x,c+1). */
8005 if (fcode0 == BUILT_IN_POW
8006 || fcode0 == BUILT_IN_POWF
8007 || fcode0 == BUILT_IN_POWL)
8009 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8010 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8012 if (TREE_CODE (arg01) == REAL_CST
8013 && ! TREE_CONSTANT_OVERFLOW (arg01)
8014 && operand_equal_p (arg1, arg00, 0))
8016 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8020 c = TREE_REAL_CST (arg01);
8021 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8022 arg = build_real (type, c);
8023 arglist = build_tree_list (NULL_TREE, arg);
8024 arglist = tree_cons (NULL_TREE, arg1, arglist);
8025 return build_function_call_expr (powfn, arglist);
8029 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8031 && operand_equal_p (arg0, arg1, 0))
8033 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8037 tree arg = build_real (type, dconst2);
8038 tree arglist = build_tree_list (NULL_TREE, arg);
8039 arglist = tree_cons (NULL_TREE, arg0, arglist);
8040 return build_function_call_expr (powfn, arglist);
8049 if (integer_all_onesp (arg1))
8050 return omit_one_operand (type, arg1, arg0);
8051 if (integer_zerop (arg1))
8052 return non_lvalue (fold_convert (type, arg0));
8053 if (operand_equal_p (arg0, arg1, 0))
8054 return non_lvalue (fold_convert (type, arg0));
8057 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8058 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8060 t1 = build_int_cst (type, -1);
8061 t1 = force_fit_type (t1, 0, false, false);
8062 return omit_one_operand (type, t1, arg1);
8066 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8067 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8069 t1 = build_int_cst (type, -1);
8070 t1 = force_fit_type (t1, 0, false, false);
8071 return omit_one_operand (type, t1, arg0);
8074 t1 = distribute_bit_expr (code, type, arg0, arg1);
8075 if (t1 != NULL_TREE)
8078 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8080 This results in more efficient code for machines without a NAND
8081 instruction. Combine will canonicalize to the first form
8082 which will allow use of NAND instructions provided by the
8083 backend if they exist. */
8084 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8085 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8087 return fold_build1 (BIT_NOT_EXPR, type,
8088 build2 (BIT_AND_EXPR, type,
8089 TREE_OPERAND (arg0, 0),
8090 TREE_OPERAND (arg1, 0)));
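/* Illustrative annotation (not in the original source): ~a | ~b is
   rewritten here as ~(a & b), a single AND followed by a NOT.  */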
8093 /* See if this can be simplified into a rotate first. If that
8094 is unsuccessful continue in the association code. */
8098 if (integer_zerop (arg1))
8099 return non_lvalue (fold_convert (type, arg0));
8100 if (integer_all_onesp (arg1))
8101 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8102 if (operand_equal_p (arg0, arg1, 0))
8103 return omit_one_operand (type, integer_zero_node, arg0);
8106 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8109 t1 = build_int_cst (type, -1);
8110 t1 = force_fit_type (t1, 0, false, false);
8111 return omit_one_operand (type, t1, arg1);
8115 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8118 t1 = build_int_cst (type, -1);
8119 t1 = force_fit_type (t1, 0, false, false);
8120 return omit_one_operand (type, t1, arg0);
8123 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8124 with a constant, and the two constants have no bits in common,
8125 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
8127 if (TREE_CODE (arg0) == BIT_AND_EXPR
8128 && TREE_CODE (arg1) == BIT_AND_EXPR
8129 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8130 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8131 && integer_zerop (const_binop (BIT_AND_EXPR,
8132 TREE_OPERAND (arg0, 1),
8133 TREE_OPERAND (arg1, 1), 0)))
8135 code = BIT_IOR_EXPR;
8139 /* (X | Y) ^ X -> Y & ~X. */
8140 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8141 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8143 tree t2 = TREE_OPERAND (arg0, 1);
8144 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8146 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8147 fold_convert (type, t1));
8151 /* (Y | X) ^ X -> Y & ~X. */
8152 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8153 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8155 tree t2 = TREE_OPERAND (arg0, 0);
8156 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8158 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8159 fold_convert (type, t1));
8163 /* X ^ (X | Y) -> Y & ~X. */
8164 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8165 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8167 tree t2 = TREE_OPERAND (arg1, 1);
8168 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8170 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8171 fold_convert (type, t1));
8175 /* X ^ (Y | X) -> Y & ~X. */
8176 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8177 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8179 tree t2 = TREE_OPERAND (arg1, 0);
8180 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8182 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8183 fold_convert (type, t1));
8187 /* Convert ~X ^ ~Y to X ^ Y. */
8188 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8189 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8190 return fold_build2 (code, type,
8191 fold_convert (type, TREE_OPERAND (arg0, 0)),
8192 fold_convert (type, TREE_OPERAND (arg1, 0)));
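/* Example: ~a ^ ~b folds to a ^ b, since inverting both operands of
   an XOR inverts the result twice and therefore cancels out.  */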
8194 /* See if this can be simplified into a rotate first. If that
8195 is unsuccessful continue in the association code. */
8199 if (integer_all_onesp (arg1))
8200 return non_lvalue (fold_convert (type, arg0));
8201 if (integer_zerop (arg1))
8202 return omit_one_operand (type, arg1, arg0);
8203 if (operand_equal_p (arg0, arg1, 0))
8204 return non_lvalue (fold_convert (type, arg0));
8206 /* ~X & X is always zero. */
8207 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8209 return omit_one_operand (type, integer_zero_node, arg1);
8211 /* X & ~X is always zero. */
8212 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8213 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8214 return omit_one_operand (type, integer_zero_node, arg0);
8216 t1 = distribute_bit_expr (code, type, arg0, arg1);
8217 if (t1 != NULL_TREE)
8219 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8220 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8221 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8224 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8226 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8227 && (~TREE_INT_CST_LOW (arg1)
8228 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8229 return fold_convert (type, TREE_OPERAND (arg0, 0));
8232 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8234 This results in more efficient code for machines without a NOR
8235 instruction. Combine will canonicalize to the first form
8236 which will allow use of NOR instructions provided by the
8237 backend if they exist. */
8238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8239 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8241 return fold_build1 (BIT_NOT_EXPR, type,
8242 build2 (BIT_IOR_EXPR, type,
8243 TREE_OPERAND (arg0, 0),
8244 TREE_OPERAND (arg1, 0)));
8250 /* Don't touch a floating-point divide by zero unless the mode
8251 of the constant can represent infinity. */
8252 if (TREE_CODE (arg1) == REAL_CST
8253 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8254 && real_zerop (arg1))
8257 /* Optimize A / A to 1.0 if we don't care about
8258 NaNs or Infinities. */
8259 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8260 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8261 && operand_equal_p (arg0, arg1, 0))
8263 tree r = build_real (TREE_TYPE (arg0), dconst1);
8265 return omit_two_operands (type, r, arg0, arg1);
8268 /* (-A) / (-B) -> A / B */
8269 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8270 return fold_build2 (RDIV_EXPR, type,
8271 TREE_OPERAND (arg0, 0),
8272 negate_expr (arg1));
8273 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8274 return fold_build2 (RDIV_EXPR, type,
8276 TREE_OPERAND (arg1, 0));
8278 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8279 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8280 && real_onep (arg1))
8281 return non_lvalue (fold_convert (type, arg0));
8283 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8284 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8285 && real_minus_onep (arg1))
8286 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8288 /* If ARG1 is a constant, we can convert this to a multiply by the
8289 reciprocal. This does not have the same rounding properties,
8290 so only do this if -funsafe-math-optimizations. We can actually
8291 always safely do it if ARG1 is a power of two, but it's hard to
8292 tell if it is or not in a portable manner. */
8293 if (TREE_CODE (arg1) == REAL_CST)
8295 if (flag_unsafe_math_optimizations
8296 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8298 return fold_build2 (MULT_EXPR, type, arg0, tem);
8299 /* Find the reciprocal if optimizing and the result is exact. */
8303 r = TREE_REAL_CST (arg1);
8304 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8306 tem = build_real (type, r);
8307 return fold_build2 (MULT_EXPR, type,
8308 fold_convert (type, arg0), tem);
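/* Examples: x / 8.0 becomes x * 0.125 because the reciprocal of 8.0
   is exact; x / 3.0 is only rewritten as x * (1.0/3.0) under
   -funsafe-math-optimizations, since that reciprocal rounds.  */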
8312 /* Convert A/B/C to A/(B*C). */
8313 if (flag_unsafe_math_optimizations
8314 && TREE_CODE (arg0) == RDIV_EXPR)
8315 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8316 fold_build2 (MULT_EXPR, type,
8317 TREE_OPERAND (arg0, 1), arg1));
8319 /* Convert A/(B/C) to (A/B)*C. */
8320 if (flag_unsafe_math_optimizations
8321 && TREE_CODE (arg1) == RDIV_EXPR)
8322 return fold_build2 (MULT_EXPR, type,
8323 fold_build2 (RDIV_EXPR, type, arg0,
8324 TREE_OPERAND (arg1, 0)),
8325 TREE_OPERAND (arg1, 1));
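/* With unsafe math optimizations, these two folds give
   (a / b) / c -> a / (b * c) and a / (b / c) -> (a / b) * c,
   trading one division for a multiplication (illustrative forms).  */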
8327 /* Convert C1/(X*C2) into (C1/C2)/X. */
8328 if (flag_unsafe_math_optimizations
8329 && TREE_CODE (arg1) == MULT_EXPR
8330 && TREE_CODE (arg0) == REAL_CST
8331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8333 tree tem = const_binop (RDIV_EXPR, arg0,
8334 TREE_OPERAND (arg1, 1), 0);
8336 return fold_build2 (RDIV_EXPR, type, tem,
8337 TREE_OPERAND (arg1, 0));
8340 if (flag_unsafe_math_optimizations)
8342 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8343 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8345 /* Optimize sin(x)/cos(x) as tan(x). */
8346 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8347 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8348 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8349 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8350 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8352 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8354 if (tanfn != NULL_TREE)
8355 return build_function_call_expr (tanfn,
8356 TREE_OPERAND (arg0, 1));
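/* Example: sin (x) / cos (x) becomes tan (x); mathfn_built_in picks
   the tanf/tan/tanl variant that matches TYPE.  */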
8359 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8360 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8361 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8362 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8363 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8364 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8366 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8368 if (tanfn != NULL_TREE)
8370 tree tmp = TREE_OPERAND (arg0, 1);
8371 tmp = build_function_call_expr (tanfn, tmp);
8372 return fold_build2 (RDIV_EXPR, type,
8373 build_real (type, dconst1), tmp);
8377 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
8378 NaNs or Infinities. */
8379 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
8380 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
8381 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
8383 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8384 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8386 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8387 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8388 && operand_equal_p (arg00, arg01, 0))
8390 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8392 if (cosfn != NULL_TREE)
8393 return build_function_call_expr (cosfn,
8394 TREE_OPERAND (arg0, 1));
8398 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
8399 NaNs or Infinities. */
8400 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
8401 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
8402 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
8404 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8405 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8407 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
8408 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
8409 && operand_equal_p (arg00, arg01, 0))
8411 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
8413 if (cosfn != NULL_TREE)
8415 tree tmp = TREE_OPERAND (arg0, 1);
8416 tmp = build_function_call_expr (cosfn, tmp);
8417 return fold_build2 (RDIV_EXPR, type,
8418 build_real (type, dconst1),
8424 /* Optimize pow(x,c)/x as pow(x,c-1). */
8425 if (fcode0 == BUILT_IN_POW
8426 || fcode0 == BUILT_IN_POWF
8427 || fcode0 == BUILT_IN_POWL)
8429 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8430 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8431 if (TREE_CODE (arg01) == REAL_CST
8432 && ! TREE_CONSTANT_OVERFLOW (arg01)
8433 && operand_equal_p (arg1, arg00, 0))
8435 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8436 REAL_VALUE_TYPE c;
8437 tree arg, arglist;
8439 c = TREE_REAL_CST (arg01);
8440 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8441 arg = build_real (type, c);
8442 arglist = build_tree_list (NULL_TREE, arg);
8443 arglist = tree_cons (NULL_TREE, arg1, arglist);
8444 return build_function_call_expr (powfn, arglist);
8448 /* Optimize x/expN(y) into x*expN(-y). */
8449 if (BUILTIN_EXPONENT_P (fcode1))
8451 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8452 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8453 tree arglist = build_tree_list (NULL_TREE,
8454 fold_convert (type, arg));
8455 arg1 = build_function_call_expr (expfn, arglist);
8456 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8459 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8460 if (fcode1 == BUILT_IN_POW
8461 || fcode1 == BUILT_IN_POWF
8462 || fcode1 == BUILT_IN_POWL)
8464 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8465 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8466 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8467 tree neg11 = fold_convert (type, negate_expr (arg11));
8468 tree arglist = tree_cons (NULL_TREE, arg10,
8469 build_tree_list (NULL_TREE, neg11));
8470 arg1 = build_function_call_expr (powfn, arglist);
8471 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8476 case TRUNC_DIV_EXPR:
8477 case ROUND_DIV_EXPR:
8478 case FLOOR_DIV_EXPR:
8480 case EXACT_DIV_EXPR:
8481 if (integer_onep (arg1))
8482 return non_lvalue (fold_convert (type, arg0));
8483 if (integer_zerop (arg1))
8486 if (!TYPE_UNSIGNED (type)
8487 && TREE_CODE (arg1) == INTEGER_CST
8488 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8489 && TREE_INT_CST_HIGH (arg1) == -1)
8490 return fold_convert (type, negate_expr (arg0));
8492 /* Convert -A / -B to A / B when the type is signed and overflow is
8493 undefined. */
8494 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8495 && TREE_CODE (arg0) == NEGATE_EXPR
8496 && negate_expr_p (arg1))
8497 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8498 negate_expr (arg1));
8499 if (!TYPE_UNSIGNED (type) && !flag_wrapv
8500 && TREE_CODE (arg1) == NEGATE_EXPR
8501 && negate_expr_p (arg0))
8502 return fold_build2 (code, type, negate_expr (arg0),
8503 TREE_OPERAND (arg1, 0));
8505 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8506 operation, EXACT_DIV_EXPR.
8508 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8509 At one time others generated faster code, but it's not clear if they do
8510 after the last round of changes to the DIV code in expmed.c. */
8511 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8512 && multiple_of_p (type, arg0, arg1))
8513 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8515 if (TREE_CODE (arg1) == INTEGER_CST
8516 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8517 return fold_convert (type, tem);
8522 case FLOOR_MOD_EXPR:
8523 case ROUND_MOD_EXPR:
8524 case TRUNC_MOD_EXPR:
8525 /* X % 1 is always zero, but be sure to preserve any side
8526 effects in X. */
8527 if (integer_onep (arg1))
8528 return omit_one_operand (type, integer_zero_node, arg0);
8530 /* X % 0, return X % 0 unchanged so that we can get the
8531 proper warnings and errors. */
8532 if (integer_zerop (arg1))
8535 /* 0 % X is always zero, but be sure to preserve any side
8536 effects in X. Place this after checking for X == 0. */
8537 if (integer_zerop (arg0))
8538 return omit_one_operand (type, integer_zero_node, arg1);
8540 /* X % -1 is zero. */
8541 if (!TYPE_UNSIGNED (type)
8542 && TREE_CODE (arg1) == INTEGER_CST
8543 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8544 && TREE_INT_CST_HIGH (arg1) == -1)
8545 return omit_one_operand (type, integer_zero_node, arg0);
8547 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8548 i.e. "X % C" into "X & C2", if X and C are positive. */
8549 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8550 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8551 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8553 unsigned HOST_WIDE_INT high, low;
8557 l = tree_log2 (arg1);
8558 if (l >= HOST_BITS_PER_WIDE_INT)
8560 high = ((unsigned HOST_WIDE_INT) 1
8561 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8567 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8570 mask = build_int_cst_wide (type, low, high);
8571 return fold_build2 (BIT_AND_EXPR, type,
8572 fold_convert (type, arg0), mask);
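/* Example: for unsigned x, x % 16 is rewritten as x & 15; the mask
   built above is ((unsigned HOST_WIDE_INT) 1 << l) - 1 with
   l = tree_log2 (arg1).  */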
8575 /* X % -C is the same as X % C. */
8576 if (code == TRUNC_MOD_EXPR
8577 && !TYPE_UNSIGNED (type)
8578 && TREE_CODE (arg1) == INTEGER_CST
8579 && !TREE_CONSTANT_OVERFLOW (arg1)
8580 && TREE_INT_CST_HIGH (arg1) < 0
8582 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8583 && !sign_bit_p (arg1, arg1))
8584 return fold_build2 (code, type, fold_convert (type, arg0),
8585 fold_convert (type, negate_expr (arg1)));
8587 /* X % -Y is the same as X % Y. */
8588 if (code == TRUNC_MOD_EXPR
8589 && !TYPE_UNSIGNED (type)
8590 && TREE_CODE (arg1) == NEGATE_EXPR
8592 return fold_build2 (code, type, fold_convert (type, arg0),
8593 fold_convert (type, TREE_OPERAND (arg1, 0)));
8595 if (TREE_CODE (arg1) == INTEGER_CST
8596 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8597 return fold_convert (type, tem);
8603 if (integer_all_onesp (arg0))
8604 return omit_one_operand (type, arg0, arg1);
8608 /* Optimize -1 >> x for arithmetic right shifts. */
8609 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8610 return omit_one_operand (type, arg0, arg1);
8611 /* ... fall through ... */
8615 if (integer_zerop (arg1))
8616 return non_lvalue (fold_convert (type, arg0));
8617 if (integer_zerop (arg0))
8618 return omit_one_operand (type, arg0, arg1);
8620 /* Since negative shift count is not well-defined,
8621 don't try to compute it in the compiler. */
8622 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8625 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8626 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8627 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8628 && host_integerp (TREE_OPERAND (arg0, 1), false)
8629 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8631 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8632 + TREE_INT_CST_LOW (arg1));
8634 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8635 being well defined. */
8636 if (low >= TYPE_PRECISION (type))
8638 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8639 low = low % TYPE_PRECISION (type);
8640 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8641 return build_int_cst (type, 0);
8642 else
8643 low = TYPE_PRECISION (type) - 1;
8646 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8647 build_int_cst (type, low));
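/* Examples (assuming a 32-bit type): (x << 3) << 5 becomes x << 8;
   a left-rotate by 20 of a left-rotate by 20 becomes a rotate by 8,
   because rotate counts are reduced modulo the precision above.  */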
8650 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8651 into x & ((unsigned)-1 >> c) for unsigned types. */
8652 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8653 || (TYPE_UNSIGNED (type)
8654 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8655 && host_integerp (arg1, false)
8656 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8657 && host_integerp (TREE_OPERAND (arg0, 1), false)
8658 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8660 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8661 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8667 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8669 lshift = build_int_cst (type, -1);
8670 lshift = int_const_binop (code, lshift, arg1, 0);
8672 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8676 /* Rewrite an LROTATE_EXPR by a constant into an
8677 RROTATE_EXPR by a new constant. */
8678 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8680 tree tem = build_int_cst (NULL_TREE,
8681 GET_MODE_BITSIZE (TYPE_MODE (type)));
8682 tem = fold_convert (TREE_TYPE (arg1), tem);
8683 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8684 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8687 /* If we have a rotate of a bit operation with the rotate count and
8688 the second operand of the bit operation both constant,
8689 permute the two operations. */
8690 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8691 && (TREE_CODE (arg0) == BIT_AND_EXPR
8692 || TREE_CODE (arg0) == BIT_IOR_EXPR
8693 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8694 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8695 return fold_build2 (TREE_CODE (arg0), type,
8696 fold_build2 (code, type,
8697 TREE_OPERAND (arg0, 0), arg1),
8698 fold_build2 (code, type,
8699 TREE_OPERAND (arg0, 1), arg1));
8701 /* Two consecutive rotates adding up to the width of the mode can
8702 be ignored. */
8703 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8704 && TREE_CODE (arg0) == RROTATE_EXPR
8705 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8706 && TREE_INT_CST_HIGH (arg1) == 0
8707 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8708 && ((TREE_INT_CST_LOW (arg1)
8709 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8710 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8711 return TREE_OPERAND (arg0, 0);
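/* Example: on a 32-bit type, rotating right by 10 and then by 22
   brings every bit back to its original position, so the operand is
   returned unchanged.  */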
8716 if (operand_equal_p (arg0, arg1, 0))
8717 return omit_one_operand (type, arg0, arg1);
8718 if (INTEGRAL_TYPE_P (type)
8719 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8720 return omit_one_operand (type, arg1, arg0);
8724 if (operand_equal_p (arg0, arg1, 0))
8725 return omit_one_operand (type, arg0, arg1);
8726 if (INTEGRAL_TYPE_P (type)
8727 && TYPE_MAX_VALUE (type)
8728 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8729 return omit_one_operand (type, arg1, arg0);
8732 case TRUTH_ANDIF_EXPR:
8733 /* Note that the operands of this must be ints
8734 and their values must be 0 or 1.
8735 ("true" is a fixed value perhaps depending on the language.) */
8736 /* If first arg is constant zero, return it. */
8737 if (integer_zerop (arg0))
8738 return fold_convert (type, arg0);
8739 case TRUTH_AND_EXPR:
8740 /* If either arg is constant true, drop it. */
8741 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8742 return non_lvalue (fold_convert (type, arg1));
8743 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8744 /* Preserve sequence points. */
8745 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8746 return non_lvalue (fold_convert (type, arg0));
8747 /* If second arg is constant zero, result is zero, but first arg
8748 must be evaluated. */
8749 if (integer_zerop (arg1))
8750 return omit_one_operand (type, arg1, arg0);
8751 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8752 case will be handled here. */
8753 if (integer_zerop (arg0))
8754 return omit_one_operand (type, arg0, arg1);
8756 /* !X && X is always false. */
8757 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8758 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8759 return omit_one_operand (type, integer_zero_node, arg1);
8760 /* X && !X is always false. */
8761 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8762 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8763 return omit_one_operand (type, integer_zero_node, arg0);
8765 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8766 means A >= Y && A != MAX, but in this case we know that
8767 A < X <= MAX. */
8769 if (!TREE_SIDE_EFFECTS (arg0)
8770 && !TREE_SIDE_EFFECTS (arg1))
8772 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8773 if (tem && !operand_equal_p (tem, arg0, 0))
8774 return fold_build2 (code, type, tem, arg1);
8776 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8777 if (tem && !operand_equal_p (tem, arg1, 0))
8778 return fold_build2 (code, type, arg0, tem);
8782 /* We only do these simplifications if we are optimizing. */
8786 /* Check for things like (A || B) && (A || C). We can convert this
8787 to A || (B && C). Note that either operator can be any of the four
8788 truth and/or operations and the transformation will still be
8789 valid. Also note that we only care about order for the
8790 ANDIF and ORIF operators. If B contains side effects, this
8791 might change the truth-value of A. */
8792 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8793 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8794 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8795 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8796 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8797 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8799 tree a00 = TREE_OPERAND (arg0, 0);
8800 tree a01 = TREE_OPERAND (arg0, 1);
8801 tree a10 = TREE_OPERAND (arg1, 0);
8802 tree a11 = TREE_OPERAND (arg1, 1);
8803 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8804 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8805 && (code == TRUTH_AND_EXPR
8806 || code == TRUTH_OR_EXPR));
8808 if (operand_equal_p (a00, a10, 0))
8809 return fold_build2 (TREE_CODE (arg0), type, a00,
8810 fold_build2 (code, type, a01, a11));
8811 else if (commutative && operand_equal_p (a00, a11, 0))
8812 return fold_build2 (TREE_CODE (arg0), type, a00,
8813 fold_build2 (code, type, a01, a10));
8814 else if (commutative && operand_equal_p (a01, a10, 0))
8815 return fold_build2 (TREE_CODE (arg0), type, a01,
8816 fold_build2 (code, type, a00, a11));
8818 /* This case is tricky because we must either have commutative
8819 operators or else A10 must not have side-effects. */
8821 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8822 && operand_equal_p (a01, a11, 0))
8823 return fold_build2 (TREE_CODE (arg0), type,
8824 fold_build2 (code, type, a00, a10),
8828 /* See if we can build a range comparison. */
8829 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8832 /* Check for the possibility of merging component references. If our
8833 lhs is another similar operation, try to merge its rhs with our
8834 rhs. Then try to merge our lhs and rhs. */
8835 if (TREE_CODE (arg0) == code
8836 && 0 != (tem = fold_truthop (code, type,
8837 TREE_OPERAND (arg0, 1), arg1)))
8838 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8840 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8845 case TRUTH_ORIF_EXPR:
8846 /* Note that the operands of this must be ints
8847 and their values must be 0 or true.
8848 ("true" is a fixed value perhaps depending on the language.) */
8849 /* If first arg is constant true, return it. */
8850 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8851 return fold_convert (type, arg0);
8853 /* If either arg is constant zero, drop it. */
8854 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8855 return non_lvalue (fold_convert (type, arg1));
8856 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8857 /* Preserve sequence points. */
8858 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8859 return non_lvalue (fold_convert (type, arg0));
8860 /* If second arg is constant true, result is true, but we must
8861 evaluate first arg. */
8862 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8863 return omit_one_operand (type, arg1, arg0);
8864 /* Likewise for first arg, but note this only occurs here for
8865 TRUTH_OR_EXPR. */
8866 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8867 return omit_one_operand (type, arg0, arg1);
8869 /* !X || X is always true. */
8870 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8871 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8872 return omit_one_operand (type, integer_one_node, arg1);
8873 /* X || !X is always true. */
8874 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8876 return omit_one_operand (type, integer_one_node, arg0);
8880 case TRUTH_XOR_EXPR:
8881 /* If the second arg is constant zero, drop it. */
8882 if (integer_zerop (arg1))
8883 return non_lvalue (fold_convert (type, arg0));
8884 /* If the second arg is constant true, this is a logical inversion. */
8885 if (integer_onep (arg1))
8887 /* Only call invert_truthvalue if operand is a truth value. */
8888 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8889 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8891 tem = invert_truthvalue (arg0);
8892 return non_lvalue (fold_convert (type, tem));
8894 /* Identical arguments cancel to zero. */
8895 if (operand_equal_p (arg0, arg1, 0))
8896 return omit_one_operand (type, integer_zero_node, arg0);
8898 /* !X ^ X is always true. */
8899 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8900 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8901 return omit_one_operand (type, integer_one_node, arg1);
8903 /* X ^ !X is always true. */
8904 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8905 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8906 return omit_one_operand (type, integer_one_node, arg0);
8916 /* If one arg is a real or integer constant, put it last. */
8917 if (tree_swap_operands_p (arg0, arg1, true))
8918 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8920 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8921 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8922 && (code == NE_EXPR || code == EQ_EXPR))
8923 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8924 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8927 /* bool_var != 0 becomes bool_var. */
8928 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8929 && code == NE_EXPR)
8930 return non_lvalue (fold_convert (type, arg0));
8932 /* bool_var == 1 becomes bool_var. */
8933 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8934 && code == EQ_EXPR)
8935 return non_lvalue (fold_convert (type, arg0));
8937 /* bool_var != 1 becomes !bool_var. */
8938 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8939 && code == NE_EXPR)
8940 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8942 /* bool_var == 0 becomes !bool_var. */
8943 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8944 && code == EQ_EXPR)
8945 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8947 /* If this is an equality comparison of the address of a non-weak
8948 object against zero, then we know the result. */
8949 if ((code == EQ_EXPR || code == NE_EXPR)
8950 && TREE_CODE (arg0) == ADDR_EXPR
8951 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8952 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8953 && integer_zerop (arg1))
8954 return constant_boolean_node (code != EQ_EXPR, type);
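/* Example: given a non-weak declaration such as "static int v;",
   &v == 0 folds to 0 and &v != 0 folds to 1, because the address of
   such an object can never be null ("v" is an illustrative name).  */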
8956 /* If this is an equality comparison of the address of two non-weak,
8957 unaliased symbols neither of which are extern (since we do not
8958 have access to attributes for externs), then we know the result. */
8959 if ((code == EQ_EXPR || code == NE_EXPR)
8960 && TREE_CODE (arg0) == ADDR_EXPR
8961 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8962 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8963 && ! lookup_attribute ("alias",
8964 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8965 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8966 && TREE_CODE (arg1) == ADDR_EXPR
8967 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8968 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8969 && ! lookup_attribute ("alias",
8970 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8971 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8973 /* We know that we're looking at the address of two
8974 non-weak, unaliased, static _DECL nodes.
8976 It is both wasteful and incorrect to call operand_equal_p
8977 to compare the two ADDR_EXPR nodes. It is wasteful in that
8978 all we need to do is test pointer equality for the arguments
8979 to the two ADDR_EXPR nodes. It is incorrect to use
8980 operand_equal_p as that function is NOT equivalent to a
8981 C equality test. It can in fact return false for two
8982 objects which would test as equal using the C equality
8983 operator. */
8984 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8985 return constant_boolean_node (equal
8986 ? code == EQ_EXPR : code != EQ_EXPR,
8990 /* If this is a comparison of two exprs that look like an
8991 ARRAY_REF of the same object, then we can fold this to a
8992 comparison of the two offsets. */
8993 if (TREE_CODE_CLASS (code) == tcc_comparison)
8995 tree base0, offset0, base1, offset1;
8997 if (extract_array_ref (arg0, &base0, &offset0)
8998 && extract_array_ref (arg1, &base1, &offset1)
8999 && operand_equal_p (base0, base1, 0))
9001 /* Handle no offsets on both sides specially. */
9002 if (offset0 == NULL_TREE
9003 && offset1 == NULL_TREE)
9004 return fold_build2 (code, type, integer_zero_node,
9005 integer_zero_node);
9007 if (!offset0 || !offset1
9008 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
9010 if (offset0 == NULL_TREE)
9011 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9012 if (offset1 == NULL_TREE)
9013 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9014 return fold_build2 (code, type, offset0, offset1);
9019 /* Transform comparisons of the form X +- C CMP X. */
9020 if ((code != EQ_EXPR && code != NE_EXPR)
9021 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9022 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9023 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9024 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9025 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9026 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9027 && !(flag_wrapv || flag_trapv))))
9029 tree arg01 = TREE_OPERAND (arg0, 1);
9030 enum tree_code code0 = TREE_CODE (arg0);
9033 if (TREE_CODE (arg01) == REAL_CST)
9034 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9036 is_positive = tree_int_cst_sgn (arg01);
9038 /* (X - c) > X becomes false. */
9039 if (code == GT_EXPR
9040 && ((code0 == MINUS_EXPR && is_positive >= 0)
9041 || (code0 == PLUS_EXPR && is_positive <= 0)))
9042 return constant_boolean_node (0, type);
9044 /* Likewise (X + c) < X becomes false. */
9045 if (code == LT_EXPR
9046 && ((code0 == PLUS_EXPR && is_positive >= 0)
9047 || (code0 == MINUS_EXPR && is_positive <= 0)))
9048 return constant_boolean_node (0, type);
9050 /* Convert (X - c) <= X to true. */
9051 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9052 && code == LE_EXPR
9053 && ((code0 == MINUS_EXPR && is_positive >= 0)
9054 || (code0 == PLUS_EXPR && is_positive <= 0)))
9055 return constant_boolean_node (1, type);
9057 /* Convert (X + c) >= X to true. */
9058 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9059 && code == GE_EXPR
9060 && ((code0 == PLUS_EXPR && is_positive >= 0)
9061 || (code0 == MINUS_EXPR && is_positive <= 0)))
9062 return constant_boolean_node (1, type);
9064 if (TREE_CODE (arg01) == INTEGER_CST)
9066 /* Convert X + c > X and X - c < X to true for integers. */
9068 && ((code0 == PLUS_EXPR && is_positive > 0)
9069 || (code0 == MINUS_EXPR && is_positive < 0)))
9070 return constant_boolean_node (1, type);
9073 && ((code0 == MINUS_EXPR && is_positive > 0)
9074 || (code0 == PLUS_EXPR && is_positive < 0)))
9075 return constant_boolean_node (1, type);
9077 /* Convert X + c <= X and X - c >= X to false for integers. */
9079 && ((code0 == PLUS_EXPR && is_positive > 0)
9080 || (code0 == MINUS_EXPR && is_positive < 0)))
9081 return constant_boolean_node (0, type);
9084 && ((code0 == MINUS_EXPR && is_positive > 0)
9085 || (code0 == PLUS_EXPR && is_positive < 0)))
9086 return constant_boolean_node (0, type);
9090 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9091 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9092 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9093 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9094 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9095 && !(flag_wrapv || flag_trapv))
9096 && (TREE_CODE (arg1) == INTEGER_CST
9097 && !TREE_OVERFLOW (arg1)))
9099 tree const1 = TREE_OPERAND (arg0, 1);
9101 tree variable = TREE_OPERAND (arg0, 0);
9104 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9106 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9107 TREE_TYPE (arg1), const2, const1);
9108 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9109 && (TREE_CODE (lhs) != INTEGER_CST
9110 || !TREE_OVERFLOW (lhs)))
9111 return fold_build2 (code, type, variable, lhs);
9114 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9116 tree targ0 = strip_float_extensions (arg0);
9117 tree targ1 = strip_float_extensions (arg1);
9118 tree newtype = TREE_TYPE (targ0);
9120 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9121 newtype = TREE_TYPE (targ1);
9123 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9124 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9125 return fold_build2 (code, type, fold_convert (newtype, targ0),
9126 fold_convert (newtype, targ1));
9128 /* (-a) CMP (-b) -> b CMP a */
9129 if (TREE_CODE (arg0) == NEGATE_EXPR
9130 && TREE_CODE (arg1) == NEGATE_EXPR)
9131 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9132 TREE_OPERAND (arg0, 0));
9134 if (TREE_CODE (arg1) == REAL_CST)
9136 REAL_VALUE_TYPE cst;
9137 cst = TREE_REAL_CST (arg1);
9139 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9140 if (TREE_CODE (arg0) == NEGATE_EXPR)
9142 fold_build2 (swap_tree_comparison (code), type,
9143 TREE_OPERAND (arg0, 0),
9144 build_real (TREE_TYPE (arg1),
9145 REAL_VALUE_NEGATE (cst)));
9147 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9148 /* a CMP (-0) -> a CMP 0 */
9149 if (REAL_VALUE_MINUS_ZERO (cst))
9150 return fold_build2 (code, type, arg0,
9151 build_real (TREE_TYPE (arg1), dconst0));
9153 /* x != NaN is always true, other ops are always false. */
9154 if (REAL_VALUE_ISNAN (cst)
9155 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9157 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9158 return omit_one_operand (type, tem, arg0);
9161 /* Fold comparisons against infinity. */
9162 if (REAL_VALUE_ISINF (cst))
9164 tem = fold_inf_compare (code, type, arg0, arg1);
9165 if (tem != NULL_TREE)
9170 /* If this is a comparison of a real constant with a PLUS_EXPR
9171 or a MINUS_EXPR of a real constant, we can convert it into a
9172 comparison with a revised real constant as long as no overflow
9173 occurs when unsafe_math_optimizations are enabled. */
9174 if (flag_unsafe_math_optimizations
9175 && TREE_CODE (arg1) == REAL_CST
9176 && (TREE_CODE (arg0) == PLUS_EXPR
9177 || TREE_CODE (arg0) == MINUS_EXPR)
9178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9179 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9180 ? MINUS_EXPR : PLUS_EXPR,
9181 arg1, TREE_OPERAND (arg0, 1), 0))
9182 && ! TREE_CONSTANT_OVERFLOW (tem))
9183 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9185 /* Likewise, we can simplify a comparison of a real constant with
9186 a MINUS_EXPR whose first operand is also a real constant, i.e.
9187 (c1 - x) < c2 becomes x > c1-c2. */
9188 if (flag_unsafe_math_optimizations
9189 && TREE_CODE (arg1) == REAL_CST
9190 && TREE_CODE (arg0) == MINUS_EXPR
9191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9192 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9194 && ! TREE_CONSTANT_OVERFLOW (tem))
9195 return fold_build2 (swap_tree_comparison (code), type,
9196 TREE_OPERAND (arg0, 1), tem);
9198 /* Fold comparisons against built-in math functions. */
9199 if (TREE_CODE (arg1) == REAL_CST
9200 && flag_unsafe_math_optimizations
9201 && ! flag_errno_math)
9203 enum built_in_function fcode = builtin_mathfn_code (arg0);
9205 if (fcode != END_BUILTINS)
9207 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9208 if (tem != NULL_TREE)
9214 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9215 if (TREE_CONSTANT (arg1)
9216 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9217 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9218 /* This optimization is invalid for ordered comparisons
9219 if CONST+INCR overflows or if foo+incr might overflow.
9220 This optimization is invalid for floating point due to rounding.
9221 For pointer types we assume overflow doesn't happen. */
9222 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9223 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9224 && (code == EQ_EXPR || code == NE_EXPR))))
9226 tree varop, newconst;
9228 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9230 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9231 arg1, TREE_OPERAND (arg0, 1));
9232 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9233 TREE_OPERAND (arg0, 0),
9234 TREE_OPERAND (arg0, 1));
9238 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9239 arg1, TREE_OPERAND (arg0, 1));
9240 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9241 TREE_OPERAND (arg0, 0),
9242 TREE_OPERAND (arg0, 1));
9246 /* If VAROP is a reference to a bitfield, we must mask
9247 the constant by the width of the field. */
9248 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9249 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9250 && host_integerp (DECL_SIZE (TREE_OPERAND
9251 (TREE_OPERAND (varop, 0), 1)), 1))
9253 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9254 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9255 tree folded_compare, shift;
9257 /* First check whether the comparison would come out
9258 always the same. If we don't do that we would
9259 change the meaning with the masking. */
9260 folded_compare = fold_build2 (code, type,
9261 TREE_OPERAND (varop, 0), arg1);
9262 if (integer_zerop (folded_compare)
9263 || integer_onep (folded_compare))
9264 return omit_one_operand (type, folded_compare, varop);
9266 shift = build_int_cst (NULL_TREE,
9267 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9268 shift = fold_convert (TREE_TYPE (varop), shift);
9269 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9270 newconst, shift);
9271 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9272 newconst, shift);
9275 return fold_build2 (code, type, varop, newconst);
9278 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9279 This transformation affects the cases which are handled in later
9280 optimizations involving comparisons with non-negative constants. */
9281 if (TREE_CODE (arg1) == INTEGER_CST
9282 && TREE_CODE (arg0) != INTEGER_CST
9283 && tree_int_cst_sgn (arg1) > 0)
9288 arg1 = const_binop (MINUS_EXPR, arg1,
9289 build_int_cst (TREE_TYPE (arg1), 1), 0);
9290 return fold_build2 (GT_EXPR, type, arg0,
9291 fold_convert (TREE_TYPE (arg0), arg1));
9294 arg1 = const_binop (MINUS_EXPR, arg1,
9295 build_int_cst (TREE_TYPE (arg1), 1), 0);
9296 return fold_build2 (LE_EXPR, type, arg0,
9297 fold_convert (TREE_TYPE (arg0), arg1));
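/* Examples: x >= 5 is rewritten as x > 4 and x < 5 as x <= 4 (for any
   positive integer constant), which puts the comparison in the form
   the later constant-comparison folds expect.  */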
9304 /* Comparisons with the highest or lowest possible integer of
9305 the specified size will have known values. */
9307 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9309 if (TREE_CODE (arg1) == INTEGER_CST
9310 && ! TREE_CONSTANT_OVERFLOW (arg1)
9311 && width <= 2 * HOST_BITS_PER_WIDE_INT
9312 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9313 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9315 HOST_WIDE_INT signed_max_hi;
9316 unsigned HOST_WIDE_INT signed_max_lo;
9317 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9319 if (width <= HOST_BITS_PER_WIDE_INT)
9321 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9326 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9328 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9334 max_lo = signed_max_lo;
9335 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9341 width -= HOST_BITS_PER_WIDE_INT;
9343 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9348 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9350 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9355 max_hi = signed_max_hi;
9356 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9360 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9361 && TREE_INT_CST_LOW (arg1) == max_lo)
9365 return omit_one_operand (type, integer_zero_node, arg0);
9368 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9371 return omit_one_operand (type, integer_one_node, arg0);
9374 return fold_build2 (NE_EXPR, type, arg0, arg1);
9376 /* The GE_EXPR and LT_EXPR cases above are not normally
9377 reached because of previous transformations. */
9382 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9384 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9388 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9389 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9391 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9392 return fold_build2 (NE_EXPR, type, arg0, arg1);
9396 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9398 && TREE_INT_CST_LOW (arg1) == min_lo)
9402 return omit_one_operand (type, integer_zero_node, arg0);
9405 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9408 return omit_one_operand (type, integer_one_node, arg0);
9411 return fold_build2 (NE_EXPR, type, op0, op1);
9416 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9418 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9422 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9423 return fold_build2 (NE_EXPR, type, arg0, arg1);
9425 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9426 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9431 else if (!in_gimple_form
9432 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9433 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9434 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9435 /* signed_type does not work on pointer types. */
9436 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9438 /* The following case also applies to X < signed_max+1
9439 and X >= signed_max+1 because of previous transformations. */
9440 if (code == LE_EXPR || code == GT_EXPR)
9443 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9444 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9445 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9446 type, fold_convert (st0, arg0),
9447 build_int_cst (st1, 0));
9453 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9454 a MINUS_EXPR of a constant, we can convert it into a comparison with
9455 a revised constant as long as no overflow occurs. */
9456 if ((code == EQ_EXPR || code == NE_EXPR)
9457 && TREE_CODE (arg1) == INTEGER_CST
9458 && (TREE_CODE (arg0) == PLUS_EXPR
9459 || TREE_CODE (arg0) == MINUS_EXPR)
9460 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9461 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9462 ? MINUS_EXPR : PLUS_EXPR,
9463 arg1, TREE_OPERAND (arg0, 1), 0))
9464 && ! TREE_CONSTANT_OVERFLOW (tem))
9465 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9467 /* Similarly for a NEGATE_EXPR. */
9468 else if ((code == EQ_EXPR || code == NE_EXPR)
9469 && TREE_CODE (arg0) == NEGATE_EXPR
9470 && TREE_CODE (arg1) == INTEGER_CST
9471 && 0 != (tem = negate_expr (arg1))
9472 && TREE_CODE (tem) == INTEGER_CST
9473 && ! TREE_CONSTANT_OVERFLOW (tem))
9474 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9476 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9477 for !=. Don't do this for ordered comparisons due to overflow. */
9478 else if ((code == NE_EXPR || code == EQ_EXPR)
9479 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9480 return fold_build2 (code, type,
9481 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9483 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9484 && (TREE_CODE (arg0) == NOP_EXPR
9485 || TREE_CODE (arg0) == CONVERT_EXPR))
9487 /* If we are widening one operand of an integer comparison,
9488 see if the other operand is similarly being widened. Perhaps we
9489 can do the comparison in the narrower type. */
9490 tem = fold_widened_comparison (code, type, arg0, arg1);
9494 /* Or if we are changing signedness. */
9495 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9500 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9501 constant, we can simplify it. */
9502 else if (TREE_CODE (arg1) == INTEGER_CST
9503 && (TREE_CODE (arg0) == MIN_EXPR
9504 || TREE_CODE (arg0) == MAX_EXPR)
9505 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9507 tem = optimize_minmax_comparison (code, type, op0, op1);
9514 /* If we are comparing an ABS_EXPR with a constant, we can
9515 convert all the cases into explicit comparisons, but they may
9516 well not be faster than doing the ABS and one comparison.
9517 But ABS (X) <= C is a range comparison, which becomes a subtraction
9518 and a comparison, and is probably faster. */
9519 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9520 && TREE_CODE (arg0) == ABS_EXPR
9521 && ! TREE_SIDE_EFFECTS (arg0)
9522 && (0 != (tem = negate_expr (arg1)))
9523 && TREE_CODE (tem) == INTEGER_CST
9524 && ! TREE_CONSTANT_OVERFLOW (tem))
9525 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9526 build2 (GE_EXPR, type,
9527 TREE_OPERAND (arg0, 0), tem),
9528 build2 (LE_EXPR, type,
9529 TREE_OPERAND (arg0, 0), arg1));
9531 /* Convert ABS_EXPR<x> >= 0 to true. */
9532 else if (code == GE_EXPR
9533 && tree_expr_nonnegative_p (arg0)
9534 && (integer_zerop (arg1)
9535 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9536 && real_zerop (arg1))))
9537 return omit_one_operand (type, integer_one_node, arg0);
9539 /* Convert ABS_EXPR<x> < 0 to false. */
9540 else if (code == LT_EXPR
9541 && tree_expr_nonnegative_p (arg0)
9542 && (integer_zerop (arg1) || real_zerop (arg1)))
9543 return omit_one_operand (type, integer_zero_node, arg0);
9545 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9546 else if ((code == EQ_EXPR || code == NE_EXPR)
9547 && TREE_CODE (arg0) == ABS_EXPR
9548 && (integer_zerop (arg1) || real_zerop (arg1)))
9549 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9551 /* If this is an EQ or NE comparison with zero and ARG0 is
9552 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9553 two operations, but the latter can be done in one less insn
9554 on machines that have only two-operand insns or on which a
9555 constant cannot be the first operand. */
9556 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9557 && TREE_CODE (arg0) == BIT_AND_EXPR)
9559 tree arg00 = TREE_OPERAND (arg0, 0);
9560 tree arg01 = TREE_OPERAND (arg0, 1);
9561 if (TREE_CODE (arg00) == LSHIFT_EXPR
9562 && integer_onep (TREE_OPERAND (arg00, 0)))
9564 fold_build2 (code, type,
9565 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9566 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9567 arg01, TREE_OPERAND (arg00, 1)),
9568 fold_convert (TREE_TYPE (arg0),
9571 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9572 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9574 fold_build2 (code, type,
9575 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9576 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9577 arg00, TREE_OPERAND (arg01, 1)),
9578 fold_convert (TREE_TYPE (arg0),
9583 /* If this is an NE or EQ comparison of zero against the result of a
9584 signed MOD operation whose second operand is a power of 2, make
9585 the MOD operation unsigned since it is simpler and equivalent. */
9586 if ((code == NE_EXPR || code == EQ_EXPR)
9587 && integer_zerop (arg1)
9588 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9589 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9590 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9591 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9592 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9593 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9595 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9596 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9597 fold_convert (newtype,
9598 TREE_OPERAND (arg0, 0)),
9599 fold_convert (newtype,
9600 TREE_OPERAND (arg0, 1)));
9602 return fold_build2 (code, type, newmod,
9603 fold_convert (newtype, arg1));
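/* Example: for signed int x, x % 4 == 0 becomes
   (unsigned int) x % 4U == 0; only the zero/nonzero outcome matters
   here, and the unsigned remainder is zero exactly when the signed
   one is.  */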
9606 /* If this is an NE comparison of zero with an AND of one, remove the
9607 comparison since the AND will give the correct value. */
9608 if (code == NE_EXPR && integer_zerop (arg1)
9609 && TREE_CODE (arg0) == BIT_AND_EXPR
9610 && integer_onep (TREE_OPERAND (arg0, 1)))
9611 return fold_convert (type, arg0);
9613 /* If we have (A & C) == C where C is a power of 2, convert this into
9614 (A & C) != 0. Similarly for NE_EXPR. */
9615 if ((code == EQ_EXPR || code == NE_EXPR)
9616 && TREE_CODE (arg0) == BIT_AND_EXPR
9617 && integer_pow2p (TREE_OPERAND (arg0, 1))
9618 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9619 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9620 arg0, fold_convert (TREE_TYPE (arg0),
9621 integer_zero_node));
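/* Example: (x & 8) == 8 becomes (x & 8) != 0, and (x & 8) != 8
   becomes (x & 8) == 0, valid because 8 is a single-bit constant.  */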
9623 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9624 bit, then fold the expression into A < 0 or A >= 0. */
9625 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9629 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9630 Similarly for NE_EXPR. */
9631 if ((code == EQ_EXPR || code == NE_EXPR)
9632 && TREE_CODE (arg0) == BIT_AND_EXPR
9633 && TREE_CODE (arg1) == INTEGER_CST
9634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9636 tree notc = fold_build1 (BIT_NOT_EXPR,
9637 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9638 TREE_OPERAND (arg0, 1));
9639 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9641 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9642 if (integer_nonzerop (dandnotc))
9643 return omit_one_operand (type, rslt, arg0);
9646 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9647 Similarly for NE_EXPR. */
9648 if ((code == EQ_EXPR || code == NE_EXPR)
9649 && TREE_CODE (arg0) == BIT_IOR_EXPR
9650 && TREE_CODE (arg1) == INTEGER_CST
9651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9653 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9654 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9655 TREE_OPERAND (arg0, 1), notd);
9656 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9657 if (integer_nonzerop (candnotd))
9658 return omit_one_operand (type, rslt, arg0);
9661 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9662 and similarly for >= into !=. */
9663 if ((code == LT_EXPR || code == GE_EXPR)
9664 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9665 && TREE_CODE (arg1) == LSHIFT_EXPR
9666 && integer_onep (TREE_OPERAND (arg1, 0)))
9667 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9668 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9669 TREE_OPERAND (arg1, 1)),
9670 build_int_cst (TREE_TYPE (arg0), 0));
9672 else if ((code == LT_EXPR || code == GE_EXPR)
9673 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9674 && (TREE_CODE (arg1) == NOP_EXPR
9675 || TREE_CODE (arg1) == CONVERT_EXPR)
9676 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9677 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9679 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9680 fold_convert (TREE_TYPE (arg0),
9681 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9682 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9684 build_int_cst (TREE_TYPE (arg0), 0));
9686 /* Simplify comparison of something with itself. (For IEEE
9687 floating-point, we can only do some of these simplifications.) */
9688 if (operand_equal_p (arg0, arg1, 0))
9693 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9694 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9695 return constant_boolean_node (1, type);
9700 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9701 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9702 return constant_boolean_node (1, type);
9703 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9706 /* For NE, we can only do this simplification if integer
9707 or we don't honor IEEE floating point NaNs. */
9708 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9709 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9711 /* ... fall through ... */
9714 return constant_boolean_node (0, type);
9720 /* If we are comparing an expression that just has comparisons
9721 of two integer values, arithmetic expressions of those comparisons,
9722 and constants, we can simplify it. There are only three cases
9723 to check: the two values can either be equal, the first can be
9724 greater, or the second can be greater. Fold the expression for
9725 those three values. Since each value must be 0 or 1, we have
9726 eight possibilities, each of which corresponds to the constant 0
9727 or 1 or one of the six possible comparisons.
9729 This handles common cases like (a > b) == 0 but also handles
9730 expressions like ((x > y) - (y > x)) > 0, which supposedly
9731 occur in macroized code. */
9733 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9735 tree cval1 = 0, cval2 = 0;
9736 int save_p = 0;
9738 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9739 /* Don't handle degenerate cases here; they should already
9740 have been handled anyway. */
9741 && cval1 != 0 && cval2 != 0
9742 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9743 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9744 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9745 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9746 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9747 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9748 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9750 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9751 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9753 /* We can't just pass T to eval_subst in case cval1 or cval2
9754 was the same as ARG1. */
9757 = fold_build2 (code, type,
9758 eval_subst (arg0, cval1, maxval,
9762 = fold_build2 (code, type,
9763 eval_subst (arg0, cval1, maxval,
9767 = fold_build2 (code, type,
9768 eval_subst (arg0, cval1, minval,
9772 /* All three of these results should be 0 or 1. Confirm they
9773 are. Then use those values to select the proper code
9774 to use. */
9776 if ((integer_zerop (high_result)
9777 || integer_onep (high_result))
9778 && (integer_zerop (equal_result)
9779 || integer_onep (equal_result))
9780 && (integer_zerop (low_result)
9781 || integer_onep (low_result)))
9783 /* Make a 3-bit mask with the high-order bit being the
9784 value for `>', the next for '=', and the low for '<'. */
9785 switch ((integer_onep (high_result) * 4)
9786 + (integer_onep (equal_result) * 2)
9787 + integer_onep (low_result))
9791 return omit_one_operand (type, integer_zero_node, arg0);
9812 return omit_one_operand (type, integer_one_node, arg0);
9816 return save_expr (build2 (code, type, cval1, cval2));
9818 return fold_build2 (code, type, cval1, cval2);
9823 /* If this is a comparison of a field, we may be able to simplify it. */
9824 if (((TREE_CODE (arg0) == COMPONENT_REF
9825 && lang_hooks.can_use_bit_fields_p ())
9826 || TREE_CODE (arg0) == BIT_FIELD_REF)
9827 && (code == EQ_EXPR || code == NE_EXPR)
9828 /* Handle the constant case even without -O
9829 to make sure the warnings are given. */
9830 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9832 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9837 /* Fold a comparison of the address of COMPONENT_REFs with the same
9838 type and component to a comparison of the address of the base
9839 object. In short, &x->a OP &y->a to x OP y and
9840 &x->a OP &y.a to x OP &y */
9841 if (TREE_CODE (arg0) == ADDR_EXPR
9842 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9843 && TREE_CODE (arg1) == ADDR_EXPR
9844 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9846 tree cref0 = TREE_OPERAND (arg0, 0);
9847 tree cref1 = TREE_OPERAND (arg1, 0);
9848 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9850 tree op0 = TREE_OPERAND (cref0, 0);
9851 tree op1 = TREE_OPERAND (cref1, 0);
9852 return fold_build2 (code, type,
9853 build_fold_addr_expr (op0),
9854 build_fold_addr_expr (op1));
9858 /* Optimize comparisons of strlen vs zero to a compare of the
9859 first character of the string vs zero. To wit,
9860 strlen(ptr) == 0 => *ptr == 0
9861 strlen(ptr) != 0 => *ptr != 0
9862 Other cases should reduce to one of these two (or a constant)
9863 due to the return value of strlen being unsigned. */
9864 if ((code == EQ_EXPR || code == NE_EXPR)
9865 && integer_zerop (arg1)
9866 && TREE_CODE (arg0) == CALL_EXPR)
9868 tree fndecl = get_callee_fndecl (arg0);
9872 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9873 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9874 && (arglist = TREE_OPERAND (arg0, 1))
9875 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9876 && ! TREE_CHAIN (arglist))
9878 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9879 return fold_build2 (code, type, iref,
9880 build_int_cst (TREE_TYPE (iref), 0));
9884 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9885 into a single range test. */
9886 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9887 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9888 && TREE_CODE (arg1) == INTEGER_CST
9889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9890 && !integer_zerop (TREE_OPERAND (arg0, 1))
9891 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9892 && !TREE_OVERFLOW (arg1))
9894 t1 = fold_div_compare (code, type, arg0, arg1);
9895 if (t1 != NULL_TREE)
9899 if ((code == EQ_EXPR || code == NE_EXPR)
9900 && integer_zerop (arg1)
9901 && tree_expr_nonzero_p (arg0))
9903 tree res = constant_boolean_node (code==NE_EXPR, type);
9904 return omit_one_operand (type, res, arg0);
9907 t1 = fold_relational_const (code, type, arg0, arg1);
9908 return t1 == NULL_TREE ? NULL_TREE : t1;
9910 case UNORDERED_EXPR:
9918 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9920 t1 = fold_relational_const (code, type, arg0, arg1);
9921 if (t1 != NULL_TREE)
9925 /* If the first operand is NaN, the result is constant. */
9926 if (TREE_CODE (arg0) == REAL_CST
9927 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9928 && (code != LTGT_EXPR || ! flag_trapping_math))
9930 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9933 return omit_one_operand (type, t1, arg1);
9936 /* If the second operand is NaN, the result is constant. */
9937 if (TREE_CODE (arg1) == REAL_CST
9938 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9939 && (code != LTGT_EXPR || ! flag_trapping_math))
9941 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9944 return omit_one_operand (type, t1, arg0);
9947 /* Simplify unordered comparison of something with itself. */
9948 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9949 && operand_equal_p (arg0, arg1, 0))
9950 return constant_boolean_node (1, type);
9952 if (code == LTGT_EXPR
9953 && !flag_trapping_math
9954 && operand_equal_p (arg0, arg1, 0))
9955 return constant_boolean_node (0, type);
9957 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9959 tree targ0 = strip_float_extensions (arg0);
9960 tree targ1 = strip_float_extensions (arg1);
9961 tree newtype = TREE_TYPE (targ0);
9963 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9964 newtype = TREE_TYPE (targ1);
9966 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9967 return fold_build2 (code, type, fold_convert (newtype, targ0),
9968 fold_convert (newtype, targ1));
9974 /* When pedantic, a compound expression can be neither an lvalue
9975 nor an integer constant expression. */
9976 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9978 /* Don't let (0, 0) be null pointer constant. */
9979 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9980 : fold_convert (type, arg1);
9981 return pedantic_non_lvalue (tem);
9984 if ((TREE_CODE (arg0) == REAL_CST
9985 && TREE_CODE (arg1) == REAL_CST)
9986 || (TREE_CODE (arg0) == INTEGER_CST
9987 && TREE_CODE (arg1) == INTEGER_CST))
9988 return build_complex (type, arg0, arg1);
9992 /* An ASSERT_EXPR should never be passed to fold_binary. */
9997 } /* switch (code) */
10000 /* Callback for walk_tree, looking for LABEL_EXPR.
10001 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10002 Do not check the sub-tree of GOTO_EXPR. */
10005 contains_label_1 (tree *tp,
10006 int *walk_subtrees,
10007 void *data ATTRIBUTE_UNUSED)
10009 switch (TREE_CODE (*tp))
10014 *walk_subtrees = 0;
10021 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10022 accessible from outside the sub-tree. Returns NULL_TREE if no
10023 addressable label is found. */
10026 contains_label_p (tree st)
10028 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10031 /* Fold a ternary expression of code CODE and type TYPE with operands
10032 OP0, OP1, and OP2. Return the folded expression if folding is
10033 successful. Otherwise, return NULL_TREE. */
10036 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10039 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10040 enum tree_code_class kind = TREE_CODE_CLASS (code);
10042 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10043 && TREE_CODE_LENGTH (code) == 3);
10045 /* Strip any conversions that don't change the mode. This is safe
10046 for every expression, except for a comparison expression because
10047 its signedness is derived from its operands. So, in the latter
10048 case, only strip conversions that don't change the signedness.
10050 Note that this is done as an internal manipulation within the
10051 constant folder, in order to find the simplest representation of
10052 the arguments so that their form can be studied. In any cases,
10053 the appropriate type conversions should be put back in the tree
10054 that will get out of the constant folder. */
10069 case COMPONENT_REF:
10070 if (TREE_CODE (arg0) == CONSTRUCTOR
10071 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10073 unsigned HOST_WIDE_INT idx;
10075 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10082 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10083 so all simple results must be passed through pedantic_non_lvalue. */
10084 if (TREE_CODE (arg0) == INTEGER_CST)
10086 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10087 tem = integer_zerop (arg0) ? op2 : op1;
10088 /* Only optimize constant conditions when the selected branch
10089 has the same type as the COND_EXPR. This avoids optimizing
10090 away "c ? x : throw", where the throw has a void type.
10091 Avoid throwing away that operand which contains label. */
10092 if ((!TREE_SIDE_EFFECTS (unused_op)
10093 || !contains_label_p (unused_op))
10094 && (! VOID_TYPE_P (TREE_TYPE (tem))
10095 || VOID_TYPE_P (type)))
10096 return pedantic_non_lvalue (tem);
10099 if (operand_equal_p (arg1, op2, 0))
10100 return pedantic_omit_one_operand (type, arg1, arg0);
10102 /* If we have A op B ? A : C, we may be able to convert this to a
10103 simpler expression, depending on the operation and the values
10104 of B and C. Signed zeros prevent all of these transformations,
10105 for reasons given above each one.
10107 Also try swapping the arguments and inverting the conditional. */
10108 if (COMPARISON_CLASS_P (arg0)
10109 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10110 arg1, TREE_OPERAND (arg0, 1))
10111 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10113 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10118 if (COMPARISON_CLASS_P (arg0)
10119 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10121 TREE_OPERAND (arg0, 1))
10122 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10124 tem = invert_truthvalue (arg0);
10125 if (COMPARISON_CLASS_P (tem))
10127 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10133 /* If the second operand is simpler than the third, swap them
10134 since that produces better jump optimization results. */
10135 if (truth_value_p (TREE_CODE (arg0))
10136 && tree_swap_operands_p (op1, op2, false))
10138 /* See if this can be inverted. If it can't, possibly because
10139 it was a floating-point inequality comparison, don't do
10141 tem = invert_truthvalue (arg0);
10143 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10144 return fold_build3 (code, type, tem, op2, op1);
10147 /* Convert A ? 1 : 0 to simply A. */
10148 if (integer_onep (op1)
10149 && integer_zerop (op2)
10150 /* If we try to convert OP0 to our type, the
10151 call to fold will try to move the conversion inside
10152 a COND, which will recurse. In that case, the COND_EXPR
10153 is probably the best choice, so leave it alone. */
10154 && type == TREE_TYPE (arg0))
10155 return pedantic_non_lvalue (arg0);
10157 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10158 over COND_EXPR in cases such as floating point comparisons. */
10159 if (integer_zerop (op1)
10160 && integer_onep (op2)
10161 && truth_value_p (TREE_CODE (arg0)))
10162 return pedantic_non_lvalue (fold_convert (type,
10163 invert_truthvalue (arg0)));
10165 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10166 if (TREE_CODE (arg0) == LT_EXPR
10167 && integer_zerop (TREE_OPERAND (arg0, 1))
10168 && integer_zerop (op2)
10169 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10170 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10171 TREE_TYPE (tem), tem, arg1));
10173 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10174 already handled above. */
10175 if (TREE_CODE (arg0) == BIT_AND_EXPR
10176 && integer_onep (TREE_OPERAND (arg0, 1))
10177 && integer_zerop (op2)
10178 && integer_pow2p (arg1))
10180 tree tem = TREE_OPERAND (arg0, 0);
10182 if (TREE_CODE (tem) == RSHIFT_EXPR
10183 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10184 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10185 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10186 return fold_build2 (BIT_AND_EXPR, type,
10187 TREE_OPERAND (tem, 0), arg1);
10190 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10191 is probably obsolete because the first operand should be a
10192 truth value (that's why we have the two cases above), but let's
10193 leave it in until we can confirm this for all front-ends. */
10194 if (integer_zerop (op2)
10195 && TREE_CODE (arg0) == NE_EXPR
10196 && integer_zerop (TREE_OPERAND (arg0, 1))
10197 && integer_pow2p (arg1)
10198 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10199 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10200 arg1, OEP_ONLY_CONST))
10201 return pedantic_non_lvalue (fold_convert (type,
10202 TREE_OPERAND (arg0, 0)));
10204 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10205 if (integer_zerop (op2)
10206 && truth_value_p (TREE_CODE (arg0))
10207 && truth_value_p (TREE_CODE (arg1)))
10208 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10210 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10211 if (integer_onep (op2)
10212 && truth_value_p (TREE_CODE (arg0))
10213 && truth_value_p (TREE_CODE (arg1)))
10215 /* Only perform transformation if ARG0 is easily inverted. */
10216 tem = invert_truthvalue (arg0);
10217 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10218 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10221 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10222 if (integer_zerop (arg1)
10223 && truth_value_p (TREE_CODE (arg0))
10224 && truth_value_p (TREE_CODE (op2)))
10226 /* Only perform transformation if ARG0 is easily inverted. */
10227 tem = invert_truthvalue (arg0);
10228 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10229 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10232 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10233 if (integer_onep (arg1)
10234 && truth_value_p (TREE_CODE (arg0))
10235 && truth_value_p (TREE_CODE (op2)))
10236 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10241 /* Check for a built-in function. */
10242 if (TREE_CODE (op0) == ADDR_EXPR
10243 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10244 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10245 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10248 case BIT_FIELD_REF:
10249 if (TREE_CODE (arg0) == VECTOR_CST
10250 && type == TREE_TYPE (TREE_TYPE (arg0))
10251 && host_integerp (arg1, 1)
10252 && host_integerp (op2, 1))
10254 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10255 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10258 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10259 && (idx % width) == 0
10260 && (idx = idx / width)
10261 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10263 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10264 while (idx-- > 0 && elements)
10265 elements = TREE_CHAIN (elements);
10267 return TREE_VALUE (elements);
10269 return fold_convert (type, integer_zero_node);
10276 } /* switch (code) */
10279 /* Perform constant folding and related simplification of EXPR.
10280 The related simplifications include x*1 => x, x*0 => 0, etc.,
10281 and application of the associative law.
10282 NOP_EXPR conversions may be removed freely (as long as we
10283 are careful not to change the type of the overall expression).
10284 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10285 but we can constant-fold them if they have constant operands. */
10287 #ifdef ENABLE_FOLD_CHECKING
10288 # define fold(x) fold_1 (x)
10289 static tree fold_1 (tree);
10295 const tree t = expr;
10296 enum tree_code code = TREE_CODE (t);
10297 enum tree_code_class kind = TREE_CODE_CLASS (code);
10300 /* Return right away if a constant. */
10301 if (kind == tcc_constant)
10304 if (IS_EXPR_CODE_CLASS (kind))
10306 tree type = TREE_TYPE (t);
10307 tree op0, op1, op2;
10309 switch (TREE_CODE_LENGTH (code))
10312 op0 = TREE_OPERAND (t, 0);
10313 tem = fold_unary (code, type, op0);
10314 return tem ? tem : expr;
10316 op0 = TREE_OPERAND (t, 0);
10317 op1 = TREE_OPERAND (t, 1);
10318 tem = fold_binary (code, type, op0, op1);
10319 return tem ? tem : expr;
10321 op0 = TREE_OPERAND (t, 0);
10322 op1 = TREE_OPERAND (t, 1);
10323 op2 = TREE_OPERAND (t, 2);
10324 tem = fold_ternary (code, type, op0, op1, op2);
10325 return tem ? tem : expr;
10334 return fold (DECL_INITIAL (t));
10338 } /* switch (code) */
10341 #ifdef ENABLE_FOLD_CHECKING
10344 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10345 static void fold_check_failed (tree, tree);
10346 void print_fold_checksum (tree);
10348 /* When --enable-checking=fold, compute a digest of expr before
10349 and after actual fold call to see if fold did not accidentally
10350 change original expr. */
10356 struct md5_ctx ctx;
10357 unsigned char checksum_before[16], checksum_after[16];
10360 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10361 md5_init_ctx (&ctx);
10362 fold_checksum_tree (expr, &ctx, ht);
10363 md5_finish_ctx (&ctx, checksum_before);
10366 ret = fold_1 (expr);
10368 md5_init_ctx (&ctx);
10369 fold_checksum_tree (expr, &ctx, ht);
10370 md5_finish_ctx (&ctx, checksum_after);
10373 if (memcmp (checksum_before, checksum_after, 16))
10374 fold_check_failed (expr, ret);
10380 print_fold_checksum (tree expr)
10382 struct md5_ctx ctx;
10383 unsigned char checksum[16], cnt;
10386 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10387 md5_init_ctx (&ctx);
10388 fold_checksum_tree (expr, &ctx, ht);
10389 md5_finish_ctx (&ctx, checksum);
10391 for (cnt = 0; cnt < 16; ++cnt)
10392 fprintf (stderr, "%02x", checksum[cnt]);
10393 putc ('\n', stderr);
10397 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10399 internal_error ("fold check: original tree changed by fold");
10403 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10406 enum tree_code code;
10407 char buf[sizeof (struct tree_function_decl)];
10412 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10413 <= sizeof (struct tree_function_decl))
10414 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10417 slot = htab_find_slot (ht, expr, INSERT);
10421 code = TREE_CODE (expr);
10422 if (TREE_CODE_CLASS (code) == tcc_declaration
10423 && DECL_ASSEMBLER_NAME_SET_P (expr))
10425 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10426 memcpy (buf, expr, tree_size (expr));
10428 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10430 else if (TREE_CODE_CLASS (code) == tcc_type
10431 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10432 || TYPE_CACHED_VALUES_P (expr)
10433 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10435 /* Allow these fields to be modified. */
10436 memcpy (buf, expr, tree_size (expr));
10438 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10439 TYPE_POINTER_TO (expr) = NULL;
10440 TYPE_REFERENCE_TO (expr) = NULL;
10441 if (TYPE_CACHED_VALUES_P (expr))
10443 TYPE_CACHED_VALUES_P (expr) = 0;
10444 TYPE_CACHED_VALUES (expr) = NULL;
10447 md5_process_bytes (expr, tree_size (expr), ctx);
10448 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10449 if (TREE_CODE_CLASS (code) != tcc_type
10450 && TREE_CODE_CLASS (code) != tcc_declaration
10451 && code != TREE_LIST)
10452 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10453 switch (TREE_CODE_CLASS (code))
10459 md5_process_bytes (TREE_STRING_POINTER (expr),
10460 TREE_STRING_LENGTH (expr), ctx);
10463 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10464 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10467 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10473 case tcc_exceptional:
10477 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10478 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10479 expr = TREE_CHAIN (expr);
10480 goto recursive_label;
10483 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10484 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10490 case tcc_expression:
10491 case tcc_reference:
10492 case tcc_comparison:
10495 case tcc_statement:
10496 len = TREE_CODE_LENGTH (code);
10497 for (i = 0; i < len; ++i)
10498 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10500 case tcc_declaration:
10501 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10502 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10503 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
10505 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10506 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10507 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10508 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10509 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10511 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10512 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10514 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10516 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10517 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10518 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10522 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10523 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10524 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10525 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10526 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10527 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10528 if (INTEGRAL_TYPE_P (expr)
10529 || SCALAR_FLOAT_TYPE_P (expr))
10531 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10532 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10534 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10535 if (TREE_CODE (expr) == RECORD_TYPE
10536 || TREE_CODE (expr) == UNION_TYPE
10537 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10538 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10539 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10548 /* Fold a unary tree expression with code CODE of type TYPE with an
10549 operand OP0. Return a folded expression if successful. Otherwise,
10550 return a tree expression with code CODE of type TYPE with an
10554 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10557 #ifdef ENABLE_FOLD_CHECKING
10558 unsigned char checksum_before[16], checksum_after[16];
10559 struct md5_ctx ctx;
10562 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10563 md5_init_ctx (&ctx);
10564 fold_checksum_tree (op0, &ctx, ht);
10565 md5_finish_ctx (&ctx, checksum_before);
10569 tem = fold_unary (code, type, op0);
10571 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10573 #ifdef ENABLE_FOLD_CHECKING
10574 md5_init_ctx (&ctx);
10575 fold_checksum_tree (op0, &ctx, ht);
10576 md5_finish_ctx (&ctx, checksum_after);
10579 if (memcmp (checksum_before, checksum_after, 16))
10580 fold_check_failed (op0, tem);
10585 /* Fold a binary tree expression with code CODE of type TYPE with
10586 operands OP0 and OP1. Return a folded expression if successful.
10587 Otherwise, return a tree expression with code CODE of type TYPE
10588 with operands OP0 and OP1. */
10591 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10595 #ifdef ENABLE_FOLD_CHECKING
10596 unsigned char checksum_before_op0[16],
10597 checksum_before_op1[16],
10598 checksum_after_op0[16],
10599 checksum_after_op1[16];
10600 struct md5_ctx ctx;
10603 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10604 md5_init_ctx (&ctx);
10605 fold_checksum_tree (op0, &ctx, ht);
10606 md5_finish_ctx (&ctx, checksum_before_op0);
10609 md5_init_ctx (&ctx);
10610 fold_checksum_tree (op1, &ctx, ht);
10611 md5_finish_ctx (&ctx, checksum_before_op1);
10615 tem = fold_binary (code, type, op0, op1);
10617 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10619 #ifdef ENABLE_FOLD_CHECKING
10620 md5_init_ctx (&ctx);
10621 fold_checksum_tree (op0, &ctx, ht);
10622 md5_finish_ctx (&ctx, checksum_after_op0);
10625 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10626 fold_check_failed (op0, tem);
10628 md5_init_ctx (&ctx);
10629 fold_checksum_tree (op1, &ctx, ht);
10630 md5_finish_ctx (&ctx, checksum_after_op1);
10633 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10634 fold_check_failed (op1, tem);
10639 /* Fold a ternary tree expression with code CODE of type TYPE with
10640 operands OP0, OP1, and OP2. Return a folded expression if
10641 successful. Otherwise, return a tree expression with code CODE of
10642 type TYPE with operands OP0, OP1, and OP2. */
10645 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10649 #ifdef ENABLE_FOLD_CHECKING
10650 unsigned char checksum_before_op0[16],
10651 checksum_before_op1[16],
10652 checksum_before_op2[16],
10653 checksum_after_op0[16],
10654 checksum_after_op1[16],
10655 checksum_after_op2[16];
10656 struct md5_ctx ctx;
10659 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10660 md5_init_ctx (&ctx);
10661 fold_checksum_tree (op0, &ctx, ht);
10662 md5_finish_ctx (&ctx, checksum_before_op0);
10665 md5_init_ctx (&ctx);
10666 fold_checksum_tree (op1, &ctx, ht);
10667 md5_finish_ctx (&ctx, checksum_before_op1);
10670 md5_init_ctx (&ctx);
10671 fold_checksum_tree (op2, &ctx, ht);
10672 md5_finish_ctx (&ctx, checksum_before_op2);
10676 tem = fold_ternary (code, type, op0, op1, op2);
10678 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10680 #ifdef ENABLE_FOLD_CHECKING
10681 md5_init_ctx (&ctx);
10682 fold_checksum_tree (op0, &ctx, ht);
10683 md5_finish_ctx (&ctx, checksum_after_op0);
10686 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10687 fold_check_failed (op0, tem);
10689 md5_init_ctx (&ctx);
10690 fold_checksum_tree (op1, &ctx, ht);
10691 md5_finish_ctx (&ctx, checksum_after_op1);
10694 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10695 fold_check_failed (op1, tem);
10697 md5_init_ctx (&ctx);
10698 fold_checksum_tree (op2, &ctx, ht);
10699 md5_finish_ctx (&ctx, checksum_after_op2);
10702 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10703 fold_check_failed (op2, tem);
10708 /* Perform constant folding and related simplification of initializer
10709 expression EXPR. These behave identically to "fold_buildN" but ignore
10710 potential run-time traps and exceptions that fold must preserve. */
10712 #define START_FOLD_INIT \
10713 int saved_signaling_nans = flag_signaling_nans;\
10714 int saved_trapping_math = flag_trapping_math;\
10715 int saved_rounding_math = flag_rounding_math;\
10716 int saved_trapv = flag_trapv;\
10717 flag_signaling_nans = 0;\
10718 flag_trapping_math = 0;\
10719 flag_rounding_math = 0;\
10722 #define END_FOLD_INIT \
10723 flag_signaling_nans = saved_signaling_nans;\
10724 flag_trapping_math = saved_trapping_math;\
10725 flag_rounding_math = saved_rounding_math;\
10726 flag_trapv = saved_trapv
10729 fold_build1_initializer (enum tree_code code, tree type, tree op)
10734 result = fold_build1 (code, type, op);
10741 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10746 result = fold_build2 (code, type, op0, op1);
10753 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10759 result = fold_build3 (code, type, op0, op1, op2);
10765 #undef START_FOLD_INIT
10766 #undef END_FOLD_INIT
10768 /* Determine if first argument is a multiple of second argument. Return 0 if
10769 it is not, or we cannot easily determined it to be.
10771 An example of the sort of thing we care about (at this point; this routine
10772 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10773 fold cases do now) is discovering that
10775 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10781 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10783 This code also handles discovering that
10785 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10787 is a multiple of 8 so we don't have to worry about dealing with a
10788 possible remainder.
10790 Note that we *look* inside a SAVE_EXPR only to determine how it was
10791 calculated; it is not safe for fold to do much of anything else with the
10792 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10793 at run time. For example, the latter example above *cannot* be implemented
10794 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10795 evaluation time of the original SAVE_EXPR is not necessarily the same at
10796 the time the new expression is evaluated. The only optimization of this
10797 sort that would be valid is changing
10799 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10803 SAVE_EXPR (I) * SAVE_EXPR (J)
10805 (where the same SAVE_EXPR (J) is used in the original and the
10806 transformed version). */
10809 multiple_of_p (tree type, tree top, tree bottom)
10811 if (operand_equal_p (top, bottom, 0))
10814 if (TREE_CODE (type) != INTEGER_TYPE)
10817 switch (TREE_CODE (top))
10820 /* Bitwise and provides a power of two multiple. If the mask is
10821 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10822 if (!integer_pow2p (bottom))
10827 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10828 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10832 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10833 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10836 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10840 op1 = TREE_OPERAND (top, 1);
10841 /* const_binop may not detect overflow correctly,
10842 so check for it explicitly here. */
10843 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10844 > TREE_INT_CST_LOW (op1)
10845 && TREE_INT_CST_HIGH (op1) == 0
10846 && 0 != (t1 = fold_convert (type,
10847 const_binop (LSHIFT_EXPR,
10850 && ! TREE_OVERFLOW (t1))
10851 return multiple_of_p (type, t1, bottom);
10856 /* Can't handle conversions from non-integral or wider integral type. */
10857 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10858 || (TYPE_PRECISION (type)
10859 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10862 /* .. fall through ... */
10865 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10868 if (TREE_CODE (bottom) != INTEGER_CST
10869 || (TYPE_UNSIGNED (type)
10870 && (tree_int_cst_sgn (top) < 0
10871 || tree_int_cst_sgn (bottom) < 0)))
10873 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10881 /* Return true if `t' is known to be non-negative. */
10884 tree_expr_nonnegative_p (tree t)
10886 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10889 switch (TREE_CODE (t))
10892 /* We can't return 1 if flag_wrapv is set because
10893 ABS_EXPR<INT_MIN> = INT_MIN. */
10894 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10899 return tree_int_cst_sgn (t) >= 0;
10902 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10905 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10906 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10907 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10909 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10910 both unsigned and at least 2 bits shorter than the result. */
10911 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10912 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10913 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10915 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10916 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10917 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10918 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10920 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10921 TYPE_PRECISION (inner2)) + 1;
10922 return prec < TYPE_PRECISION (TREE_TYPE (t));
10928 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10930 /* x * x for floating point x is always non-negative. */
10931 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10933 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10934 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10937 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10938 both unsigned and their total bits is shorter than the result. */
10939 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10940 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10941 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10943 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10944 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10945 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10946 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10947 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10948 < TYPE_PRECISION (TREE_TYPE (t));
10954 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10955 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10961 case TRUNC_DIV_EXPR:
10962 case CEIL_DIV_EXPR:
10963 case FLOOR_DIV_EXPR:
10964 case ROUND_DIV_EXPR:
10965 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10966 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10968 case TRUNC_MOD_EXPR:
10969 case CEIL_MOD_EXPR:
10970 case FLOOR_MOD_EXPR:
10971 case ROUND_MOD_EXPR:
10973 case NON_LVALUE_EXPR:
10975 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10977 case COMPOUND_EXPR:
10979 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10982 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10985 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10986 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10990 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10991 tree outer_type = TREE_TYPE (t);
10993 if (TREE_CODE (outer_type) == REAL_TYPE)
10995 if (TREE_CODE (inner_type) == REAL_TYPE)
10996 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10997 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10999 if (TYPE_UNSIGNED (inner_type))
11001 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11004 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11006 if (TREE_CODE (inner_type) == REAL_TYPE)
11007 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11008 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11009 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11010 && TYPE_UNSIGNED (inner_type);
11017 tree temp = TARGET_EXPR_SLOT (t);
11018 t = TARGET_EXPR_INITIAL (t);
11020 /* If the initializer is non-void, then it's a normal expression
11021 that will be assigned to the slot. */
11022 if (!VOID_TYPE_P (t))
11023 return tree_expr_nonnegative_p (t);
11025 /* Otherwise, the initializer sets the slot in some way. One common
11026 way is an assignment statement at the end of the initializer. */
11029 if (TREE_CODE (t) == BIND_EXPR)
11030 t = expr_last (BIND_EXPR_BODY (t));
11031 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11032 || TREE_CODE (t) == TRY_CATCH_EXPR)
11033 t = expr_last (TREE_OPERAND (t, 0));
11034 else if (TREE_CODE (t) == STATEMENT_LIST)
11039 if (TREE_CODE (t) == MODIFY_EXPR
11040 && TREE_OPERAND (t, 0) == temp)
11041 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11048 tree fndecl = get_callee_fndecl (t);
11049 tree arglist = TREE_OPERAND (t, 1);
11050 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11051 switch (DECL_FUNCTION_CODE (fndecl))
11053 CASE_FLT_FN (BUILT_IN_ACOS):
11054 CASE_FLT_FN (BUILT_IN_ACOSH):
11055 CASE_FLT_FN (BUILT_IN_CABS):
11056 CASE_FLT_FN (BUILT_IN_COSH):
11057 CASE_FLT_FN (BUILT_IN_ERFC):
11058 CASE_FLT_FN (BUILT_IN_EXP):
11059 CASE_FLT_FN (BUILT_IN_EXP10):
11060 CASE_FLT_FN (BUILT_IN_EXP2):
11061 CASE_FLT_FN (BUILT_IN_FABS):
11062 CASE_FLT_FN (BUILT_IN_FDIM):
11063 CASE_FLT_FN (BUILT_IN_HYPOT):
11064 CASE_FLT_FN (BUILT_IN_POW10):
11065 CASE_INT_FN (BUILT_IN_FFS):
11066 CASE_INT_FN (BUILT_IN_PARITY):
11067 CASE_INT_FN (BUILT_IN_POPCOUNT):
11071 CASE_FLT_FN (BUILT_IN_SQRT):
11072 /* sqrt(-0.0) is -0.0. */
11073 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11075 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11077 CASE_FLT_FN (BUILT_IN_ASINH):
11078 CASE_FLT_FN (BUILT_IN_ATAN):
11079 CASE_FLT_FN (BUILT_IN_ATANH):
11080 CASE_FLT_FN (BUILT_IN_CBRT):
11081 CASE_FLT_FN (BUILT_IN_CEIL):
11082 CASE_FLT_FN (BUILT_IN_ERF):
11083 CASE_FLT_FN (BUILT_IN_EXPM1):
11084 CASE_FLT_FN (BUILT_IN_FLOOR):
11085 CASE_FLT_FN (BUILT_IN_FMOD):
11086 CASE_FLT_FN (BUILT_IN_FREXP):
11087 CASE_FLT_FN (BUILT_IN_LCEIL):
11088 CASE_FLT_FN (BUILT_IN_LDEXP):
11089 CASE_FLT_FN (BUILT_IN_LFLOOR):
11090 CASE_FLT_FN (BUILT_IN_LLCEIL):
11091 CASE_FLT_FN (BUILT_IN_LLFLOOR):
11092 CASE_FLT_FN (BUILT_IN_LLRINT):
11093 CASE_FLT_FN (BUILT_IN_LLROUND):
11094 CASE_FLT_FN (BUILT_IN_LRINT):
11095 CASE_FLT_FN (BUILT_IN_LROUND):
11096 CASE_FLT_FN (BUILT_IN_MODF):
11097 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11098 CASE_FLT_FN (BUILT_IN_POW):
11099 CASE_FLT_FN (BUILT_IN_RINT):
11100 CASE_FLT_FN (BUILT_IN_ROUND):
11101 CASE_FLT_FN (BUILT_IN_SIGNBIT):
11102 CASE_FLT_FN (BUILT_IN_SINH):
11103 CASE_FLT_FN (BUILT_IN_TANH):
11104 CASE_FLT_FN (BUILT_IN_TRUNC):
11105 /* True if the 1st argument is nonnegative. */
11106 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11108 CASE_FLT_FN (BUILT_IN_FMAX):
11109 /* True if the 1st OR 2nd arguments are nonnegative. */
11110 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11111 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11113 CASE_FLT_FN (BUILT_IN_FMIN):
11114 /* True if the 1st AND 2nd arguments are nonnegative. */
11115 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11116 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11118 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11119 /* True if the 2nd argument is nonnegative. */
11120 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11127 /* ... fall through ... */
11130 if (truth_value_p (TREE_CODE (t)))
11131 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11135 /* We don't know sign of `t', so be conservative and return false. */
11139 /* Return true when T is an address and is known to be nonzero.
11140 For floating point we further ensure that T is not denormal.
11141 Similar logic is present in nonzero_address in rtlanal.h. */
11144 tree_expr_nonzero_p (tree t)
11146 tree type = TREE_TYPE (t);
11148 /* Doing something useful for floating point would need more work. */
11149 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11152 switch (TREE_CODE (t))
11155 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11158 /* We used to test for !integer_zerop here. This does not work correctly
11159 if TREE_CONSTANT_OVERFLOW (t). */
11160 return (TREE_INT_CST_LOW (t) != 0
11161 || TREE_INT_CST_HIGH (t) != 0);
11164 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11166 /* With the presence of negative values it is hard
11167 to say something. */
11168 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11169 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11171 /* One of operands must be positive and the other non-negative. */
11172 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11173 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11178 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11180 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11181 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11187 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11188 tree outer_type = TREE_TYPE (t);
11190 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11191 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11197 tree base = get_base_address (TREE_OPERAND (t, 0));
11202 /* Weak declarations may link to NULL. */
11203 if (VAR_OR_FUNCTION_DECL_P (base))
11204 return !DECL_WEAK (base);
11206 /* Constants are never weak. */
11207 if (CONSTANT_CLASS_P (base))
11214 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11215 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11218 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11219 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11222 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11224 /* When both operands are nonzero, then MAX must be too. */
11225 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11228 /* MAX where operand 0 is positive is positive. */
11229 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11231 /* MAX where operand 1 is positive is positive. */
11232 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11233 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11237 case COMPOUND_EXPR:
11240 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11243 case NON_LVALUE_EXPR:
11244 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11247 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11248 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11251 return alloca_call_p (t);
11259 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11260 attempt to fold the expression to a constant without modifying TYPE,
11263 If the expression could be simplified to a constant, then return
11264 the constant. If the expression would not be simplified to a
11265 constant, then return NULL_TREE. */
11268 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11270 tree tem = fold_binary (code, type, op0, op1);
11271 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11274 /* Given the components of a unary expression CODE, TYPE and OP0,
11275 attempt to fold the expression to a constant without modifying
11278 If the expression could be simplified to a constant, then return
11279 the constant. If the expression would not be simplified to a
11280 constant, then return NULL_TREE. */
11283 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11285 tree tem = fold_unary (code, type, op0);
11286 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11289 /* If EXP represents referencing an element in a constant string
11290 (either via pointer arithmetic or array indexing), return the
11291 tree representing the value accessed, otherwise return NULL. */
11294 fold_read_from_constant_string (tree exp)
11296 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11298 tree exp1 = TREE_OPERAND (exp, 0);
11302 if (TREE_CODE (exp) == INDIRECT_REF)
11303 string = string_constant (exp1, &index);
11306 tree low_bound = array_ref_low_bound (exp);
11307 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11309 /* Optimize the special-case of a zero lower bound.
11311 We convert the low_bound to sizetype to avoid some problems
11312 with constant folding. (E.g. suppose the lower bound is 1,
11313 and its mode is QI. Without the conversion,l (ARRAY
11314 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11315 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
11316 if (! integer_zerop (low_bound))
11317 index = size_diffop (index, fold_convert (sizetype, low_bound));
11323 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11324 && TREE_CODE (string) == STRING_CST
11325 && TREE_CODE (index) == INTEGER_CST
11326 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11327 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11329 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11330 return fold_convert (TREE_TYPE (exp),
11331 build_int_cst (NULL_TREE,
11332 (TREE_STRING_POINTER (string)
11333 [TREE_INT_CST_LOW (index)])));
11338 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11339 an integer constant or real constant.
11341 TYPE is the type of the result. */
11344 fold_negate_const (tree arg0, tree type)
11346 tree t = NULL_TREE;
11348 switch (TREE_CODE (arg0))
11352 unsigned HOST_WIDE_INT low;
11353 HOST_WIDE_INT high;
11354 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11355 TREE_INT_CST_HIGH (arg0),
11357 t = build_int_cst_wide (type, low, high);
11358 t = force_fit_type (t, 1,
11359 (overflow | TREE_OVERFLOW (arg0))
11360 && !TYPE_UNSIGNED (type),
11361 TREE_CONSTANT_OVERFLOW (arg0));
11366 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11370 gcc_unreachable ();
11376 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11377 an integer constant or real constant.
11379 TYPE is the type of the result. */
11382 fold_abs_const (tree arg0, tree type)
11384 tree t = NULL_TREE;
11386 switch (TREE_CODE (arg0))
11389 /* If the value is unsigned, then the absolute value is
11390 the same as the ordinary value. */
11391 if (TYPE_UNSIGNED (type))
11393 /* Similarly, if the value is non-negative. */
11394 else if (INT_CST_LT (integer_minus_one_node, arg0))
11396 /* If the value is negative, then the absolute value is
11400 unsigned HOST_WIDE_INT low;
11401 HOST_WIDE_INT high;
11402 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11403 TREE_INT_CST_HIGH (arg0),
11405 t = build_int_cst_wide (type, low, high);
11406 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11407 TREE_CONSTANT_OVERFLOW (arg0));
11412 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11413 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11419 gcc_unreachable ();
11425 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11426 constant. TYPE is the type of the result. */
11429 fold_not_const (tree arg0, tree type)
11431 tree t = NULL_TREE;
11433 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11435 t = build_int_cst_wide (type,
11436 ~ TREE_INT_CST_LOW (arg0),
11437 ~ TREE_INT_CST_HIGH (arg0));
11438 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11439 TREE_CONSTANT_OVERFLOW (arg0));
11444 /* Given CODE, a relational operator, the target type, TYPE and two
11445 constant operands OP0 and OP1, return the result of the
11446 relational operation. If the result is not a compile time
11447 constant, then return NULL_TREE. */
11450 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11452 int result, invert;
11454 /* From here on, the only cases we handle are when the result is
11455 known to be a constant. */
11457 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11459 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11460 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11462 /* Handle the cases where either operand is a NaN. */
11463 if (real_isnan (c0) || real_isnan (c1))
11473 case UNORDERED_EXPR:
11487 if (flag_trapping_math)
11493 gcc_unreachable ();
11496 return constant_boolean_node (result, type);
11499 return constant_boolean_node (real_compare (code, c0, c1), type);
11502 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11504 To compute GT, swap the arguments and do LT.
11505 To compute GE, do LT and invert the result.
11506 To compute LE, swap the arguments, do LT and invert the result.
11507 To compute NE, do EQ and invert the result.
11509 Therefore, the code below must handle only EQ and LT. */
11511 if (code == LE_EXPR || code == GT_EXPR)
11516 code = swap_tree_comparison (code);
11519 /* Note that it is safe to invert for real values here because we
11520 have already handled the one case that it matters. */
11523 if (code == NE_EXPR || code == GE_EXPR)
11526 code = invert_tree_comparison (code, false);
11529 /* Compute a result for LT or EQ if args permit;
11530 Otherwise return T. */
11531 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11533 if (code == EQ_EXPR)
11534 result = tree_int_cst_equal (op0, op1);
11535 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11536 result = INT_CST_LT_UNSIGNED (op0, op1);
11538 result = INT_CST_LT (op0, op1);
11545 return constant_boolean_node (result, type);
11548 /* Build an expression for the a clean point containing EXPR with type TYPE.
11549 Don't build a cleanup point expression for EXPR which don't have side
11553 fold_build_cleanup_point_expr (tree type, tree expr)
11555 /* If the expression does not have side effects then we don't have to wrap
11556 it with a cleanup point expression. */
11557 if (!TREE_SIDE_EFFECTS (expr))
11560 /* If the expression is a return, check to see if the expression inside the
11561 return has no side effects or the right hand side of the modify expression
11562 inside the return. If either don't have side effects set we don't need to
11563 wrap the expression in a cleanup point expression. Note we don't check the
11564 left hand side of the modify because it should always be a return decl. */
11565 if (TREE_CODE (expr) == RETURN_EXPR)
11567 tree op = TREE_OPERAND (expr, 0);
11568 if (!op || !TREE_SIDE_EFFECTS (op))
11570 op = TREE_OPERAND (op, 1);
11571 if (!TREE_SIDE_EFFECTS (op))
11575 return build1 (CLEANUP_POINT_EXPR, type, expr);
11578 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11579 avoid confusing the gimplify process. */
11582 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11584 /* The size of the object is not relevant when talking about its address. */
11585 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11586 t = TREE_OPERAND (t, 0);
11588 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11589 if (TREE_CODE (t) == INDIRECT_REF
11590 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11592 t = TREE_OPERAND (t, 0);
11593 if (TREE_TYPE (t) != ptrtype)
11594 t = build1 (NOP_EXPR, ptrtype, t);
11600 while (handled_component_p (base))
11601 base = TREE_OPERAND (base, 0);
11603 TREE_ADDRESSABLE (base) = 1;
11605 t = build1 (ADDR_EXPR, ptrtype, t);
11612 build_fold_addr_expr (tree t)
11614 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11617 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11618 of an indirection through OP0, or NULL_TREE if no simplification is
11622 fold_indirect_ref_1 (tree type, tree op0)
11628 subtype = TREE_TYPE (sub);
11629 if (!POINTER_TYPE_P (subtype))
11632 if (TREE_CODE (sub) == ADDR_EXPR)
11634 tree op = TREE_OPERAND (sub, 0);
11635 tree optype = TREE_TYPE (op);
11636 /* *&p => p; make sure to handle *&"str"[cst] here. */
11637 if (type == optype)
11639 tree fop = fold_read_from_constant_string (op);
11645 /* *(foo *)&fooarray => fooarray[0] */
11646 else if (TREE_CODE (optype) == ARRAY_TYPE
11647 && type == TREE_TYPE (optype))
11649 tree type_domain = TYPE_DOMAIN (optype);
11650 tree min_val = size_zero_node;
11651 if (type_domain && TYPE_MIN_VALUE (type_domain))
11652 min_val = TYPE_MIN_VALUE (type_domain);
11653 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11657 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11658 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11659 && type == TREE_TYPE (TREE_TYPE (subtype)))
11662 tree min_val = size_zero_node;
11663 sub = build_fold_indirect_ref (sub);
11664 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11665 if (type_domain && TYPE_MIN_VALUE (type_domain))
11666 min_val = TYPE_MIN_VALUE (type_domain);
11667 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11673 /* Builds an expression for an indirection through T, simplifying some
11677 build_fold_indirect_ref (tree t)
11679 tree type = TREE_TYPE (TREE_TYPE (t));
11680 tree sub = fold_indirect_ref_1 (type, t);
11685 return build1 (INDIRECT_REF, type, t);
11688 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11691 fold_indirect_ref (tree t)
11693 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11701 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11702 whose result is ignored. The type of the returned tree need not be
11703 the same as the original expression. */
11706 fold_ignored_result (tree t)
11708 if (!TREE_SIDE_EFFECTS (t))
11709 return integer_zero_node;
11712 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11715 t = TREE_OPERAND (t, 0);
11719 case tcc_comparison:
11720 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11721 t = TREE_OPERAND (t, 0);
11722 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11723 t = TREE_OPERAND (t, 1);
11728 case tcc_expression:
11729 switch (TREE_CODE (t))
11731 case COMPOUND_EXPR:
11732 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11734 t = TREE_OPERAND (t, 0);
11738 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11739 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11741 t = TREE_OPERAND (t, 0);
11754 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11755 This can only be applied to objects of a sizetype. */
11758 round_up (tree value, int divisor)
11760 tree div = NULL_TREE;
11762 gcc_assert (divisor > 0);
11766 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11767 have to do anything. Only do this when we are not given a const,
11768 because in that case, this check is more expensive than just
11770 if (TREE_CODE (value) != INTEGER_CST)
11772 div = build_int_cst (TREE_TYPE (value), divisor);
11774 if (multiple_of_p (TREE_TYPE (value), value, div))
11778 /* If divisor is a power of two, simplify this to bit manipulation. */
11779 if (divisor == (divisor & -divisor))
11783 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11784 value = size_binop (PLUS_EXPR, value, t);
11785 t = build_int_cst (TREE_TYPE (value), -divisor);
11786 value = size_binop (BIT_AND_EXPR, value, t);
11791 div = build_int_cst (TREE_TYPE (value), divisor);
11792 value = size_binop (CEIL_DIV_EXPR, value, div);
11793 value = size_binop (MULT_EXPR, value, div);
11799 /* Likewise, but round down. */
11802 round_down (tree value, int divisor)
11804 tree div = NULL_TREE;
11806 gcc_assert (divisor > 0);
11810 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11811 have to do anything. Only do this when we are not given a const,
11812 because in that case, this check is more expensive than just
11814 if (TREE_CODE (value) != INTEGER_CST)
11816 div = build_int_cst (TREE_TYPE (value), divisor);
11818 if (multiple_of_p (TREE_TYPE (value), value, div))
11822 /* If divisor is a power of two, simplify this to bit manipulation. */
11823 if (divisor == (divisor & -divisor))
11827 t = build_int_cst (TREE_TYPE (value), -divisor);
11828 value = size_binop (BIT_AND_EXPR, value, t);
11833 div = build_int_cst (TREE_TYPE (value), divisor);
11834 value = size_binop (FLOOR_DIV_EXPR, value, div);
11835 value = size_binop (MULT_EXPR, value, div);
11841 /* Returns the pointer to the base of the object addressed by EXP and
11842 extracts the information about the offset of the access, storing it
11843 to PBITPOS and POFFSET. */
11846 split_address_to_core_and_offset (tree exp,
11847 HOST_WIDE_INT *pbitpos, tree *poffset)
11850 enum machine_mode mode;
11851 int unsignedp, volatilep;
11852 HOST_WIDE_INT bitsize;
11854 if (TREE_CODE (exp) == ADDR_EXPR)
11856 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11857 poffset, &mode, &unsignedp, &volatilep,
11859 core = build_fold_addr_expr (core);
11865 *poffset = NULL_TREE;
11871 /* Returns true if addresses of E1 and E2 differ by a constant, false
11872 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11875 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11878 HOST_WIDE_INT bitpos1, bitpos2;
11879 tree toffset1, toffset2, tdiff, type;
11881 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11882 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11884 if (bitpos1 % BITS_PER_UNIT != 0
11885 || bitpos2 % BITS_PER_UNIT != 0
11886 || !operand_equal_p (core1, core2, 0))
11889 if (toffset1 && toffset2)
11891 type = TREE_TYPE (toffset1);
11892 if (type != TREE_TYPE (toffset2))
11893 toffset2 = fold_convert (type, toffset2);
11895 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11896 if (!cst_and_fits_in_hwi (tdiff))
11899 *diff = int_cst_value (tdiff);
11901 else if (toffset1 || toffset2)
11903 /* If only one of the offsets is non-constant, the difference cannot
11910 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11914 /* Simplify the floating point expression EXP when the sign of the
11915 result is not significant. Return NULL_TREE if no simplification
11919 fold_strip_sign_ops (tree exp)
11923 switch (TREE_CODE (exp))
11927 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11928 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11932 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11934 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11935 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11936 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11937 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11938 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11939 arg1 ? arg1 : TREE_OPERAND (exp, 1));