1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
@@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
64 int folding_initializer = 0;
/* The following constants represent a bit-based encoding of GCC's
67 comparison operators. This encoding simplifies transformations
68 on relational comparison operators, such as AND and OR. */
69 enum comparison_code {
88 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
89 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree, int);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree make_range (tree, int *, tree *, tree *);
118 static tree build_range_check (tree, tree, int, tree, tree);
119 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
121 static tree fold_range_test (enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
126 static tree extract_muldiv (tree, tree, enum tree_code, tree);
127 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
128 static int multiple_of_p (tree, tree, tree);
129 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
132 static bool fold_real_zero_addition_p (tree, tree, int);
133 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
135 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
136 static tree fold_div_compare (enum tree_code, tree, tree, tree);
137 static bool reorder_operands_p (tree, tree);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static int native_encode_expr (tree, unsigned char *, int);
142 static tree native_interpret_expr (tree, unsigned char *, int);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
and SUM1.  Then this yields nonzero if overflow occurred during the
addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
sign bit.  */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
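/* Purely illustrative example (using 8-bit values for brevity; the macro
   itself operates on HOST_WIDE_INT): 100 + 100 wraps to -56, so A and B
   agree in sign while A and SUM differ, and ~(A ^ B) & (A ^ SUM) has its
   sign bit set, making the macro yield nonzero.  For 100 + (-56) = 44 the
   operands already differ in sign, and the macro yields zero.  */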
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
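/* For example, assuming a 64-bit HOST_WIDE_INT: BASE is 2^32, so
   LOWPART (0x0000000500000007) is 0x00000007, HIGHPART of the same value
   is 0x00000005, and any X equals LOWPART (X) + HIGHPART (X) * BASE.  */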
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
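/* As a worked example (again assuming a 64-bit HOST_WIDE_INT), encode of
   low = 0x0000000500000007, hi = 0 stores the words {7, 5, 0, 0}, and
   decode of those words reconstructs exactly the original LOW/HI pair.  */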
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
192 in overflow of the value, when >0 we are only interested in signed
193 overflow, for <0 we are interested in any overflow. OVERFLOWED
194 indicates whether overflow has already occurred. CONST_OVERFLOWED
195 indicates whether constant overflow has already occurred. We force
196 T's value to be within range of T's type (by setting to 0 or 1 all
the bits outside the type's range).  We set TREE_OVERFLOW if
  OVERFLOWED is nonzero,
  or OVERFLOWABLE is >0 and signed overflow occurs,
  or OVERFLOWABLE is <0 and any overflow occurs.
We set TREE_CONSTANT_OVERFLOW if
  CONST_OVERFLOWED is nonzero,
  or we set TREE_OVERFLOW.
204 We return either the original T, or a copy. */
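/* Example of the canonicalization described above: for a signed 8-bit type
   (precision 8) a constant whose low word holds 0xff has all bits above
   bit 7 cleared and is then sign extended, so the stored value becomes -1;
   for an unsigned 8-bit type it simply stays 0xff (255).  */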
207 force_fit_type (tree t, int overflowable,
208 bool overflowed, bool overflowed_const)
210 unsigned HOST_WIDE_INT low;
213 int sign_extended_type;
215 gcc_assert (TREE_CODE (t) == INTEGER_CST);
217 low = TREE_INT_CST_LOW (t);
218 high = TREE_INT_CST_HIGH (t);
220 if (POINTER_TYPE_P (TREE_TYPE (t))
221 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
224 prec = TYPE_PRECISION (TREE_TYPE (t));
225 /* Size types *are* sign extended. */
226 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
227 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
230 /* First clear all bits that are beyond the type's precision. */
232 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
239 if (prec < HOST_BITS_PER_WIDE_INT)
240 low &= ~((HOST_WIDE_INT) (-1) << prec);
243 if (!sign_extended_type)
244 /* No sign extension */;
245 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
246 /* Correct width already. */;
247 else if (prec > HOST_BITS_PER_WIDE_INT)
249 /* Sign extend top half? */
250 if (high & ((unsigned HOST_WIDE_INT)1
251 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
252 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
254 else if (prec == HOST_BITS_PER_WIDE_INT)
256 if ((HOST_WIDE_INT)low < 0)
261 /* Sign extend bottom half? */
262 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 low |= (HOST_WIDE_INT)(-1) << prec;
269 /* If the value changed, return a new node. */
270 if (overflowed || overflowed_const
271 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
273 t = build_int_cst_wide (TREE_TYPE (t), low, high);
277 || (overflowable > 0 && sign_extended_type))
280 TREE_OVERFLOW (t) = 1;
281 TREE_CONSTANT_OVERFLOW (t) = 1;
283 else if (overflowed_const)
286 TREE_CONSTANT_OVERFLOW (t) = 1;
293 /* Add two doubleword integers with doubleword result.
294 Each argument is given as two `HOST_WIDE_INT' pieces.
295 One argument is L1 and H1; the other, L2 and H2.
296 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
299 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
300 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
301 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
303 unsigned HOST_WIDE_INT l;
307 h = h1 + h2 + (l < l1);
311 return OVERFLOW_SUM_SIGN (h1, h2, h);
314 /* Negate a doubleword integer with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
327 return (*hv & h1) < 0;
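/* Negation overflows only for the most negative doubleword value, which is
   its own negation; in that single case the sign bits of both H1 and *HV
   remain set, which is exactly what the test above detects.  */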
337 /* Multiply two doubleword integers with doubleword result.
338 Return nonzero if the operation overflows, assuming it's signed.
339 Each argument is given as two `HOST_WIDE_INT' pieces.
340 One argument is L1 and H1; the other, L2 and H2.
341 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
344 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
345 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
348 HOST_WIDE_INT arg1[4];
349 HOST_WIDE_INT arg2[4];
350 HOST_WIDE_INT prod[4 * 2];
351 unsigned HOST_WIDE_INT carry;
353 unsigned HOST_WIDE_INT toplow, neglow;
354 HOST_WIDE_INT tophigh, neghigh;
356 encode (arg1, l1, h1);
357 encode (arg2, l2, h2);
359 memset (prod, 0, sizeof prod);
361 for (i = 0; i < 4; i++)
364 for (j = 0; j < 4; j++)
367 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
368 carry += arg1[i] * arg2[j];
369 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
371 prod[k] = LOWPART (carry);
372 carry = HIGHPART (carry);
377 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
379 /* Check for overflow by calculating the top half of the answer in full;
380 it should agree with the low half's sign bit. */
381 decode (prod + 4, &toplow, &tophigh);
384 neg_double (l2, h2, &neglow, &neghigh);
385 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
389 neg_double (l1, h1, &neglow, &neghigh);
390 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
392 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
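/* After correcting the unsigned top half for negative operands, the signed
   product fits in a doubleword exactly when TOPLOW/TOPHIGH are a pure sign
   extension of the low half: both zero for a non-negative result, both all
   ones for a negative one.  The return expression above tests precisely
   that.  */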
395 /* Shift the doubleword integer in L1, H1 left by COUNT places
396 keeping only PREC bits of result.
397 Shift right if COUNT is negative.
398 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
399 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
402 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
403 HOST_WIDE_INT count, unsigned int prec,
404 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
406 unsigned HOST_WIDE_INT signmask;
410 rshift_double (l1, h1, -count, prec, lv, hv, arith);
414 if (SHIFT_COUNT_TRUNCATED)
417 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
419 /* Shifting by the host word size is undefined according to the
420 ANSI standard, so we must handle this as a special case. */
424 else if (count >= HOST_BITS_PER_WIDE_INT)
426 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
431 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
432 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
436 /* Sign extend all bits that are beyond the precision. */
438 signmask = -((prec > HOST_BITS_PER_WIDE_INT
439 ? ((unsigned HOST_WIDE_INT) *hv
440 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
441 : (*lv >> (prec - 1))) & 1);
443 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
445 else if (prec >= HOST_BITS_PER_WIDE_INT)
447 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
448 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
453 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
454 *lv |= signmask << prec;
458 /* Shift the doubleword integer in L1, H1 right by COUNT places
459 keeping only PREC bits of result. COUNT must be positive.
460 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
461 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
464 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
465 HOST_WIDE_INT count, unsigned int prec,
466 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
469 unsigned HOST_WIDE_INT signmask;
472 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
475 if (SHIFT_COUNT_TRUNCATED)
478 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
480 /* Shifting by the host word size is undefined according to the
481 ANSI standard, so we must handle this as a special case. */
485 else if (count >= HOST_BITS_PER_WIDE_INT)
488 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
492 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
494 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
497 /* Zero / sign extend all bits that are beyond the precision. */
499 if (count >= (HOST_WIDE_INT)prec)
504 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
506 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
508 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
509 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
514 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
515 *lv |= signmask << (prec - count);
519 /* Rotate the doubleword integer in L1, H1 left by COUNT places
520 keeping only PREC bits of result.
521 Rotate right if COUNT is negative.
522 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
525 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
526 HOST_WIDE_INT count, unsigned int prec,
527 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
529 unsigned HOST_WIDE_INT s1l, s2l;
530 HOST_WIDE_INT s1h, s2h;
536 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
537 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* Rotate the doubleword integer in L1, H1 right by COUNT places
543 keeping only PREC bits of result. COUNT must be positive.
544 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
547 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
548 HOST_WIDE_INT count, unsigned int prec,
549 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
551 unsigned HOST_WIDE_INT s1l, s2l;
552 HOST_WIDE_INT s1h, s2h;
558 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
559 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
564 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
565 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
566 CODE is a tree code for a kind of division, one of
TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR,
or EXACT_DIV_EXPR.
569 It controls how the quotient is rounded to an integer.
570 Return nonzero if the operation overflows.
571 UNS nonzero says do unsigned division. */
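/* For example, dividing -8 by 3 yields quotient -2 and remainder -2 under
   TRUNC_DIV_EXPR, quotient -3 and remainder 1 under FLOOR_DIV_EXPR,
   quotient -2 and remainder -2 under CEIL_DIV_EXPR, and quotient -3 and
   remainder 1 under ROUND_DIV_EXPR (since -8/3 is nearer to -3).  */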
574 div_and_round_double (enum tree_code code, int uns,
575 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
576 HOST_WIDE_INT hnum_orig,
577 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
578 HOST_WIDE_INT hden_orig,
579 unsigned HOST_WIDE_INT *lquo,
580 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
584 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
585 HOST_WIDE_INT den[4], quo[4];
587 unsigned HOST_WIDE_INT work;
588 unsigned HOST_WIDE_INT carry = 0;
589 unsigned HOST_WIDE_INT lnum = lnum_orig;
590 HOST_WIDE_INT hnum = hnum_orig;
591 unsigned HOST_WIDE_INT lden = lden_orig;
592 HOST_WIDE_INT hden = hden_orig;
595 if (hden == 0 && lden == 0)
596 overflow = 1, lden = 1;
598 /* Calculate quotient sign and convert operands to unsigned. */
604 /* (minimum integer) / (-1) is the only overflow case. */
605 if (neg_double (lnum, hnum, &lnum, &hnum)
606 && ((HOST_WIDE_INT) lden & hden) == -1)
612 neg_double (lden, hden, &lden, &hden);
616 if (hnum == 0 && hden == 0)
617 { /* single precision */
619 /* This unsigned division rounds toward zero. */
625 { /* trivial case: dividend < divisor */
626 /* hden != 0 already checked. */
633 memset (quo, 0, sizeof quo);
memset (num, 0, sizeof num);	/* to zero the extra 5th element used for scaling */
636 memset (den, 0, sizeof den);
638 encode (num, lnum, hnum);
639 encode (den, lden, hden);
641 /* Special code for when the divisor < BASE. */
642 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
644 /* hnum != 0 already checked. */
645 for (i = 4 - 1; i >= 0; i--)
647 work = num[i] + carry * BASE;
648 quo[i] = work / lden;
654 /* Full double precision division,
655 with thanks to Don Knuth's "Seminumerical Algorithms". */
656 int num_hi_sig, den_hi_sig;
657 unsigned HOST_WIDE_INT quo_est, scale;
659 /* Find the highest nonzero divisor digit. */
660 for (i = 4 - 1;; i--)
/* Ensure that the first digit of the divisor is at least BASE/2.
668 This is required by the quotient digit estimation algorithm. */
670 scale = BASE / (den[den_hi_sig] + 1);
672 { /* scale divisor and dividend */
674 for (i = 0; i <= 4 - 1; i++)
676 work = (num[i] * scale) + carry;
677 num[i] = LOWPART (work);
678 carry = HIGHPART (work);
683 for (i = 0; i <= 4 - 1; i++)
685 work = (den[i] * scale) + carry;
686 den[i] = LOWPART (work);
687 carry = HIGHPART (work);
688 if (den[i] != 0) den_hi_sig = i;
695 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
697 /* Guess the next quotient digit, quo_est, by dividing the first
698 two remaining dividend digits by the high order quotient digit.
699 quo_est is never low and is at most 2 high. */
700 unsigned HOST_WIDE_INT tmp;
702 num_hi_sig = i + den_hi_sig + 1;
703 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
704 if (num[num_hi_sig] != den[den_hi_sig])
705 quo_est = work / den[den_hi_sig];
709 /* Refine quo_est so it's usually correct, and at most one high. */
710 tmp = work - quo_est * den[den_hi_sig];
712 && (den[den_hi_sig - 1] * quo_est
713 > (tmp * BASE + num[num_hi_sig - 2])))
716 /* Try QUO_EST as the quotient digit, by multiplying the
717 divisor by QUO_EST and subtracting from the remaining dividend.
718 Keep in mind that QUO_EST is the I - 1st digit. */
721 for (j = 0; j <= den_hi_sig; j++)
723 work = quo_est * den[j] + carry;
724 carry = HIGHPART (work);
725 work = num[i + j] - LOWPART (work);
726 num[i + j] = LOWPART (work);
727 carry += HIGHPART (work) != 0;
730 /* If quo_est was high by one, then num[i] went negative and
731 we need to correct things. */
732 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
735 carry = 0; /* add divisor back in */
736 for (j = 0; j <= den_hi_sig; j++)
738 work = num[i + j] + den[j] + carry;
739 carry = HIGHPART (work);
740 num[i + j] = LOWPART (work);
743 num [num_hi_sig] += carry;
746 /* Store the quotient digit. */
751 decode (quo, lquo, hquo);
/* If the result should be negative, negate it.  */
756 neg_double (*lquo, *hquo, lquo, hquo);
758 /* Compute trial remainder: rem = num - (quo * den) */
759 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
760 neg_double (*lrem, *hrem, lrem, hrem);
761 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
766 case TRUNC_MOD_EXPR: /* round toward zero */
767 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
771 case FLOOR_MOD_EXPR: /* round toward negative infinity */
772 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
775 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
783 case CEIL_MOD_EXPR: /* round toward positive infinity */
784 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
794 case ROUND_MOD_EXPR: /* round to closest integer */
796 unsigned HOST_WIDE_INT labs_rem = *lrem;
797 HOST_WIDE_INT habs_rem = *hrem;
798 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
799 HOST_WIDE_INT habs_den = hden, htwice;
801 /* Get absolute values. */
803 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
805 neg_double (lden, hden, &labs_den, &habs_den);
807 /* If (2 * abs (lrem) >= abs (lden)) */
808 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
809 labs_rem, habs_rem, <wice, &htwice);
811 if (((unsigned HOST_WIDE_INT) habs_den
812 < (unsigned HOST_WIDE_INT) htwice)
813 || (((unsigned HOST_WIDE_INT) habs_den
814 == (unsigned HOST_WIDE_INT) htwice)
&& (labs_den <= ltwice)))
819 add_double (*lquo, *hquo,
820 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
823 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
835 /* Compute true remainder: rem = num - (quo * den) */
836 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
837 neg_double (*lrem, *hrem, lrem, hrem);
838 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
842 /* If ARG2 divides ARG1 with zero remainder, carries out the division
843 of type CODE and returns the quotient.
844 Otherwise returns NULL_TREE. */
847 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
849 unsigned HOST_WIDE_INT int1l, int2l;
850 HOST_WIDE_INT int1h, int2h;
851 unsigned HOST_WIDE_INT quol, reml;
852 HOST_WIDE_INT quoh, remh;
853 tree type = TREE_TYPE (arg1);
854 int uns = TYPE_UNSIGNED (type);
856 int1l = TREE_INT_CST_LOW (arg1);
857 int1h = TREE_INT_CST_HIGH (arg1);
858 int2l = TREE_INT_CST_LOW (arg2);
859 int2h = TREE_INT_CST_HIGH (arg2);
861 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
862 &quol, &quoh, &reml, &remh);
863 if (remh != 0 || reml != 0)
866 return build_int_cst_wide (type, quol, quoh);
869 /* Return true if the built-in mathematical function specified by CODE
870 is odd, i.e. -f(x) == f(-x). */
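/* For example, sin is odd since sin (-x) == -sin (x), so the SIN cases
   below return true, whereas an even function such as cos does not appear
   in the list.  */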
873 negate_mathfn_p (enum built_in_function code)
877 CASE_FLT_FN (BUILT_IN_ASIN):
878 CASE_FLT_FN (BUILT_IN_ASINH):
879 CASE_FLT_FN (BUILT_IN_ATAN):
880 CASE_FLT_FN (BUILT_IN_ATANH):
881 CASE_FLT_FN (BUILT_IN_CBRT):
882 CASE_FLT_FN (BUILT_IN_SIN):
883 CASE_FLT_FN (BUILT_IN_SINH):
884 CASE_FLT_FN (BUILT_IN_TAN):
885 CASE_FLT_FN (BUILT_IN_TANH):
/* Check whether we may negate an integer constant T without causing
   overflow.  */
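/* For a 32-bit signed type, for instance, the only value that cannot be
   negated without overflow is -2147483648, whose negation is not
   representable in the same type.  */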
898 may_negate_without_overflow_p (tree t)
900 unsigned HOST_WIDE_INT val;
904 gcc_assert (TREE_CODE (t) == INTEGER_CST);
906 type = TREE_TYPE (t);
907 if (TYPE_UNSIGNED (type))
910 prec = TYPE_PRECISION (type);
911 if (prec > HOST_BITS_PER_WIDE_INT)
913 if (TREE_INT_CST_LOW (t) != 0)
915 prec -= HOST_BITS_PER_WIDE_INT;
916 val = TREE_INT_CST_HIGH (t);
919 val = TREE_INT_CST_LOW (t);
920 if (prec < HOST_BITS_PER_WIDE_INT)
921 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
922 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
925 /* Determine whether an expression T can be cheaply negated using
926 the function negate_expr. */
929 negate_expr_p (tree t)
936 type = TREE_TYPE (t);
939 switch (TREE_CODE (t))
942 if (TYPE_UNSIGNED (type) || ! flag_trapv)
945 /* Check that -CST will not overflow type. */
946 return may_negate_without_overflow_p (t);
948 return INTEGRAL_TYPE_P (type)
949 && (TYPE_UNSIGNED (type)
950 || (flag_wrapv && !flag_trapv));
957 return negate_expr_p (TREE_REALPART (t))
958 && negate_expr_p (TREE_IMAGPART (t));
961 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
963 /* -(A + B) -> (-B) - A. */
964 if (negate_expr_p (TREE_OPERAND (t, 1))
965 && reorder_operands_p (TREE_OPERAND (t, 0),
966 TREE_OPERAND (t, 1)))
968 /* -(A + B) -> (-A) - B. */
969 return negate_expr_p (TREE_OPERAND (t, 0));
972 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
973 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
974 && reorder_operands_p (TREE_OPERAND (t, 0),
975 TREE_OPERAND (t, 1));
978 if (TYPE_UNSIGNED (TREE_TYPE (t)))
984 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
985 return negate_expr_p (TREE_OPERAND (t, 1))
986 || negate_expr_p (TREE_OPERAND (t, 0));
994 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
996 return negate_expr_p (TREE_OPERAND (t, 1))
997 || negate_expr_p (TREE_OPERAND (t, 0));
1000 /* Negate -((double)float) as (double)(-float). */
1001 if (TREE_CODE (type) == REAL_TYPE)
1003 tree tem = strip_float_extensions (t);
1005 return negate_expr_p (tem);
1010 /* Negate -f(x) as f(-x). */
1011 if (negate_mathfn_p (builtin_mathfn_code (t)))
1012 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1016 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1017 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1019 tree op1 = TREE_OPERAND (t, 1);
1020 if (TREE_INT_CST_HIGH (op1) == 0
1021 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1022 == TREE_INT_CST_LOW (op1))
1033 /* Given T, an expression, return the negation of T. Allow for T to be
1034 null, in which case return null. */
1037 negate_expr (tree t)
1045 type = TREE_TYPE (t);
1046 STRIP_SIGN_NOPS (t);
1048 switch (TREE_CODE (t))
1050 /* Convert - (~A) to A + 1. */
1052 if (INTEGRAL_TYPE_P (type)
1053 && (TYPE_UNSIGNED (type)
1054 || (flag_wrapv && !flag_trapv)))
1055 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1056 build_int_cst (type, 1));
1060 tem = fold_negate_const (t, type);
1061 if (! TREE_OVERFLOW (tem)
1062 || TYPE_UNSIGNED (type)
1068 tem = fold_negate_const (t, type);
1069 /* Two's complement FP formats, such as c4x, may overflow. */
1070 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1071 return fold_convert (type, tem);
1076 tree rpart = negate_expr (TREE_REALPART (t));
1077 tree ipart = negate_expr (TREE_IMAGPART (t));
1079 if ((TREE_CODE (rpart) == REAL_CST
1080 && TREE_CODE (ipart) == REAL_CST)
1081 || (TREE_CODE (rpart) == INTEGER_CST
1082 && TREE_CODE (ipart) == INTEGER_CST))
1083 return build_complex (type, rpart, ipart);
1088 return fold_convert (type, TREE_OPERAND (t, 0));
1091 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1093 /* -(A + B) -> (-B) - A. */
1094 if (negate_expr_p (TREE_OPERAND (t, 1))
1095 && reorder_operands_p (TREE_OPERAND (t, 0),
1096 TREE_OPERAND (t, 1)))
1098 tem = negate_expr (TREE_OPERAND (t, 1));
1099 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1100 tem, TREE_OPERAND (t, 0));
1101 return fold_convert (type, tem);
1104 /* -(A + B) -> (-A) - B. */
1105 if (negate_expr_p (TREE_OPERAND (t, 0)))
1107 tem = negate_expr (TREE_OPERAND (t, 0));
1108 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1109 tem, TREE_OPERAND (t, 1));
1110 return fold_convert (type, tem);
1116 /* - (A - B) -> B - A */
1117 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1118 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1119 return fold_convert (type,
1120 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1121 TREE_OPERAND (t, 1),
1122 TREE_OPERAND (t, 0)));
1126 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1132 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1134 tem = TREE_OPERAND (t, 1);
1135 if (negate_expr_p (tem))
1136 return fold_convert (type,
1137 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1138 TREE_OPERAND (t, 0),
1139 negate_expr (tem)));
1140 tem = TREE_OPERAND (t, 0);
1141 if (negate_expr_p (tem))
1142 return fold_convert (type,
1143 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1145 TREE_OPERAND (t, 1)));
1149 case TRUNC_DIV_EXPR:
1150 case ROUND_DIV_EXPR:
1151 case FLOOR_DIV_EXPR:
1153 case EXACT_DIV_EXPR:
1154 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1156 tem = TREE_OPERAND (t, 1);
1157 if (negate_expr_p (tem))
1158 return fold_convert (type,
1159 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1160 TREE_OPERAND (t, 0),
1161 negate_expr (tem)));
1162 tem = TREE_OPERAND (t, 0);
1163 if (negate_expr_p (tem))
1164 return fold_convert (type,
1165 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1167 TREE_OPERAND (t, 1)));
1172 /* Convert -((double)float) into (double)(-float). */
1173 if (TREE_CODE (type) == REAL_TYPE)
1175 tem = strip_float_extensions (t);
1176 if (tem != t && negate_expr_p (tem))
1177 return fold_convert (type, negate_expr (tem));
1182 /* Negate -f(x) as f(-x). */
1183 if (negate_mathfn_p (builtin_mathfn_code (t))
1184 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1186 tree fndecl, arg, arglist;
1188 fndecl = get_callee_fndecl (t);
1189 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1190 arglist = build_tree_list (NULL_TREE, arg);
1191 return build_function_call_expr (fndecl, arglist);
1196 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1197 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1199 tree op1 = TREE_OPERAND (t, 1);
1200 if (TREE_INT_CST_HIGH (op1) == 0
1201 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1202 == TREE_INT_CST_LOW (op1))
1204 tree ntype = TYPE_UNSIGNED (type)
1205 ? lang_hooks.types.signed_type (type)
1206 : lang_hooks.types.unsigned_type (type);
1207 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1208 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1209 return fold_convert (type, temp);
1218 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1219 return fold_convert (type, tem);
/* Split a tree IN into constant, literal and variable parts that could be
1223 combined with CODE to make IN. "constant" means an expression with
1224 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1225 commutative arithmetic operation. Store the constant part into *CONP,
1226 the literal in *LITP and return the variable part. If a part isn't
1227 present, set it to null. If the tree does not decompose in this way,
1228 return the entire tree as the variable part and the other parts as null.
1230 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1231 case, we negate an operand that was subtracted. Except if it is a
1232 literal for which we use *MINUS_LITP instead.
1234 If NEGATE_P is true, we are negating all of IN, again except a literal
1235 for which we use *MINUS_LITP instead.
1237 If IN is itself a literal or constant, return it as appropriate.
1239 Note that we do not guarantee that any of the three values will be the
1240 same type as IN, but they will have the same signedness and mode. */
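/* For example, with CODE == PLUS_EXPR, splitting the tree A + 3 returns A
   and sets *LITP to 3, while splitting A - 3 returns A and sets
   *MINUS_LITP to 3.  A TREE_CONSTANT but non-literal operand, such as an
   address constant, would instead be stored through *CONP.  */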
1243 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1244 tree *minus_litp, int negate_p)
1252 /* Strip any conversions that don't change the machine mode or signedness. */
1253 STRIP_SIGN_NOPS (in);
1255 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1257 else if (TREE_CODE (in) == code
1258 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1259 /* We can associate addition and subtraction together (even
1260 though the C standard doesn't say so) for integers because
1261 the value is not affected. For reals, the value might be
1262 affected, so we can't. */
1263 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1264 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1266 tree op0 = TREE_OPERAND (in, 0);
1267 tree op1 = TREE_OPERAND (in, 1);
1268 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1269 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1271 /* First see if either of the operands is a literal, then a constant. */
1272 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1273 *litp = op0, op0 = 0;
1274 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1275 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1277 if (op0 != 0 && TREE_CONSTANT (op0))
1278 *conp = op0, op0 = 0;
1279 else if (op1 != 0 && TREE_CONSTANT (op1))
1280 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1282 /* If we haven't dealt with either operand, this is not a case we can
1283 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1284 if (op0 != 0 && op1 != 0)
1289 var = op1, neg_var_p = neg1_p;
1291 /* Now do any needed negations. */
1293 *minus_litp = *litp, *litp = 0;
1295 *conp = negate_expr (*conp);
1297 var = negate_expr (var);
1299 else if (TREE_CONSTANT (in))
1307 *minus_litp = *litp, *litp = 0;
1308 else if (*minus_litp)
1309 *litp = *minus_litp, *minus_litp = 0;
1310 *conp = negate_expr (*conp);
1311 var = negate_expr (var);
1317 /* Re-associate trees split by the above function. T1 and T2 are either
1318 expressions to associate or null. Return the new expression, if any. If
1319 we build an operation, do it in TYPE and with CODE. */
1322 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1329 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1330 try to fold this since we will have infinite recursion. But do
1331 deal with any NEGATE_EXPRs. */
1332 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1333 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1335 if (code == PLUS_EXPR)
1337 if (TREE_CODE (t1) == NEGATE_EXPR)
1338 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1339 fold_convert (type, TREE_OPERAND (t1, 0)));
1340 else if (TREE_CODE (t2) == NEGATE_EXPR)
1341 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1342 fold_convert (type, TREE_OPERAND (t2, 0)));
1343 else if (integer_zerop (t2))
1344 return fold_convert (type, t1);
1346 else if (code == MINUS_EXPR)
1348 if (integer_zerop (t2))
1349 return fold_convert (type, t1);
1352 return build2 (code, type, fold_convert (type, t1),
1353 fold_convert (type, t2));
1356 return fold_build2 (code, type, fold_convert (type, t1),
1357 fold_convert (type, t2));
1360 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1361 to produce a new constant. Return NULL_TREE if we don't know how
1362 to evaluate CODE at compile-time.
1364 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1367 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1369 unsigned HOST_WIDE_INT int1l, int2l;
1370 HOST_WIDE_INT int1h, int2h;
1371 unsigned HOST_WIDE_INT low;
1373 unsigned HOST_WIDE_INT garbagel;
1374 HOST_WIDE_INT garbageh;
1376 tree type = TREE_TYPE (arg1);
1377 int uns = TYPE_UNSIGNED (type);
1379 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1382 int1l = TREE_INT_CST_LOW (arg1);
1383 int1h = TREE_INT_CST_HIGH (arg1);
1384 int2l = TREE_INT_CST_LOW (arg2);
1385 int2h = TREE_INT_CST_HIGH (arg2);
1390 low = int1l | int2l, hi = int1h | int2h;
1394 low = int1l ^ int2l, hi = int1h ^ int2h;
1398 low = int1l & int2l, hi = int1h & int2h;
1404 /* It's unclear from the C standard whether shifts can overflow.
1405 The following code ignores overflow; perhaps a C standard
1406 interpretation ruling is needed. */
1407 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1414 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1419 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1423 neg_double (int2l, int2h, &low, &hi);
1424 add_double (int1l, int1h, low, hi, &low, &hi);
1425 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1429 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1432 case TRUNC_DIV_EXPR:
1433 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1434 case EXACT_DIV_EXPR:
1435 /* This is a shortcut for a common special case. */
1436 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1437 && ! TREE_CONSTANT_OVERFLOW (arg1)
1438 && ! TREE_CONSTANT_OVERFLOW (arg2)
1439 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1441 if (code == CEIL_DIV_EXPR)
1444 low = int1l / int2l, hi = 0;
1448 /* ... fall through ... */
1450 case ROUND_DIV_EXPR:
1451 if (int2h == 0 && int2l == 0)
1453 if (int2h == 0 && int2l == 1)
1455 low = int1l, hi = int1h;
1458 if (int1l == int2l && int1h == int2h
1459 && ! (int1l == 0 && int1h == 0))
1464 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1465 &low, &hi, &garbagel, &garbageh);
1468 case TRUNC_MOD_EXPR:
1469 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1470 /* This is a shortcut for a common special case. */
1471 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1472 && ! TREE_CONSTANT_OVERFLOW (arg1)
1473 && ! TREE_CONSTANT_OVERFLOW (arg2)
1474 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1476 if (code == CEIL_MOD_EXPR)
1478 low = int1l % int2l, hi = 0;
1482 /* ... fall through ... */
1484 case ROUND_MOD_EXPR:
1485 if (int2h == 0 && int2l == 0)
1487 overflow = div_and_round_double (code, uns,
1488 int1l, int1h, int2l, int2h,
1489 &garbagel, &garbageh, &low, &hi);
1495 low = (((unsigned HOST_WIDE_INT) int1h
1496 < (unsigned HOST_WIDE_INT) int2h)
1497 || (((unsigned HOST_WIDE_INT) int1h
1498 == (unsigned HOST_WIDE_INT) int2h)
1501 low = (int1h < int2h
1502 || (int1h == int2h && int1l < int2l));
1504 if (low == (code == MIN_EXPR))
1505 low = int1l, hi = int1h;
1507 low = int2l, hi = int2h;
1514 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1518 /* Propagate overflow flags ourselves. */
1519 if (((!uns || is_sizetype) && overflow)
1520 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1523 TREE_OVERFLOW (t) = 1;
1524 TREE_CONSTANT_OVERFLOW (t) = 1;
1526 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1529 TREE_CONSTANT_OVERFLOW (t) = 1;
1533 t = force_fit_type (t, 1,
1534 ((!uns || is_sizetype) && overflow)
1535 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1536 TREE_CONSTANT_OVERFLOW (arg1)
1537 | TREE_CONSTANT_OVERFLOW (arg2));
1542 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1543 constant. We assume ARG1 and ARG2 have the same data type, or at least
1544 are the same kind of constant and the same machine mode.
1546 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1549 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1554 if (TREE_CODE (arg1) == INTEGER_CST)
1555 return int_const_binop (code, arg1, arg2, notrunc);
1557 if (TREE_CODE (arg1) == REAL_CST)
1559 enum machine_mode mode;
1562 REAL_VALUE_TYPE value;
1563 REAL_VALUE_TYPE result;
1567 /* The following codes are handled by real_arithmetic. */
1582 d1 = TREE_REAL_CST (arg1);
1583 d2 = TREE_REAL_CST (arg2);
1585 type = TREE_TYPE (arg1);
1586 mode = TYPE_MODE (type);
1588 /* Don't perform operation if we honor signaling NaNs and
1589 either operand is a NaN. */
1590 if (HONOR_SNANS (mode)
1591 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1594 /* Don't perform operation if it would raise a division
1595 by zero exception. */
1596 if (code == RDIV_EXPR
1597 && REAL_VALUES_EQUAL (d2, dconst0)
1598 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1601 /* If either operand is a NaN, just return it. Otherwise, set up
1602 for floating-point trap; we return an overflow. */
1603 if (REAL_VALUE_ISNAN (d1))
1605 else if (REAL_VALUE_ISNAN (d2))
1608 inexact = real_arithmetic (&value, code, &d1, &d2);
1609 real_convert (&result, mode, &value);
1611 /* Don't constant fold this floating point operation if
the result has overflowed and flag_trapping_math is set.  */
1614 if (flag_trapping_math
1615 && MODE_HAS_INFINITIES (mode)
1616 && REAL_VALUE_ISINF (result)
1617 && !REAL_VALUE_ISINF (d1)
1618 && !REAL_VALUE_ISINF (d2))
1621 /* Don't constant fold this floating point operation if the
result may depend upon the run-time rounding mode and
1623 flag_rounding_math is set, or if GCC's software emulation
1624 is unable to accurately represent the result. */
1626 if ((flag_rounding_math
1627 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1628 && !flag_unsafe_math_optimizations))
1629 && (inexact || !real_identical (&result, &value)))
1632 t = build_real (type, result);
1634 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1635 TREE_CONSTANT_OVERFLOW (t)
1637 | TREE_CONSTANT_OVERFLOW (arg1)
1638 | TREE_CONSTANT_OVERFLOW (arg2);
1642 if (TREE_CODE (arg1) == COMPLEX_CST)
1644 tree type = TREE_TYPE (arg1);
1645 tree r1 = TREE_REALPART (arg1);
1646 tree i1 = TREE_IMAGPART (arg1);
1647 tree r2 = TREE_REALPART (arg2);
1648 tree i2 = TREE_IMAGPART (arg2);
1654 t = build_complex (type,
1655 const_binop (PLUS_EXPR, r1, r2, notrunc),
1656 const_binop (PLUS_EXPR, i1, i2, notrunc));
1660 t = build_complex (type,
1661 const_binop (MINUS_EXPR, r1, r2, notrunc),
1662 const_binop (MINUS_EXPR, i1, i2, notrunc));
1666 t = build_complex (type,
1667 const_binop (MINUS_EXPR,
1668 const_binop (MULT_EXPR,
1670 const_binop (MULT_EXPR,
1673 const_binop (PLUS_EXPR,
1674 const_binop (MULT_EXPR,
1676 const_binop (MULT_EXPR,
1683 tree t1, t2, real, imag;
1685 = const_binop (PLUS_EXPR,
1686 const_binop (MULT_EXPR, r2, r2, notrunc),
1687 const_binop (MULT_EXPR, i2, i2, notrunc),
1690 t1 = const_binop (PLUS_EXPR,
1691 const_binop (MULT_EXPR, r1, r2, notrunc),
1692 const_binop (MULT_EXPR, i1, i2, notrunc),
1694 t2 = const_binop (MINUS_EXPR,
1695 const_binop (MULT_EXPR, i1, r2, notrunc),
1696 const_binop (MULT_EXPR, r1, i2, notrunc),
1699 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1701 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1702 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1706 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1707 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1712 t = build_complex (type, real, imag);
1724 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1725 indicates which particular sizetype to create. */
1728 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1730 return build_int_cst (sizetype_tab[(int) kind], number);
1733 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1734 is a tree code. The type of the result is taken from the operands.
Both must be the same integer type, and it must be a size type.
1736 If the operands are constant, so is the result. */
1739 size_binop (enum tree_code code, tree arg0, tree arg1)
1741 tree type = TREE_TYPE (arg0);
1743 if (arg0 == error_mark_node || arg1 == error_mark_node)
1744 return error_mark_node;
1746 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1747 && type == TREE_TYPE (arg1));
1749 /* Handle the special case of two integer constants faster. */
1750 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1752 /* And some specific cases even faster than that. */
1753 if (code == PLUS_EXPR && integer_zerop (arg0))
1755 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1756 && integer_zerop (arg1))
1758 else if (code == MULT_EXPR && integer_onep (arg0))
1761 /* Handle general case of two integer constants. */
1762 return int_const_binop (code, arg0, arg1, 0);
1765 return fold_build2 (code, type, arg0, arg1);
1768 /* Given two values, either both of sizetype or both of bitsizetype,
1769 compute the difference between the two values. Return the value
1770 in signed type corresponding to the type of the operands. */
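/* For instance, given the sizetype constants 2 and 5, the result is the
   ssizetype constant -3, computed without relying on unsigned
   wrap-around.  */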
1773 size_diffop (tree arg0, tree arg1)
1775 tree type = TREE_TYPE (arg0);
1778 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1779 && type == TREE_TYPE (arg1));
1781 /* If the type is already signed, just do the simple thing. */
1782 if (!TYPE_UNSIGNED (type))
1783 return size_binop (MINUS_EXPR, arg0, arg1);
1785 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1787 /* If either operand is not a constant, do the conversions to the signed
1788 type and subtract. The hardware will do the right thing with any
1789 overflow in the subtraction. */
1790 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1791 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1792 fold_convert (ctype, arg1));
1794 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1795 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1796 overflow) and negate (which can't either). Special-case a result
1797 of zero while we're here. */
1798 if (tree_int_cst_equal (arg0, arg1))
1799 return build_int_cst (ctype, 0);
1800 else if (tree_int_cst_lt (arg1, arg0))
1801 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1803 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1804 fold_convert (ctype, size_binop (MINUS_EXPR,
1808 /* A subroutine of fold_convert_const handling conversions of an
1809 INTEGER_CST to another integer type. */
1812 fold_convert_const_int_from_int (tree type, tree arg1)
1816 /* Given an integer constant, make new constant with new type,
1817 appropriately sign-extended or truncated. */
1818 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1819 TREE_INT_CST_HIGH (arg1));
1821 t = force_fit_type (t,
1822 /* Don't set the overflow when
1823 converting a pointer */
1824 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1825 (TREE_INT_CST_HIGH (arg1) < 0
1826 && (TYPE_UNSIGNED (type)
1827 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1828 | TREE_OVERFLOW (arg1),
1829 TREE_CONSTANT_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1835 to an integer type. */
1838 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1843 /* The following code implements the floating point to integer
1844 conversion rules required by the Java Language Specification,
1845 that IEEE NaNs are mapped to zero and values that overflow
1846 the target precision saturate, i.e. values greater than
1847 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1848 are mapped to INT_MIN. These semantics are allowed by the
1849 C and C++ standards that simply state that the behavior of
1850 FP-to-integer conversion is unspecified upon overflow. */
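/* Concretely, under these rules a REAL_CST of 1.0e30 converted to a 32-bit
   signed type saturates to 2147483647 (with the overflow flags set on the
   result), and a NaN converts to zero, likewise flagged.  */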
1852 HOST_WIDE_INT high, low;
1854 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1858 case FIX_TRUNC_EXPR:
1859 real_trunc (&r, VOIDmode, &x);
1863 real_ceil (&r, VOIDmode, &x);
1866 case FIX_FLOOR_EXPR:
1867 real_floor (&r, VOIDmode, &x);
1870 case FIX_ROUND_EXPR:
1871 real_round (&r, VOIDmode, &x);
1878 /* If R is NaN, return zero and show we have an overflow. */
1879 if (REAL_VALUE_ISNAN (r))
/* See if R is less than the lower bound or greater than the
   upper bound.  */
1891 tree lt = TYPE_MIN_VALUE (type);
1892 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1893 if (REAL_VALUES_LESS (r, l))
1896 high = TREE_INT_CST_HIGH (lt);
1897 low = TREE_INT_CST_LOW (lt);
1903 tree ut = TYPE_MAX_VALUE (type);
1906 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1907 if (REAL_VALUES_LESS (u, r))
1910 high = TREE_INT_CST_HIGH (ut);
1911 low = TREE_INT_CST_LOW (ut);
1917 REAL_VALUE_TO_INT (&low, &high, r);
1919 t = build_int_cst_wide (type, low, high);
1921 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1922 TREE_CONSTANT_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1927 to another floating point type. */
1930 fold_convert_const_real_from_real (tree type, tree arg1)
1932 REAL_VALUE_TYPE value;
1935 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1936 t = build_real (type, value);
1938 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1939 TREE_CONSTANT_OVERFLOW (t)
1940 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1944 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1945 type TYPE. If no simplification can be done return NULL_TREE. */
1948 fold_convert_const (enum tree_code code, tree type, tree arg1)
1950 if (TREE_TYPE (arg1) == type)
1953 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1955 if (TREE_CODE (arg1) == INTEGER_CST)
1956 return fold_convert_const_int_from_int (type, arg1);
1957 else if (TREE_CODE (arg1) == REAL_CST)
1958 return fold_convert_const_int_from_real (code, type, arg1);
1960 else if (TREE_CODE (type) == REAL_TYPE)
1962 if (TREE_CODE (arg1) == INTEGER_CST)
1963 return build_real_from_int_cst (type, arg1);
1964 if (TREE_CODE (arg1) == REAL_CST)
1965 return fold_convert_const_real_from_real (type, arg1);
1970 /* Construct a vector of zero elements of vector type TYPE. */
1973 build_zero_vector (tree type)
1978 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1979 units = TYPE_VECTOR_SUBPARTS (type);
1982 for (i = 0; i < units; i++)
1983 list = tree_cons (NULL_TREE, elem, list);
1984 return build_vector (type, list);
1987 /* Convert expression ARG to type TYPE. Used by the middle-end for
1988 simple conversions in preference to calling the front-end's convert. */
1991 fold_convert (tree type, tree arg)
1993 tree orig = TREE_TYPE (arg);
1999 if (TREE_CODE (arg) == ERROR_MARK
2000 || TREE_CODE (type) == ERROR_MARK
2001 || TREE_CODE (orig) == ERROR_MARK)
2002 return error_mark_node;
2004 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2005 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2006 TYPE_MAIN_VARIANT (orig)))
2007 return fold_build1 (NOP_EXPR, type, arg);
2009 switch (TREE_CODE (type))
2011 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2012 case POINTER_TYPE: case REFERENCE_TYPE:
2014 if (TREE_CODE (arg) == INTEGER_CST)
2016 tem = fold_convert_const (NOP_EXPR, type, arg);
2017 if (tem != NULL_TREE)
2020 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == OFFSET_TYPE)
2022 return fold_build1 (NOP_EXPR, type, arg);
2023 if (TREE_CODE (orig) == COMPLEX_TYPE)
2025 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert (type, tem);
2028 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2029 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 return fold_build1 (NOP_EXPR, type, arg);
2033 if (TREE_CODE (arg) == INTEGER_CST)
2035 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2036 if (tem != NULL_TREE)
2039 else if (TREE_CODE (arg) == REAL_CST)
2041 tem = fold_convert_const (NOP_EXPR, type, arg);
2042 if (tem != NULL_TREE)
2046 switch (TREE_CODE (orig))
2049 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2050 case POINTER_TYPE: case REFERENCE_TYPE:
2051 return fold_build1 (FLOAT_EXPR, type, arg);
2054 return fold_build1 (NOP_EXPR, type, arg);
2057 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2058 return fold_convert (type, tem);
2065 switch (TREE_CODE (orig))
2068 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2069 case POINTER_TYPE: case REFERENCE_TYPE:
2071 return build2 (COMPLEX_EXPR, type,
2072 fold_convert (TREE_TYPE (type), arg),
2073 fold_convert (TREE_TYPE (type), integer_zero_node));
2078 if (TREE_CODE (arg) == COMPLEX_EXPR)
2080 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2081 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 arg = save_expr (arg);
2086 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2087 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2088 rpart = fold_convert (TREE_TYPE (type), rpart);
2089 ipart = fold_convert (TREE_TYPE (type), ipart);
2090 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2098 if (integer_zerop (arg))
2099 return build_zero_vector (type);
2100 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2101 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2102 || TREE_CODE (orig) == VECTOR_TYPE);
2103 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2106 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */
2117 maybe_lvalue_p (tree x)
2119 /* We only need to wrap lvalue tree codes. */
2120 switch (TREE_CODE (x))
2131 case ALIGN_INDIRECT_REF:
2132 case MISALIGNED_INDIRECT_REF:
2134 case ARRAY_RANGE_REF:
2140 case PREINCREMENT_EXPR:
2141 case PREDECREMENT_EXPR:
2143 case TRY_CATCH_EXPR:
2144 case WITH_CLEANUP_EXPR:
2155 /* Assume the worst for front-end tree codes. */
2156 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2164 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
   us.  */
2174 if (! maybe_lvalue_p (x))
2176 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2179 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2180 Zero means allow extended lvalues. */
2182 int pedantic_lvalues;
2184 /* When pedantic, return an expr equal to X but certainly not valid as a
2185 pedantic lvalue. Otherwise, return X. */
2188 pedantic_non_lvalue (tree x)
2190 if (pedantic_lvalues)
2191 return non_lvalue (x);
2196 /* Given a tree comparison code, return the code that is the logical inverse
2197 of the given code. It is not safe to do this for floating-point
comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag telling
whether NaNs must be honored as well: if reversing the comparison is
unsafe, return ERROR_MARK.  */
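/* For example, LT_EXPR inverts to GE_EXPR when NaNs need not be honored
   and to UNGE_EXPR when they must; when NaNs are honored and
   flag_trapping_math is set, no inversion is attempted and ERROR_MARK is
   returned.  */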
2202 invert_tree_comparison (enum tree_code code, bool honor_nans)
2204 if (honor_nans && flag_trapping_math)
2214 return honor_nans ? UNLE_EXPR : LE_EXPR;
2216 return honor_nans ? UNLT_EXPR : LT_EXPR;
2218 return honor_nans ? UNGE_EXPR : GE_EXPR;
2220 return honor_nans ? UNGT_EXPR : GT_EXPR;
2234 return UNORDERED_EXPR;
2235 case UNORDERED_EXPR:
2236 return ORDERED_EXPR;
2242 /* Similar, but return the comparison that results if the operands are
2243 swapped. This is safe for floating-point. */
2246 swap_tree_comparison (enum tree_code code)
2253 case UNORDERED_EXPR:
2279 /* Convert a comparison tree code from an enum tree_code representation
2280 into a compcode bit-based encoding. This function is the inverse of
2281 compcode_to_comparison. */
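/* For instance, assuming the usual bit assignment in which the "less",
   "equal" and "greater" conditions occupy separate bits, LE_EXPR maps to
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ); this is what allows
   combine_comparisons to merge two tests with plain bitwise AND/OR.  */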
2283 static enum comparison_code
2284 comparison_to_compcode (enum tree_code code)
2301 return COMPCODE_ORD;
2302 case UNORDERED_EXPR:
2303 return COMPCODE_UNORD;
2305 return COMPCODE_UNLT;
2307 return COMPCODE_UNEQ;
2309 return COMPCODE_UNLE;
2311 return COMPCODE_UNGT;
2313 return COMPCODE_LTGT;
2315 return COMPCODE_UNGE;
2321 /* Convert a compcode bit-based encoding of a comparison operator back
2322 to GCC's enum tree_code representation. This function is the
2323 inverse of comparison_to_compcode. */
2325 static enum tree_code
2326 compcode_to_comparison (enum comparison_code code)
2343 return ORDERED_EXPR;
2344 case COMPCODE_UNORD:
2345 return UNORDERED_EXPR;
2363 /* Return a tree for the comparison which is the combination of
2364 doing the AND or OR (depending on CODE) of the two operations LCODE
2365 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2366 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2367 if this makes the transformation invalid. */
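/* For example, on operands whose mode has no NaNs, combining LT_EXPR and
   EQ_EXPR on the same operands with TRUTH_ORIF_EXPR yields a single
   LE_EXPR comparison.  */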
2370 combine_comparisons (enum tree_code code, enum tree_code lcode,
2371 enum tree_code rcode, tree truth_type,
2372 tree ll_arg, tree lr_arg)
2374 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2375 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2376 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2377 enum comparison_code compcode;
2381 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2382 compcode = lcompcode & rcompcode;
2385 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2386 compcode = lcompcode | rcompcode;
2395 /* Eliminate unordered comparisons, as well as LTGT and ORD
2396 which are not used unless the mode has NaNs. */
2397 compcode &= ~COMPCODE_UNORD;
2398 if (compcode == COMPCODE_LTGT)
2399 compcode = COMPCODE_NE;
2400 else if (compcode == COMPCODE_ORD)
2401 compcode = COMPCODE_TRUE;
2403 else if (flag_trapping_math)
2405 /* Check that the original operation and the optimized ones will trap
2406 under the same condition. */
2407 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2408 && (lcompcode != COMPCODE_EQ)
2409 && (lcompcode != COMPCODE_ORD);
2410 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2411 && (rcompcode != COMPCODE_EQ)
2412 && (rcompcode != COMPCODE_ORD);
2413 bool trap = (compcode & COMPCODE_UNORD) == 0
2414 && (compcode != COMPCODE_EQ)
2415 && (compcode != COMPCODE_ORD);
2417 /* In a short-circuited boolean expression the LHS might be
2418 such that the RHS, if evaluated, will never trap. For
2419 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2420 if neither x nor y is NaN. (This is a mixed blessing: for
2421 example, the expression above will never trap, hence
2422 optimizing it to x < y would be invalid). */
2423 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2424 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2427 /* If the comparison was short-circuited, and only the RHS
2428 trapped, we may now generate a spurious trap. */
2430 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2433 /* If we changed the conditions that cause a trap, we lose. */
2434 if ((ltrap || rtrap) != trap)
2438 if (compcode == COMPCODE_TRUE)
2439 return constant_boolean_node (true, truth_type);
2440 else if (compcode == COMPCODE_FALSE)
2441 return constant_boolean_node (false, truth_type);
2443 return fold_build2 (compcode_to_comparison (compcode),
2444 truth_type, ll_arg, lr_arg);
2447 /* Return nonzero if CODE is a tree code that represents a truth value. */
2450 truth_value_p (enum tree_code code)
2452 return (TREE_CODE_CLASS (code) == tcc_comparison
2453 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2454 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2455 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2458 /* Return nonzero if two operands (typically of the same tree node)
2459 are necessarily equal. If either argument has side-effects this
2460 function returns zero. FLAGS modifies behavior as follows:
2462 If OEP_ONLY_CONST is set, only return nonzero for constants.
2463 This function tests whether the operands are indistinguishable;
2464 it does not test whether they are equal using C's == operation.
2465 The distinction is important for IEEE floating point, because
2466 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2467 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2469 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2470 even though it may hold multiple values during a function.
2471 This is because a GCC tree node guarantees that nothing else is
2472 executed between the evaluation of its "operands" (which may often
2473 be evaluated in arbitrary order). Hence if the operands themselves
2474 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2475 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2476 unset means assuming isochronic (or instantaneous) tree equivalence.
2477 Unless comparing arbitrary expression trees, such as from different
2478 statements, this flag can usually be left unset.
2480 If OEP_PURE_SAME is set, then pure functions with identical arguments
2481 are considered the same. It is used when the caller has other ways
2482 to ensure that global memory is unchanged in between. */
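/* For example (illustrative values, not from this file): two REAL_CST
   nodes for 0.0 and -0.0 are not operand_equal_p, even though the values
   compare equal with ==, because REAL_VALUES_IDENTICAL distinguishes the
   sign of zero; and a non-volatile VAR_DECL is considered equal to itself
   unless OEP_ONLY_CONST is set.  */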
2485 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2487 /* If either is ERROR_MARK, they aren't equal. */
2488 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2491 /* If both types don't have the same signedness, then we can't consider
2492 them equal. We must check this before the STRIP_NOPS calls
2493 because they may change the signedness of the arguments. */
2494 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2500 /* In case both args are comparisons but with different comparison
2501 code, try to swap the comparison operands of one arg to produce
2502 a match and compare that variant. */
2503 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2504 && COMPARISON_CLASS_P (arg0)
2505 && COMPARISON_CLASS_P (arg1))
2507 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2509 if (TREE_CODE (arg0) == swap_code)
2510 return operand_equal_p (TREE_OPERAND (arg0, 0),
2511 TREE_OPERAND (arg1, 1), flags)
2512 && operand_equal_p (TREE_OPERAND (arg0, 1),
2513 TREE_OPERAND (arg1, 0), flags);
2516 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2517 /* This is needed for conversions and for COMPONENT_REF.
2518 Might as well play it safe and always test this. */
2519 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2520 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2521 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2524 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2525 We don't care about side effects in that case because the SAVE_EXPR
2526 takes care of that for us. In all other cases, two expressions are
2527 equal if they have no side effects. If we have two identical
2528 expressions with side effects that should be treated the same due
2529 to the only side effects being identical SAVE_EXPR's, that will
2530 be detected in the recursive calls below. */
2531 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2532 && (TREE_CODE (arg0) == SAVE_EXPR
2533 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2536 /* Next handle constant cases, those for which we can return 1 even
2537 if ONLY_CONST is set. */
2538 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2539 switch (TREE_CODE (arg0))
2542 return (! TREE_CONSTANT_OVERFLOW (arg0)
2543 && ! TREE_CONSTANT_OVERFLOW (arg1)
2544 && tree_int_cst_equal (arg0, arg1));
2547 return (! TREE_CONSTANT_OVERFLOW (arg0)
2548 && ! TREE_CONSTANT_OVERFLOW (arg1)
2549 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2550 TREE_REAL_CST (arg1)));
2556 if (TREE_CONSTANT_OVERFLOW (arg0)
2557 || TREE_CONSTANT_OVERFLOW (arg1))
2560 v1 = TREE_VECTOR_CST_ELTS (arg0);
2561 v2 = TREE_VECTOR_CST_ELTS (arg1);
2564 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2567 v1 = TREE_CHAIN (v1);
2568 v2 = TREE_CHAIN (v2);
2575 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2577 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2581 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2582 && ! memcmp (TREE_STRING_POINTER (arg0),
2583 TREE_STRING_POINTER (arg1),
2584 TREE_STRING_LENGTH (arg0)));
2587 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2593 if (flags & OEP_ONLY_CONST)
2596 /* Define macros to test an operand from arg0 and arg1 for equality and a
2597 variant that allows null and views null as being different from any
2598 non-null value. In the latter case, if either is null, they both
2599 must be; otherwise, do the normal comparison. */
2600 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2601 TREE_OPERAND (arg1, N), flags)
2603 #define OP_SAME_WITH_NULL(N) \
2604 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2605 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2607 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2610 /* Two conversions are equal only if signedness and modes match. */
2611 switch (TREE_CODE (arg0))
2616 case FIX_TRUNC_EXPR:
2617 case FIX_FLOOR_EXPR:
2618 case FIX_ROUND_EXPR:
2619 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2620 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2630 case tcc_comparison:
2632 if (OP_SAME (0) && OP_SAME (1))
2635 /* For commutative ops, allow the other order. */
2636 return (commutative_tree_code (TREE_CODE (arg0))
2637 && operand_equal_p (TREE_OPERAND (arg0, 0),
2638 TREE_OPERAND (arg1, 1), flags)
2639 && operand_equal_p (TREE_OPERAND (arg0, 1),
2640 TREE_OPERAND (arg1, 0), flags));
2643 /* If either of the pointer (or reference) expressions we are
2644 dereferencing contain a side effect, these cannot be equal. */
2645 if (TREE_SIDE_EFFECTS (arg0)
2646 || TREE_SIDE_EFFECTS (arg1))
2649 switch (TREE_CODE (arg0))
2652 case ALIGN_INDIRECT_REF:
2653 case MISALIGNED_INDIRECT_REF:
2659 case ARRAY_RANGE_REF:
2660 /* Operands 2 and 3 may be null. */
2663 && OP_SAME_WITH_NULL (2)
2664 && OP_SAME_WITH_NULL (3));
2667 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2668 may be NULL when we're called to compare MEM_EXPRs. */
2669 return OP_SAME_WITH_NULL (0)
2671 && OP_SAME_WITH_NULL (2);
2674 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2680 case tcc_expression:
2681 switch (TREE_CODE (arg0))
2684 case TRUTH_NOT_EXPR:
2687 case TRUTH_ANDIF_EXPR:
2688 case TRUTH_ORIF_EXPR:
2689 return OP_SAME (0) && OP_SAME (1);
2691 case TRUTH_AND_EXPR:
2693 case TRUTH_XOR_EXPR:
2694 if (OP_SAME (0) && OP_SAME (1))
2697 /* Otherwise take into account this is a commutative operation. */
2698 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2699 TREE_OPERAND (arg1, 1), flags)
2700 && operand_equal_p (TREE_OPERAND (arg0, 1),
2701 TREE_OPERAND (arg1, 0), flags));
2704 /* If the CALL_EXPRs call different functions, then they
2705 clearly can not be equal. */
2710 unsigned int cef = call_expr_flags (arg0);
2711 if (flags & OEP_PURE_SAME)
2712 cef &= ECF_CONST | ECF_PURE;
2719 /* Now see if all the arguments are the same. operand_equal_p
2720 does not handle TREE_LIST, so we walk the operands here
2721 feeding them to operand_equal_p. */
2722 arg0 = TREE_OPERAND (arg0, 1);
2723 arg1 = TREE_OPERAND (arg1, 1);
2724 while (arg0 && arg1)
2726 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2730 arg0 = TREE_CHAIN (arg0);
2731 arg1 = TREE_CHAIN (arg1);
2734 /* If we get here and both argument lists are exhausted
2735 then the CALL_EXPRs are equal. */
2736 return ! (arg0 || arg1);
2742 case tcc_declaration:
2743 /* Consider __builtin_sqrt equal to sqrt. */
2744 return (TREE_CODE (arg0) == FUNCTION_DECL
2745 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2746 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2747 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2754 #undef OP_SAME_WITH_NULL
2757 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2758 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2760 When in doubt, return 0. */
2763 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2765 int unsignedp1, unsignedpo;
2766 tree primarg0, primarg1, primother;
2767 unsigned int correct_width;
2769 if (operand_equal_p (arg0, arg1, 0))
2772 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2773 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2776 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2777 and see if the inner values are the same. This removes any
2778 signedness comparison, which doesn't matter here. */
2779 primarg0 = arg0, primarg1 = arg1;
2780 STRIP_NOPS (primarg0);
2781 STRIP_NOPS (primarg1);
2782 if (operand_equal_p (primarg0, primarg1, 0))
2785 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2786 actual comparison operand, ARG0.
2788 First throw away any conversions to wider types
2789 already present in the operands. */
2791 primarg1 = get_narrower (arg1, &unsignedp1);
2792 primother = get_narrower (other, &unsignedpo);
2794 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2795 if (unsignedp1 == unsignedpo
2796 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2797 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2799 tree type = TREE_TYPE (arg0);
2801 /* Make sure shorter operand is extended the right way
2802 to match the longer operand. */
2803 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2804 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2806 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2813 /* See if ARG is an expression that is either a comparison or is performing
2814 arithmetic on comparisons. The comparisons must only be comparing
2815 two different values, which will be stored in *CVAL1 and *CVAL2; if
2816 they are nonzero it means that some operands have already been found.
2817 No variables may be used anywhere else in the expression except in the
2818 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2819 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2821 If this is true, return 1. Otherwise, return zero. */
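/* For example (a, b, c and d are assumed variables): for
   ARG = (a < b) || (a == b) this returns 1 with *CVAL1 == a and
   *CVAL2 == b, since every comparison in the expression involves the same
   two values.  For (a < b) || (c == d) it returns 0, because more than two
   distinct values take part.  */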
2824 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2826 enum tree_code code = TREE_CODE (arg);
2827 enum tree_code_class class = TREE_CODE_CLASS (code);
2829 /* We can handle some of the tcc_expression cases here. */
2830 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2832 else if (class == tcc_expression
2833 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2834 || code == COMPOUND_EXPR))
2837 else if (class == tcc_expression && code == SAVE_EXPR
2838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2840 /* If we've already found a CVAL1 or CVAL2, this expression is
2841 too complex to handle. */
2842 if (*cval1 || *cval2)
2852 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2855 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2856 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2857 cval1, cval2, save_p));
2862 case tcc_expression:
2863 if (code == COND_EXPR)
2864 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2865 cval1, cval2, save_p)
2866 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2867 cval1, cval2, save_p)
2868 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2869 cval1, cval2, save_p));
2872 case tcc_comparison:
2873 /* First see if we can handle the first operand, then the second. For
2874 the second operand, we know *CVAL1 can't be zero. It must be that
2875 one side of the comparison is each of the values; test for the
2876 case where this isn't true by failing if the two operands
2879 if (operand_equal_p (TREE_OPERAND (arg, 0),
2880 TREE_OPERAND (arg, 1), 0))
2884 *cval1 = TREE_OPERAND (arg, 0);
2885 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2887 else if (*cval2 == 0)
2888 *cval2 = TREE_OPERAND (arg, 0);
2889 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2894 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2896 else if (*cval2 == 0)
2897 *cval2 = TREE_OPERAND (arg, 1);
2898 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2910 /* ARG is a tree that is known to contain just arithmetic operations and
2911 comparisons. Evaluate the operations in the tree substituting NEW0 for
2912 any occurrence of OLD0 as an operand of a comparison, and likewise NEW1 for OLD1. */
2916 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2918 tree type = TREE_TYPE (arg);
2919 enum tree_code code = TREE_CODE (arg);
2920 enum tree_code_class class = TREE_CODE_CLASS (code);
2922 /* We can handle some of the tcc_expression cases here. */
2923 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2925 else if (class == tcc_expression
2926 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2932 return fold_build1 (code, type,
2933 eval_subst (TREE_OPERAND (arg, 0),
2934 old0, new0, old1, new1));
2937 return fold_build2 (code, type,
2938 eval_subst (TREE_OPERAND (arg, 0),
2939 old0, new0, old1, new1),
2940 eval_subst (TREE_OPERAND (arg, 1),
2941 old0, new0, old1, new1));
2943 case tcc_expression:
2947 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2950 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2953 return fold_build3 (code, type,
2954 eval_subst (TREE_OPERAND (arg, 0),
2955 old0, new0, old1, new1),
2956 eval_subst (TREE_OPERAND (arg, 1),
2957 old0, new0, old1, new1),
2958 eval_subst (TREE_OPERAND (arg, 2),
2959 old0, new0, old1, new1));
2963 /* Fall through - ??? */
2965 case tcc_comparison:
2967 tree arg0 = TREE_OPERAND (arg, 0);
2968 tree arg1 = TREE_OPERAND (arg, 1);
2970 /* We need to check both for exact equality and tree equality. The
2971 former will be true if the operand has a side-effect. In that
2972 case, we know the operand occurred exactly once. */
2974 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2976 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2979 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2981 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2984 return fold_build2 (code, type, arg0, arg1);
2992 /* Return a tree for the case when the result of an expression is RESULT
2993 converted to TYPE and OMITTED was previously an operand of the expression
2994 but is now not needed (e.g., we folded OMITTED * 0).
2996 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2997 the conversion of RESULT to TYPE. */
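/* For example (f is an assumed function with side effects): when fold
   rewrites f () * 0 to zero, it goes through this routine, which returns
   the COMPOUND_EXPR (f (), 0) so that the call is still evaluated while
   the multiplication disappears.  */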
3000 omit_one_operand (tree type, tree result, tree omitted)
3002 tree t = fold_convert (type, result);
3004 if (TREE_SIDE_EFFECTS (omitted))
3005 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3007 return non_lvalue (t);
3010 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3013 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3015 tree t = fold_convert (type, result);
3017 if (TREE_SIDE_EFFECTS (omitted))
3018 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3020 return pedantic_non_lvalue (t);
3023 /* Return a tree for the case when the result of an expression is RESULT
3024 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3025 of the expression but are now not needed.
3027 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3028 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3029 evaluated before OMITTED2. Otherwise, if neither has side effects,
3030 just do the conversion of RESULT to TYPE. */
3033 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3035 tree t = fold_convert (type, result);
3037 if (TREE_SIDE_EFFECTS (omitted2))
3038 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3039 if (TREE_SIDE_EFFECTS (omitted1))
3040 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3042 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3046 /* Return a simplified tree node for the truth-negation of ARG. This
3047 never alters ARG itself. We assume that ARG is an operation that
3048 returns a truth value (0 or 1).
3050 FIXME: one would think we would fold the result, but it causes
3051 problems with the dominator optimizer. */
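/* A sketch of what this produces (a and b are assumed operands):
   !(a < b) becomes a >= b for integer operands, and a UNGE b (unordered or
   greater-or-equal) for floating point when NaNs are honored and
   -fno-trapping-math permits the inversion.  With trapping math the
   floating-point case is left alone and the caller falls back to wrapping
   the operand in a TRUTH_NOT_EXPR.  */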
3054 fold_truth_not_expr (tree arg)
3056 tree type = TREE_TYPE (arg);
3057 enum tree_code code = TREE_CODE (arg);
3059 /* If this is a comparison, we can simply invert it, except for
3060 floating-point non-equality comparisons, in which case we just
3061 enclose a TRUTH_NOT_EXPR around what we have. */
3063 if (TREE_CODE_CLASS (code) == tcc_comparison)
3065 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3066 if (FLOAT_TYPE_P (op_type)
3067 && flag_trapping_math
3068 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3069 && code != NE_EXPR && code != EQ_EXPR)
3073 code = invert_tree_comparison (code,
3074 HONOR_NANS (TYPE_MODE (op_type)));
3075 if (code == ERROR_MARK)
3078 return build2 (code, type,
3079 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3086 return constant_boolean_node (integer_zerop (arg), type);
3088 case TRUTH_AND_EXPR:
3089 return build2 (TRUTH_OR_EXPR, type,
3090 invert_truthvalue (TREE_OPERAND (arg, 0)),
3091 invert_truthvalue (TREE_OPERAND (arg, 1)));
3094 return build2 (TRUTH_AND_EXPR, type,
3095 invert_truthvalue (TREE_OPERAND (arg, 0)),
3096 invert_truthvalue (TREE_OPERAND (arg, 1)));
3098 case TRUTH_XOR_EXPR:
3099 /* Here we can invert either operand. We invert the first operand
3100 unless the second operand is a TRUTH_NOT_EXPR in which case our
3101 result is the XOR of the first operand with the inside of the
3102 negation of the second operand. */
3104 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3105 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3106 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3108 return build2 (TRUTH_XOR_EXPR, type,
3109 invert_truthvalue (TREE_OPERAND (arg, 0)),
3110 TREE_OPERAND (arg, 1));
3112 case TRUTH_ANDIF_EXPR:
3113 return build2 (TRUTH_ORIF_EXPR, type,
3114 invert_truthvalue (TREE_OPERAND (arg, 0)),
3115 invert_truthvalue (TREE_OPERAND (arg, 1)));
3117 case TRUTH_ORIF_EXPR:
3118 return build2 (TRUTH_ANDIF_EXPR, type,
3119 invert_truthvalue (TREE_OPERAND (arg, 0)),
3120 invert_truthvalue (TREE_OPERAND (arg, 1)));
3122 case TRUTH_NOT_EXPR:
3123 return TREE_OPERAND (arg, 0);
3127 tree arg1 = TREE_OPERAND (arg, 1);
3128 tree arg2 = TREE_OPERAND (arg, 2);
3129 /* A COND_EXPR may have a throw as one operand, which
3130 then has void type. Just leave void operands
3132 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3133 VOID_TYPE_P (TREE_TYPE (arg1))
3134 ? arg1 : invert_truthvalue (arg1),
3135 VOID_TYPE_P (TREE_TYPE (arg2))
3136 ? arg2 : invert_truthvalue (arg2));
3140 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3141 invert_truthvalue (TREE_OPERAND (arg, 1)));
3143 case NON_LVALUE_EXPR:
3144 return invert_truthvalue (TREE_OPERAND (arg, 0));
3147 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3148 return build1 (TRUTH_NOT_EXPR, type, arg);
3152 return build1 (TREE_CODE (arg), type,
3153 invert_truthvalue (TREE_OPERAND (arg, 0)));
3156 if (!integer_onep (TREE_OPERAND (arg, 1)))
3158 return build2 (EQ_EXPR, type, arg,
3159 build_int_cst (type, 0));
3162 return build1 (TRUTH_NOT_EXPR, type, arg);
3164 case CLEANUP_POINT_EXPR:
3165 return build1 (CLEANUP_POINT_EXPR, type,
3166 invert_truthvalue (TREE_OPERAND (arg, 0)));
3175 /* Return a simplified tree node for the truth-negation of ARG. This
3176 never alters ARG itself. We assume that ARG is an operation that
3177 returns a truth value (0 or 1).
3179 FIXME: one would think we would fold the result, but it causes
3180 problems with the dominator optimizer. */
3183 invert_truthvalue (tree arg)
3187 if (TREE_CODE (arg) == ERROR_MARK)
3190 tem = fold_truth_not_expr (arg);
3192 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3197 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3198 operands are another bit-wise operation with a common input. If so,
3199 distribute the bit operations to save an operation and possibly two if
3200 constants are involved. For example, convert
3201 (A | B) & (A | C) into A | (B & C)
3202 Further simplification will occur if B and C are constants.
3204 If this optimization cannot be done, 0 will be returned. */
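/* A concrete instance (X is an assumed operand): for CODE == BIT_AND_EXPR
   with ARG0 == (X | 3) and ARG1 == (X | 5), the common operand is X and
   the result is X | (3 & 5), which further folds to X | 1.  */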
3207 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3212 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3213 || TREE_CODE (arg0) == code
3214 || (TREE_CODE (arg0) != BIT_AND_EXPR
3215 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3218 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3220 common = TREE_OPERAND (arg0, 0);
3221 left = TREE_OPERAND (arg0, 1);
3222 right = TREE_OPERAND (arg1, 1);
3224 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3226 common = TREE_OPERAND (arg0, 0);
3227 left = TREE_OPERAND (arg0, 1);
3228 right = TREE_OPERAND (arg1, 0);
3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3232 common = TREE_OPERAND (arg0, 1);
3233 left = TREE_OPERAND (arg0, 0);
3234 right = TREE_OPERAND (arg1, 1);
3236 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3238 common = TREE_OPERAND (arg0, 1);
3239 left = TREE_OPERAND (arg0, 0);
3240 right = TREE_OPERAND (arg1, 0);
3245 return fold_build2 (TREE_CODE (arg0), type, common,
3246 fold_build2 (code, type, left, right));
3249 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3250 with code CODE. This optimization is unsafe. */
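/* Sketches of the two transformations (x and y are assumed operands;
   callers are expected to guard this with -funsafe-math-optimizations,
   which is why the comment above calls it unsafe):

     x/3.0 - y/3.0  ->  (x - y) / 3.0
     x/2.0 + x/4.0  ->  x * 0.75        (i.e. x * (1/2.0 + 1/4.0))  */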
3252 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3254 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3255 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3257 /* (A / C) +- (B / C) -> (A +- B) / C. */
3259 && operand_equal_p (TREE_OPERAND (arg0, 1),
3260 TREE_OPERAND (arg1, 1), 0))
3261 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3262 fold_build2 (code, type,
3263 TREE_OPERAND (arg0, 0),
3264 TREE_OPERAND (arg1, 0)),
3265 TREE_OPERAND (arg0, 1));
3267 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3268 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 0), 0)
3270 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3271 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3273 REAL_VALUE_TYPE r0, r1;
3274 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3275 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3277 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3279 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3280 real_arithmetic (&r0, code, &r0, &r1);
3281 return fold_build2 (MULT_EXPR, type,
3282 TREE_OPERAND (arg0, 0),
3283 build_real (type, r0));
3289 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3290 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3293 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3300 tree size = TYPE_SIZE (TREE_TYPE (inner));
3301 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3302 || POINTER_TYPE_P (TREE_TYPE (inner)))
3303 && host_integerp (size, 0)
3304 && tree_low_cst (size, 0) == bitsize)
3305 return fold_convert (type, inner);
3308 result = build3 (BIT_FIELD_REF, type, inner,
3309 size_int (bitsize), bitsize_int (bitpos));
3311 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3316 /* Optimize a bit-field compare.
3318 There are two cases: First is a compare against a constant and the
3319 second is a comparison of two items where the fields are at the same
3320 bit position relative to the start of a chunk (byte, halfword, word)
3321 large enough to contain it. In these cases we can avoid the shift
3322 implicit in bitfield extractions.
3324 For constants, we emit a compare of the shifted constant with the
3325 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3326 compared. For two fields at the same position, we do the ANDs with the
3327 similar mask and compare the result of the ANDs.
3329 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3330 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3331 are the left and right operands of the comparison, respectively.
3333 If the optimization described above can be done, we return the resulting
3334 tree. Otherwise we return zero. */
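/* A rough sketch of the constant case (the struct and a little-endian bit
   layout are assumptions for illustration):

     struct { unsigned lo : 3; unsigned f : 9; } s;
     ... s.f == 5 ...

   becomes a load of the containing unit W followed by
   (W & 0xff8) == (5 << 3); the constant is shifted up to the field's
   position instead of shifting the field down to the constant.  */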
3337 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3340 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3341 tree type = TREE_TYPE (lhs);
3342 tree signed_type, unsigned_type;
3343 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3344 enum machine_mode lmode, rmode, nmode;
3345 int lunsignedp, runsignedp;
3346 int lvolatilep = 0, rvolatilep = 0;
3347 tree linner, rinner = NULL_TREE;
3351 /* Get all the information about the extractions being done. If the bit size
3352 is the same as the size of the underlying object, we aren't doing an
3353 extraction at all and so can do nothing. We also don't want to
3354 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3355 then will no longer be able to replace it. */
3356 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3357 &lunsignedp, &lvolatilep, false);
3358 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3359 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3364 /* If this is not a constant, we can only do something if bit positions,
3365 sizes, and signedness are the same. */
3366 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3367 &runsignedp, &rvolatilep, false);
3369 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3370 || lunsignedp != runsignedp || offset != 0
3371 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3375 /* See if we can find a mode to refer to this field. We should be able to,
3376 but fail if we can't. */
3377 nmode = get_best_mode (lbitsize, lbitpos,
3378 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3379 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3380 TYPE_ALIGN (TREE_TYPE (rinner))),
3381 word_mode, lvolatilep || rvolatilep);
3382 if (nmode == VOIDmode)
3385 /* Set signed and unsigned types of the precision of this mode for the
3387 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3388 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3390 /* Compute the bit position and size for the new reference and our offset
3391 within it. If the new reference is the same size as the original, we
3392 won't optimize anything, so return zero. */
3393 nbitsize = GET_MODE_BITSIZE (nmode);
3394 nbitpos = lbitpos & ~ (nbitsize - 1);
3396 if (nbitsize == lbitsize)
3399 if (BYTES_BIG_ENDIAN)
3400 lbitpos = nbitsize - lbitsize - lbitpos;
3402 /* Make the mask to be used against the extracted field. */
3403 mask = build_int_cst (unsigned_type, -1);
3404 mask = force_fit_type (mask, 0, false, false);
3405 mask = fold_convert (unsigned_type, mask);
3406 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3407 mask = const_binop (RSHIFT_EXPR, mask,
3408 size_int (nbitsize - lbitsize - lbitpos), 0);
3411 /* If not comparing with constant, just rework the comparison
3413 return build2 (code, compare_type,
3414 build2 (BIT_AND_EXPR, unsigned_type,
3415 make_bit_field_ref (linner, unsigned_type,
3416 nbitsize, nbitpos, 1),
3418 build2 (BIT_AND_EXPR, unsigned_type,
3419 make_bit_field_ref (rinner, unsigned_type,
3420 nbitsize, nbitpos, 1),
3423 /* Otherwise, we are handling the constant case. See if the constant is too
3424 big for the field. Warn and return a tree for 0 (false) if so. We do
3425 this not only for its own sake, but to avoid having to test for this
3426 error case below. If we didn't, we might generate wrong code.
3428 For unsigned fields, the constant shifted right by the field length should
3429 be all zero. For signed fields, the high-order bits should agree with
3434 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3435 fold_convert (unsigned_type, rhs),
3436 size_int (lbitsize), 0)))
3438 warning (0, "comparison is always %d due to width of bit-field",
3440 return constant_boolean_node (code == NE_EXPR, compare_type);
3445 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3446 size_int (lbitsize - 1), 0);
3447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3449 warning (0, "comparison is always %d due to width of bit-field",
3451 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 /* Single-bit compares should always be against zero. */
3456 if (lbitsize == 1 && ! integer_zerop (rhs))
3458 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3459 rhs = build_int_cst (type, 0);
3462 /* Make a new bitfield reference, shift the constant over the
3463 appropriate number of bits and mask it with the computed mask
3464 (in case this was a signed field). If we changed it, make a new one. */
3465 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3468 TREE_SIDE_EFFECTS (lhs) = 1;
3469 TREE_THIS_VOLATILE (lhs) = 1;
3472 rhs = const_binop (BIT_AND_EXPR,
3473 const_binop (LSHIFT_EXPR,
3474 fold_convert (unsigned_type, rhs),
3475 size_int (lbitpos), 0),
3478 return build2 (code, compare_type,
3479 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3483 /* Subroutine for fold_truthop: decode a field reference.
3485 If EXP is a comparison reference, we return the innermost reference.
3487 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3488 set to the starting bit number.
3490 If the innermost field can be completely contained in a mode-sized
3491 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3493 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3494 otherwise it is not changed.
3496 *PUNSIGNEDP is set to the signedness of the field.
3498 *PMASK is set to the mask used. This is either contained in a
3499 BIT_AND_EXPR or derived from the width of the field.
3501 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3503 Return 0 if this is not a component reference or is one that we can't
3504 do anything with. */
3507 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3508 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3509 int *punsignedp, int *pvolatilep,
3510 tree *pmask, tree *pand_mask)
3512 tree outer_type = 0;
3514 tree mask, inner, offset;
3516 unsigned int precision;
3518 /* All the optimizations using this function assume integer fields.
3519 There are problems with FP fields since the type_for_size call
3520 below can fail for, e.g., XFmode. */
3521 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3524 /* We are interested in the bare arrangement of bits, so strip everything
3525 that doesn't affect the machine mode. However, record the type of the
3526 outermost expression if it may matter below. */
3527 if (TREE_CODE (exp) == NOP_EXPR
3528 || TREE_CODE (exp) == CONVERT_EXPR
3529 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3530 outer_type = TREE_TYPE (exp);
3533 if (TREE_CODE (exp) == BIT_AND_EXPR)
3535 and_mask = TREE_OPERAND (exp, 1);
3536 exp = TREE_OPERAND (exp, 0);
3537 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3538 if (TREE_CODE (and_mask) != INTEGER_CST)
3542 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3543 punsignedp, pvolatilep, false);
3544 if ((inner == exp && and_mask == 0)
3545 || *pbitsize < 0 || offset != 0
3546 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3549 /* If the number of bits in the reference is the same as the bitsize of
3550 the outer type, then the outer type gives the signedness. Otherwise
3551 (in case of a small bitfield) the signedness is unchanged. */
3552 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3553 *punsignedp = TYPE_UNSIGNED (outer_type);
3555 /* Compute the mask to access the bitfield. */
3556 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3557 precision = TYPE_PRECISION (unsigned_type);
3559 mask = build_int_cst (unsigned_type, -1);
3560 mask = force_fit_type (mask, 0, false, false);
3562 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3563 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3565 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3567 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3568 fold_convert (unsigned_type, and_mask), mask);
3571 *pand_mask = and_mask;
3575 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3579 all_ones_mask_p (tree mask, int size)
3581 tree type = TREE_TYPE (mask);
3582 unsigned int precision = TYPE_PRECISION (type);
3585 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3586 tmask = force_fit_type (tmask, 0, false, false);
3589 tree_int_cst_equal (mask,
3590 const_binop (RSHIFT_EXPR,
3591 const_binop (LSHIFT_EXPR, tmask,
3592 size_int (precision - size),
3594 size_int (precision - size), 0));
3597 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3598 represents the sign bit of EXP's type. If EXP represents a sign
3599 or zero extension, also test VAL against the unextended type.
3600 The return value is the (sub)expression whose sign bit is VAL,
3601 or NULL_TREE otherwise. */
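/* For example (x is an assumed 32-bit signed variable): sign_bit_p (x,
   <INTEGER_CST 0x80000000>) returns x, which lets callers rewrite a test
   such as (x & 0x80000000) != 0 into the cheaper x < 0.  */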
3604 sign_bit_p (tree exp, tree val)
3606 unsigned HOST_WIDE_INT mask_lo, lo;
3607 HOST_WIDE_INT mask_hi, hi;
3611 /* Tree EXP must have an integral type. */
3612 t = TREE_TYPE (exp);
3613 if (! INTEGRAL_TYPE_P (t))
3616 /* Tree VAL must be an integer constant. */
3617 if (TREE_CODE (val) != INTEGER_CST
3618 || TREE_CONSTANT_OVERFLOW (val))
3621 width = TYPE_PRECISION (t);
3622 if (width > HOST_BITS_PER_WIDE_INT)
3624 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3627 mask_hi = ((unsigned HOST_WIDE_INT) -1
3628 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3634 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3637 mask_lo = ((unsigned HOST_WIDE_INT) -1
3638 >> (HOST_BITS_PER_WIDE_INT - width));
3641 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3642 treat VAL as if it were unsigned. */
3643 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3644 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3647 /* Handle extension from a narrower type. */
3648 if (TREE_CODE (exp) == NOP_EXPR
3649 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3650 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3655 /* Subroutine for fold_truthop: determine if an operand is simple enough
3656 to be evaluated unconditionally. */
3659 simple_operand_p (tree exp)
3661 /* Strip any conversions that don't change the machine mode. */
3664 return (CONSTANT_CLASS_P (exp)
3665 || TREE_CODE (exp) == SSA_NAME
3667 && ! TREE_ADDRESSABLE (exp)
3668 && ! TREE_THIS_VOLATILE (exp)
3669 && ! DECL_NONLOCAL (exp)
3670 /* Don't regard global variables as simple. They may be
3671 allocated in ways unknown to the compiler (shared memory,
3672 #pragma weak, etc). */
3673 && ! TREE_PUBLIC (exp)
3674 && ! DECL_EXTERNAL (exp)
3675 /* Loading a static variable is unduly expensive, but global
3676 registers aren't expensive. */
3677 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3680 /* The following functions are subroutines to fold_range_test and allow it to
3681 try to change a logical combination of comparisons into a range test.
3684 For example, X == 2 || X == 3 || X == 4 || X == 5
3688 is converted to (unsigned) (X - 2) <= 3
3690 We describe each set of comparisons as being either inside or outside
3691 a range, using a variable named like IN_P, and then describe the
3692 range with a lower and upper bound. If one of the bounds is omitted,
3693 it represents either the highest or lowest value of the type.
3695 In the comments below, we represent a range by two numbers in brackets
3696 preceded by a "+" to designate being inside that range, or a "-" to
3697 designate being outside that range, so the condition can be inverted by
3698 flipping the prefix. An omitted bound is represented by a "-". For
3699 example, "- [-, 10]" means being outside the range starting at the lowest
3700 possible value and ending at 10, in other words, being greater than 10.
3701 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3704 We set up things so that the missing bounds are handled in a consistent
3705 manner so neither a missing bound nor "true" and "false" need to be
3706 handled using a special case. */
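/* In this notation (X is an assumed variable), X > 2 is described as
   "- [-, 2]" (outside the range from the lowest value to 2), X < 6 as
   "- [6, -]", and their conjunction X > 2 && X < 6 merges to "+ [3, 5]".  */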
3708 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3709 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3710 and UPPER1_P are nonzero if the respective argument is an upper bound
3711 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3712 must be specified for a comparison. ARG1 will be converted to ARG0's
3713 type if both are specified. */
3716 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3717 tree arg1, int upper1_p)
3723 /* If neither arg represents infinity, do the normal operation.
3724 Else, if not a comparison, return infinity. Else handle the special
3725 comparison rules. Note that most of the cases below won't occur, but
3726 are handled for consistency. */
3728 if (arg0 != 0 && arg1 != 0)
3730 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3731 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3733 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3736 if (TREE_CODE_CLASS (code) != tcc_comparison)
3739 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3740 for neither. In real maths, we cannot assume open ended ranges are
3741 the same. But, this is computer arithmetic, where numbers are finite.
3742 We can therefore make the transformation of any unbounded range with
3743 the value Z, Z being greater than any representable number. This permits
3744 us to treat unbounded ranges as equal. */
3745 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3746 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3750 result = sgn0 == sgn1;
3753 result = sgn0 != sgn1;
3756 result = sgn0 < sgn1;
3759 result = sgn0 <= sgn1;
3762 result = sgn0 > sgn1;
3765 result = sgn0 >= sgn1;
3771 return constant_boolean_node (result, type);
3774 /* Given EXP, a logical expression, set the range it is testing into
3775 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3776 actually being tested. *PLOW and *PHIGH will be made of the same type
3777 as the returned expression. If EXP is not a comparison, we will most
3778 likely not be returning a useful value and range. */
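/* For example (x is an assumed signed variable): for EXP = (x > 5) this
   returns x and sets *PIN_P = 0, *PLOW = NULL_TREE (no lower bound) and
   *PHIGH = 5, i.e. "x is outside [-, 5]".  */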
3781 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3783 enum tree_code code;
3784 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3785 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3787 tree low, high, n_low, n_high;
3789 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3790 and see if we can refine the range. Some of the cases below may not
3791 happen, but it doesn't seem worth worrying about this. We "continue"
3792 the outer loop when we've changed something; otherwise we "break"
3793 the switch, which will "break" the while. */
3796 low = high = build_int_cst (TREE_TYPE (exp), 0);
3800 code = TREE_CODE (exp);
3801 exp_type = TREE_TYPE (exp);
3803 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3805 if (TREE_CODE_LENGTH (code) > 0)
3806 arg0 = TREE_OPERAND (exp, 0);
3807 if (TREE_CODE_CLASS (code) == tcc_comparison
3808 || TREE_CODE_CLASS (code) == tcc_unary
3809 || TREE_CODE_CLASS (code) == tcc_binary)
3810 arg0_type = TREE_TYPE (arg0);
3811 if (TREE_CODE_CLASS (code) == tcc_binary
3812 || TREE_CODE_CLASS (code) == tcc_comparison
3813 || (TREE_CODE_CLASS (code) == tcc_expression
3814 && TREE_CODE_LENGTH (code) > 1))
3815 arg1 = TREE_OPERAND (exp, 1);
3820 case TRUTH_NOT_EXPR:
3821 in_p = ! in_p, exp = arg0;
3824 case EQ_EXPR: case NE_EXPR:
3825 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3826 /* We can only do something if the range is testing for zero
3827 and if the second operand is an integer constant. Note that
3828 saying something is "in" the range we make is done by
3829 complementing IN_P since it will set in the initial case of
3830 being not equal to zero; "out" is leaving it alone. */
3831 if (low == 0 || high == 0
3832 || ! integer_zerop (low) || ! integer_zerop (high)
3833 || TREE_CODE (arg1) != INTEGER_CST)
3838 case NE_EXPR: /* - [c, c] */
3841 case EQ_EXPR: /* + [c, c] */
3842 in_p = ! in_p, low = high = arg1;
3844 case GT_EXPR: /* - [-, c] */
3845 low = 0, high = arg1;
3847 case GE_EXPR: /* + [c, -] */
3848 in_p = ! in_p, low = arg1, high = 0;
3850 case LT_EXPR: /* - [c, -] */
3851 low = arg1, high = 0;
3853 case LE_EXPR: /* + [-, c] */
3854 in_p = ! in_p, low = 0, high = arg1;
3860 /* If this is an unsigned comparison, we also know that EXP is
3861 greater than or equal to zero. We base the range tests we make
3862 on that fact, so we record it here so we can parse existing
3863 range tests. We test arg0_type since often the return type
3864 of, e.g. EQ_EXPR, is boolean. */
3865 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3867 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3869 build_int_cst (arg0_type, 0),
3873 in_p = n_in_p, low = n_low, high = n_high;
3875 /* If the high bound is missing, but we have a nonzero low
3876 bound, reverse the range so it goes from zero to the low bound minus 1.
3878 if (high == 0 && low && ! integer_zerop (low))
3881 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3882 integer_one_node, 0);
3883 low = build_int_cst (arg0_type, 0);
3891 /* (-x) IN [a,b] -> x in [-b, -a] */
3892 n_low = range_binop (MINUS_EXPR, exp_type,
3893 build_int_cst (exp_type, 0),
3895 n_high = range_binop (MINUS_EXPR, exp_type,
3896 build_int_cst (exp_type, 0),
3898 low = n_low, high = n_high;
3904 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3905 build_int_cst (exp_type, 1));
3908 case PLUS_EXPR: case MINUS_EXPR:
3909 if (TREE_CODE (arg1) != INTEGER_CST)
3912 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3913 move a constant to the other side. */
3914 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3917 /* If EXP is signed, any overflow in the computation is undefined,
3918 so we don't worry about it so long as our computations on
3919 the bounds don't overflow. For unsigned, overflow is defined
3920 and this is exactly the right thing. */
3921 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3922 arg0_type, low, 0, arg1, 0);
3923 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3924 arg0_type, high, 1, arg1, 0);
3925 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3926 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3929 /* Check for an unsigned range which has wrapped around the maximum
3930 value thus making n_high < n_low, and normalize it. */
3931 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3933 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3934 integer_one_node, 0);
3935 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3936 integer_one_node, 0);
3938 /* If the range is of the form +/- [ x+1, x ], we won't
3939 be able to normalize it. But then, it represents the
3940 whole range or the empty set, so make it +/- [ -, - ]. */
3942 if (tree_int_cst_equal (n_low, low)
3943 && tree_int_cst_equal (n_high, high))
3949 low = n_low, high = n_high;
3954 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3955 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3958 if (! INTEGRAL_TYPE_P (arg0_type)
3959 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3960 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3963 n_low = low, n_high = high;
3966 n_low = fold_convert (arg0_type, n_low);
3969 n_high = fold_convert (arg0_type, n_high);
3972 /* If we're converting arg0 from an unsigned type, to exp,
3973 a signed type, we will be doing the comparison as unsigned.
3974 The tests above have already verified that LOW and HIGH
3977 So we have to ensure that we will handle large unsigned
3978 values the same way that the current signed bounds treat
3981 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3984 tree equiv_type = lang_hooks.types.type_for_mode
3985 (TYPE_MODE (arg0_type), 1);
3987 /* A range without an upper bound is, naturally, unbounded.
3988 Since convert would have cropped a very large value, use
3989 the max value for the destination type. */
3991 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3992 : TYPE_MAX_VALUE (arg0_type);
3994 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3995 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3996 fold_convert (arg0_type,
3998 fold_convert (arg0_type,
4001 /* If the low bound is specified, "and" the range with the
4002 range for which the original unsigned value will be
4006 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4007 1, n_low, n_high, 1,
4008 fold_convert (arg0_type,
4013 in_p = (n_in_p == in_p);
4017 /* Otherwise, "or" the range with the range of the input
4018 that will be interpreted as negative. */
4019 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4020 0, n_low, n_high, 1,
4021 fold_convert (arg0_type,
4026 in_p = (in_p != n_in_p);
4031 low = n_low, high = n_high;
4041 /* If EXP is a constant, we can evaluate whether this is true or false. */
4042 if (TREE_CODE (exp) == INTEGER_CST)
4044 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4046 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4052 *pin_p = in_p, *plow = low, *phigh = high;
4056 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4057 type, TYPE, return an expression to test if EXP is in (or out of, depending
4058 on IN_P) the range. Return 0 if the test couldn't be created. */
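/* For example (x is an assumed signed int): build_range_check (type, x, 1,
   3, 5) produces roughly (unsigned int) x - 3 <= 2, relying on unsigned
   wrap-around so that one comparison covers both bounds.  */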
4061 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4063 tree etype = TREE_TYPE (exp);
4066 #ifdef HAVE_canonicalize_funcptr_for_compare
4067 /* Disable this optimization for function pointer expressions
4068 on targets that require function pointer canonicalization. */
4069 if (HAVE_canonicalize_funcptr_for_compare
4070 && TREE_CODE (etype) == POINTER_TYPE
4071 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4077 value = build_range_check (type, exp, 1, low, high);
4079 return invert_truthvalue (value);
4084 if (low == 0 && high == 0)
4085 return build_int_cst (type, 1);
4088 return fold_build2 (LE_EXPR, type, exp,
4089 fold_convert (etype, high));
4092 return fold_build2 (GE_EXPR, type, exp,
4093 fold_convert (etype, low));
4095 if (operand_equal_p (low, high, 0))
4096 return fold_build2 (EQ_EXPR, type, exp,
4097 fold_convert (etype, low));
4099 if (integer_zerop (low))
4101 if (! TYPE_UNSIGNED (etype))
4103 etype = lang_hooks.types.unsigned_type (etype);
4104 high = fold_convert (etype, high);
4105 exp = fold_convert (etype, exp);
4107 return build_range_check (type, exp, 1, 0, high);
4110 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4111 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4113 unsigned HOST_WIDE_INT lo;
4117 prec = TYPE_PRECISION (etype);
4118 if (prec <= HOST_BITS_PER_WIDE_INT)
4121 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4125 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4126 lo = (unsigned HOST_WIDE_INT) -1;
4129 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4131 if (TYPE_UNSIGNED (etype))
4133 etype = lang_hooks.types.signed_type (etype);
4134 exp = fold_convert (etype, exp);
4136 return fold_build2 (GT_EXPR, type, exp,
4137 build_int_cst (etype, 0));
4141 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4142 This requires wrap-around arithmetic for the type of the expression.
4143 switch (TREE_CODE (etype))
4146 /* There is no requirement that LOW be within the range of ETYPE
4147 if the latter is a subtype. It must, however, be within the base
4148 type of ETYPE. So be sure we do the subtraction in that type. */
4149 if (TREE_TYPE (etype))
4150 etype = TREE_TYPE (etype);
4155 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4156 TYPE_UNSIGNED (etype));
4163 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4164 if (TREE_CODE (etype) == INTEGER_TYPE
4165 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4167 tree utype, minv, maxv;
4169 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4170 for the type in question, as we rely on this here. */
4171 utype = lang_hooks.types.unsigned_type (etype);
4172 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4173 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4174 integer_one_node, 1);
4175 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4177 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4184 high = fold_convert (etype, high);
4185 low = fold_convert (etype, low);
4186 exp = fold_convert (etype, exp);
4188 value = const_binop (MINUS_EXPR, high, low, 0);
4190 if (value != 0 && !TREE_OVERFLOW (value))
4191 return build_range_check (type,
4192 fold_build2 (MINUS_EXPR, etype, exp, low),
4193 1, build_int_cst (etype, 0), value);
4198 /* Return the predecessor of VAL in its type, handling the infinite case. */
4201 range_predecessor (tree val)
4203 tree type = TREE_TYPE (val);
4205 if (INTEGRAL_TYPE_P (type)
4206 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4209 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4212 /* Return the successor of VAL in its type, handling the infinite case. */
4215 range_successor (tree val)
4217 tree type = TREE_TYPE (val);
4219 if (INTEGRAL_TYPE_P (type)
4220 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4223 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4226 /* Given two ranges, see if we can merge them into one. Return 1 if we
4227 can, 0 if we can't. Set the output range into the specified parameters. */
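/* Two worked examples (over an assumed int variable x): merging + [2, 5]
   with + [4, 10] gives + [4, 5]; merging - [-, 4] with - [5, -]
   (i.e. x > 4 && x < 5) gives - [-, -], which is always false.  */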
4230 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4231 tree high0, int in1_p, tree low1, tree high1)
4239 int lowequal = ((low0 == 0 && low1 == 0)
4240 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4241 low0, 0, low1, 0)));
4242 int highequal = ((high0 == 0 && high1 == 0)
4243 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4244 high0, 1, high1, 1)));
4246 /* Make range 0 be the range that starts first, or ends last if they
4247 start at the same value. Swap them if it isn't. */
4248 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4251 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4252 high1, 1, high0, 1))))
4254 temp = in0_p, in0_p = in1_p, in1_p = temp;
4255 tem = low0, low0 = low1, low1 = tem;
4256 tem = high0, high0 = high1, high1 = tem;
4259 /* Now flag two cases, whether the ranges are disjoint or whether the
4260 second range is totally subsumed in the first. Note that the tests
4261 below are simplified by the ones above. */
4262 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4263 high0, 1, low1, 0));
4264 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4265 high1, 1, high0, 1));
4267 /* We now have four cases, depending on whether we are including or
4268 excluding the two ranges. */
4271 /* If they don't overlap, the result is false. If the second range
4272 is a subset it is the result. Otherwise, the range is from the start
4273 of the second to the end of the first. */
4275 in_p = 0, low = high = 0;
4277 in_p = 1, low = low1, high = high1;
4279 in_p = 1, low = low1, high = high0;
4282 else if (in0_p && ! in1_p)
4284 /* If they don't overlap, the result is the first range. If they are
4285 equal, the result is false. If the second range is a subset of the
4286 first, and the ranges begin at the same place, we go from just after
4287 the end of the second range to the end of the first. If the second
4288 range is not a subset of the first, or if it is a subset and both
4289 ranges end at the same place, the range starts at the start of the
4290 first range and ends just before the second range.
4291 Otherwise, we can't describe this as a single range. */
4293 in_p = 1, low = low0, high = high0;
4294 else if (lowequal && highequal)
4295 in_p = 0, low = high = 0;
4296 else if (subset && lowequal)
4298 low = range_successor (high1);
4302 else if (! subset || highequal)
4305 high = range_predecessor (low1);
4312 else if (! in0_p && in1_p)
4314 /* If they don't overlap, the result is the second range. If the second
4315 is a subset of the first, the result is false. Otherwise,
4316 the range starts just after the first range and ends at the
4317 end of the second. */
4319 in_p = 1, low = low1, high = high1;
4320 else if (subset || highequal)
4321 in_p = 0, low = high = 0;
4324 low = range_successor (high0);
4332 /* The case where we are excluding both ranges. Here the complex case
4333 is if they don't overlap. In that case, the only time we have a
4334 range is if they are adjacent. If the second is a subset of the
4335 first, the result is the first. Otherwise, the range to exclude
4336 starts at the beginning of the first range and ends at the end of the
4340 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4341 range_successor (high0),
4343 in_p = 0, low = low0, high = high1;
4346 /* Canonicalize - [min, x] into - [-, x]. */
4347 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4348 switch (TREE_CODE (TREE_TYPE (low0)))
4351 if (TYPE_PRECISION (TREE_TYPE (low0))
4352 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4356 if (tree_int_cst_equal (low0,
4357 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4361 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4362 && integer_zerop (low0))
4369 /* Canonicalize - [x, max] into - [x, -]. */
4370 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4371 switch (TREE_CODE (TREE_TYPE (high1)))
4374 if (TYPE_PRECISION (TREE_TYPE (high1))
4375 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4379 if (tree_int_cst_equal (high1,
4380 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4384 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4385 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4387 integer_one_node, 1)))
4394 /* The ranges might also be adjacent between the maximum and
4395 minimum values of the given type. For
4396 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4397 return + [x + 1, y - 1]. */
4398 if (low0 == 0 && high1 == 0)
4400 low = range_successor (high0);
4401 high = range_predecessor (low1);
4402 if (low == 0 || high == 0)
4412 in_p = 0, low = low0, high = high0;
4414 in_p = 0, low = low0, high = high1;
4417 *pin_p = in_p, *plow = low, *phigh = high;
4422 /* Subroutine of fold, looking inside expressions of the form
4423 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4424 of the COND_EXPR. This function is being used also to optimize
4425 A op B ? C : A, by reversing the comparison first.
4427 Return a folded expression whose code is not a COND_EXPR
4428 anymore, or NULL_TREE if no folding opportunity is found. */
4431 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4433 enum tree_code comp_code = TREE_CODE (arg0);
4434 tree arg00 = TREE_OPERAND (arg0, 0);
4435 tree arg01 = TREE_OPERAND (arg0, 1);
4436 tree arg1_type = TREE_TYPE (arg1);
4442 /* If we have A op 0 ? A : -A, consider applying the following
4445 A == 0? A : -A same as -A
4446 A != 0? A : -A same as A
4447 A >= 0? A : -A same as abs (A)
4448 A > 0? A : -A same as abs (A)
4449 A <= 0? A : -A same as -abs (A)
4450 A < 0? A : -A same as -abs (A)
4452 None of these transformations work for modes with signed
4453 zeros. If A is +/-0, the first two transformations will
4454 change the sign of the result (from +0 to -0, or vice
4455 versa). The last four will fix the sign of the result,
4456 even though the original expressions could be positive or
4457 negative, depending on the sign of A.
4459 Note that all these transformations are correct if A is
4460 NaN, since the two alternatives (A and -A) are also NaNs. */
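/* For instance, (x > 0.0 ? x : -x) can become ABS_EXPR <x> and
   (x < 0.0 ? x : -x) can become the negation of ABS_EXPR <x>, as long
   as the mode does not honor signed zeros.  */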
4461 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4462 ? real_zerop (arg01)
4463 : integer_zerop (arg01))
4464 && ((TREE_CODE (arg2) == NEGATE_EXPR
4465 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4466 /* In the case that A is of the form X-Y, '-A' (arg2) may
4467 have already been folded to Y-X, check for that. */
4468 || (TREE_CODE (arg1) == MINUS_EXPR
4469 && TREE_CODE (arg2) == MINUS_EXPR
4470 && operand_equal_p (TREE_OPERAND (arg1, 0),
4471 TREE_OPERAND (arg2, 1), 0)
4472 && operand_equal_p (TREE_OPERAND (arg1, 1),
4473 TREE_OPERAND (arg2, 0), 0))))
4478 tem = fold_convert (arg1_type, arg1);
4479 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4482 return pedantic_non_lvalue (fold_convert (type, arg1));
4485 if (flag_trapping_math)
4490 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4491 arg1 = fold_convert (lang_hooks.types.signed_type
4492 (TREE_TYPE (arg1)), arg1);
4493 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4494 return pedantic_non_lvalue (fold_convert (type, tem));
4497 if (flag_trapping_math)
4501 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4502 arg1 = fold_convert (lang_hooks.types.signed_type
4503 (TREE_TYPE (arg1)), arg1);
4504 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4505 return negate_expr (fold_convert (type, tem));
4507 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4511 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4512 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4513 both transformations are correct when A is NaN: A != 0
4514 is then true, and A == 0 is false. */
4516 if (integer_zerop (arg01) && integer_zerop (arg2))
4518 if (comp_code == NE_EXPR)
4519 return pedantic_non_lvalue (fold_convert (type, arg1));
4520 else if (comp_code == EQ_EXPR)
4521 return build_int_cst (type, 0);
4524 /* Try some transformations of A op B ? A : B.
4526 A == B? A : B same as B
4527 A != B? A : B same as A
4528 A >= B? A : B same as max (A, B)
4529 A > B? A : B same as max (B, A)
4530 A <= B? A : B same as min (A, B)
4531 A < B? A : B same as min (B, A)
4533 As above, these transformations don't work in the presence
4534 of signed zeros. For example, if A and B are zeros of
4535 opposite sign, the first two transformations will change
4536 the sign of the result. In the last four, the original
4537 expressions give different results for (A=+0, B=-0) and
4538 (A=-0, B=+0), but the transformed expressions do not.
4540 The first two transformations are correct if either A or B
4541 is a NaN. In the first transformation, the condition will
4542 be false, and B will indeed be chosen. In the case of the
4543 second transformation, the condition A != B will be true,
4544 and A will be chosen.
4546 The conversions to max() and min() are not correct if B is
4547 a number and A is not. The conditions in the original
4548 expressions will be false, so all four give B. The min()
4549 and max() versions would give a NaN instead. */
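/* For example, (x <= y ? x : y) can become MIN_EXPR <x, y> and
   (x >= y ? x : y) can become MAX_EXPR <x, y> when neither NaNs nor
   signed zeros need to be honored; with x = +0.0 and y = -0.0 the
   original and the MIN/MAX forms can yield zeros of opposite sign.  */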
4550 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4551 /* Avoid these transformations if the COND_EXPR may be used
4552 as an lvalue in the C++ front-end. PR c++/19199. */
4554 || (strcmp (lang_hooks.name, "GNU C++") != 0
4555 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4556 || ! maybe_lvalue_p (arg1)
4557 || ! maybe_lvalue_p (arg2)))
4559 tree comp_op0 = arg00;
4560 tree comp_op1 = arg01;
4561 tree comp_type = TREE_TYPE (comp_op0);
4563 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4564 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4574 return pedantic_non_lvalue (fold_convert (type, arg2));
4576 return pedantic_non_lvalue (fold_convert (type, arg1));
4581 /* In C++ a ?: expression can be an lvalue, so put first the
4582 operand which will be used if the two are equal,
4583 so that we can convert this back to the
4584 corresponding COND_EXPR. */
4585 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4587 comp_op0 = fold_convert (comp_type, comp_op0);
4588 comp_op1 = fold_convert (comp_type, comp_op1);
4589 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4590 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4591 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4592 return pedantic_non_lvalue (fold_convert (type, tem));
4599 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4601 comp_op0 = fold_convert (comp_type, comp_op0);
4602 comp_op1 = fold_convert (comp_type, comp_op1);
4603 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4604 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4605 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4606 return pedantic_non_lvalue (fold_convert (type, tem));
4610 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4611 return pedantic_non_lvalue (fold_convert (type, arg2));
4614 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4615 return pedantic_non_lvalue (fold_convert (type, arg1));
4618 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4623 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4624 we might still be able to simplify this. For example,
4625 if C1 is one less or one more than C2, this might have started
4626 out as a MIN or MAX and been transformed by this function.
4627 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
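/* For instance, with a less-than comparison, (x < 5 ? x : 4) has
   C1 == C2 + 1 and may have started out as MIN (x, 4), so it is
   turned back into that MIN_EXPR below; the MAX_EXPR cases are
   analogous.  */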
4629 if (INTEGRAL_TYPE_P (type)
4630 && TREE_CODE (arg01) == INTEGER_CST
4631 && TREE_CODE (arg2) == INTEGER_CST)
4635 /* We can replace A with C1 in this case. */
4636 arg1 = fold_convert (type, arg01);
4637 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4640 /* If C1 is C2 + 1, this is min(A, C2). */
4641 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4643 && operand_equal_p (arg01,
4644 const_binop (PLUS_EXPR, arg2,
4645 integer_one_node, 0),
4647 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4652 /* If C1 is C2 - 1, this is min(A, C2). */
4653 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4655 && operand_equal_p (arg01,
4656 const_binop (MINUS_EXPR, arg2,
4657 integer_one_node, 0),
4659 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4664 /* If C1 is C2 - 1, this is max(A, C2). */
4665 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4667 && operand_equal_p (arg01,
4668 const_binop (MINUS_EXPR, arg2,
4669 integer_one_node, 0),
4671 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4676 /* If C1 is C2 + 1, this is max(A, C2). */
4677 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4679 && operand_equal_p (arg01,
4680 const_binop (PLUS_EXPR, arg2,
4681 integer_one_node, 0),
4683 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4697 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4698 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4701 /* EXP is some logical combination of boolean tests. See if we can
4702 merge it into some range test. Return the new tree if so. */
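/* A typical case is "ch >= '0' && ch <= '9'": both operands describe
   ranges of the same operand, so the pair can be replaced by a single
   range check built with build_range_check.  */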
4705 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4707 int or_op = (code == TRUTH_ORIF_EXPR
4708 || code == TRUTH_OR_EXPR);
4709 int in0_p, in1_p, in_p;
4710 tree low0, low1, low, high0, high1, high;
4711 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4712 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4715 /* If this is an OR operation, invert both sides; we will invert
4716 again at the end. */
4718 in0_p = ! in0_p, in1_p = ! in1_p;
4720 /* If both expressions are the same, if we can merge the ranges, and we
4721 can build the range test, return it or it inverted. If one of the
4722 ranges is always true or always false, consider it to be the same
4723 expression as the other. */
4724 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4725 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4727 && 0 != (tem = (build_range_check (type,
4729 : rhs != 0 ? rhs : integer_zero_node,
4731 return or_op ? invert_truthvalue (tem) : tem;
4733 /* On machines where the branch cost is expensive, if this is a
4734 short-circuited branch and the underlying object on both sides
4735 is the same, make a non-short-circuit operation. */
4736 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4737 && lhs != 0 && rhs != 0
4738 && (code == TRUTH_ANDIF_EXPR
4739 || code == TRUTH_ORIF_EXPR)
4740 && operand_equal_p (lhs, rhs, 0))
4742 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4743 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4744 which cases we can't do this. */
4745 if (simple_operand_p (lhs))
4746 return build2 (code == TRUTH_ANDIF_EXPR
4747 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4750 else if (lang_hooks.decls.global_bindings_p () == 0
4751 && ! CONTAINS_PLACEHOLDER_P (lhs))
4753 tree common = save_expr (lhs);
4755 if (0 != (lhs = build_range_check (type, common,
4756 or_op ? ! in0_p : in0_p,
4758 && (0 != (rhs = build_range_check (type, common,
4759 or_op ? ! in1_p : in1_p,
4761 return build2 (code == TRUTH_ANDIF_EXPR
4762 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4770 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4771 bit value. Arrange things so the extra bits will be set to zero if and
4772 only if C is sign-extended to its full width. If MASK is nonzero,
4773 it is an INTEGER_CST that should be AND'ed with the extra bits. */
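/* Illustration (32-bit mode, a signed 8-bit field, no MASK): the
   sign-extended constant 0xffffff80 is mapped to 0x00000080, while the
   zero-extended constant 0x00000080 is mapped to 0xffffff80, so the
   extra bits of the result are zero exactly when C was sign-extended.  */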
4776 unextend (tree c, int p, int unsignedp, tree mask)
4778 tree type = TREE_TYPE (c);
4779 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4782 if (p == modesize || unsignedp)
4785 /* We work by getting just the sign bit into the low-order bit, then
4786 into the high-order bit, then sign-extend. We then XOR that value with C. */
4788 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4789 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4791 /* We must use a signed type in order to get an arithmetic right shift.
4792 However, we must also avoid introducing accidental overflows, so that
4793 a subsequent call to integer_zerop will work. Hence we must
4794 do the type conversion here. At this point, the constant is either
4795 zero or one, and the conversion to a signed type can never overflow.
4796 We could get an overflow if this conversion is done anywhere else. */
4797 if (TYPE_UNSIGNED (type))
4798 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4800 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4801 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4803 temp = const_binop (BIT_AND_EXPR, temp,
4804 fold_convert (TREE_TYPE (c), mask), 0);
4805 /* If necessary, convert the type back to match the type of C. */
4806 if (TYPE_UNSIGNED (type))
4807 temp = fold_convert (type, temp);
4809 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4812 /* Find ways of folding logical expressions of LHS and RHS:
4813 Try to merge two comparisons to the same innermost item.
4814 Look for range tests like "ch >= '0' && ch <= '9'".
4815 Look for combinations of simple terms on machines with expensive branches
4816 and evaluate the RHS unconditionally.
4818 For example, if we have p->a == 2 && p->b == 4 and we can make an
4819 object large enough to span both A and B, we can do this with a comparison
4820 against the object ANDed with a mask.
4822 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4823 operations to do this with one comparison.
4825 We check for both normal comparisons and the BIT_AND_EXPRs made by
4826 this function and the one above.
4828 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4829 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4831 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4834 We return the simplified tree or 0 if no optimization is possible. */
4837 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4839 /* If this is the "or" of two comparisons, we can do something if
4840 the comparisons are NE_EXPR. If this is the "and", we can do something
4841 if the comparisons are EQ_EXPR. I.e.,
4842 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4844 WANTED_CODE is this operation code. For single bit fields, we can
4845 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4846 comparison for one-bit fields. */
4848 enum tree_code wanted_code;
4849 enum tree_code lcode, rcode;
4850 tree ll_arg, lr_arg, rl_arg, rr_arg;
4851 tree ll_inner, lr_inner, rl_inner, rr_inner;
4852 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4853 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4854 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4855 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4856 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4857 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4858 enum machine_mode lnmode, rnmode;
4859 tree ll_mask, lr_mask, rl_mask, rr_mask;
4860 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4861 tree l_const, r_const;
4862 tree lntype, rntype, result;
4863 int first_bit, end_bit;
4865 tree orig_lhs = lhs, orig_rhs = rhs;
4866 enum tree_code orig_code = code;
4868 /* Start by getting the comparison codes. Fail if anything is volatile.
4869 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4870 it were surrounded with a NE_EXPR. */
4872 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4875 lcode = TREE_CODE (lhs);
4876 rcode = TREE_CODE (rhs);
4878 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4880 lhs = build2 (NE_EXPR, truth_type, lhs,
4881 build_int_cst (TREE_TYPE (lhs), 0));
4885 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4887 rhs = build2 (NE_EXPR, truth_type, rhs,
4888 build_int_cst (TREE_TYPE (rhs), 0));
4892 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4893 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4896 ll_arg = TREE_OPERAND (lhs, 0);
4897 lr_arg = TREE_OPERAND (lhs, 1);
4898 rl_arg = TREE_OPERAND (rhs, 0);
4899 rr_arg = TREE_OPERAND (rhs, 1);
4901 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4902 if (simple_operand_p (ll_arg)
4903 && simple_operand_p (lr_arg))
4906 if (operand_equal_p (ll_arg, rl_arg, 0)
4907 && operand_equal_p (lr_arg, rr_arg, 0))
4909 result = combine_comparisons (code, lcode, rcode,
4910 truth_type, ll_arg, lr_arg);
4914 else if (operand_equal_p (ll_arg, rr_arg, 0)
4915 && operand_equal_p (lr_arg, rl_arg, 0))
4917 result = combine_comparisons (code, lcode,
4918 swap_tree_comparison (rcode),
4919 truth_type, ll_arg, lr_arg);
4925 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4926 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4928 /* If the RHS can be evaluated unconditionally and its operands are
4929 simple, it wins to evaluate the RHS unconditionally on machines
4930 with expensive branches. In this case, this isn't a comparison
4931 that can be merged. Avoid doing this if the RHS is a floating-point
4932 comparison since those can trap. */
4934 if (BRANCH_COST >= 2
4935 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4936 && simple_operand_p (rl_arg)
4937 && simple_operand_p (rr_arg))
4939 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4940 if (code == TRUTH_OR_EXPR
4941 && lcode == NE_EXPR && integer_zerop (lr_arg)
4942 && rcode == NE_EXPR && integer_zerop (rr_arg)
4943 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4944 return build2 (NE_EXPR, truth_type,
4945 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4947 build_int_cst (TREE_TYPE (ll_arg), 0));
4949 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4950 if (code == TRUTH_AND_EXPR
4951 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4952 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4953 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4954 return build2 (EQ_EXPR, truth_type,
4955 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4957 build_int_cst (TREE_TYPE (ll_arg), 0));
4959 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4961 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4962 return build2 (code, truth_type, lhs, rhs);
4967 /* See if the comparisons can be merged. Then get all the parameters for
4970 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4971 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4975 ll_inner = decode_field_reference (ll_arg,
4976 &ll_bitsize, &ll_bitpos, &ll_mode,
4977 &ll_unsignedp, &volatilep, &ll_mask,
4979 lr_inner = decode_field_reference (lr_arg,
4980 &lr_bitsize, &lr_bitpos, &lr_mode,
4981 &lr_unsignedp, &volatilep, &lr_mask,
4983 rl_inner = decode_field_reference (rl_arg,
4984 &rl_bitsize, &rl_bitpos, &rl_mode,
4985 &rl_unsignedp, &volatilep, &rl_mask,
4987 rr_inner = decode_field_reference (rr_arg,
4988 &rr_bitsize, &rr_bitpos, &rr_mode,
4989 &rr_unsignedp, &volatilep, &rr_mask,
4992 /* The inner operation on the lhs of each comparison must be the
4993 same if we are to be able to do anything.
4994 Then see if we have constants. If not, the same must be true for
4996 if (volatilep || ll_inner == 0 || rl_inner == 0
4997 || ! operand_equal_p (ll_inner, rl_inner, 0))
5000 if (TREE_CODE (lr_arg) == INTEGER_CST
5001 && TREE_CODE (rr_arg) == INTEGER_CST)
5002 l_const = lr_arg, r_const = rr_arg;
5003 else if (lr_inner == 0 || rr_inner == 0
5004 || ! operand_equal_p (lr_inner, rr_inner, 0))
5007 l_const = r_const = 0;
5009 /* If either comparison code is not correct for our logical operation,
5010 fail. However, we can convert a one-bit comparison against zero into
5011 the opposite comparison against that bit being set in the field. */
5013 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5014 if (lcode != wanted_code)
5016 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5018 /* Make the left operand unsigned, since we are only interested
5019 in the value of one bit. Otherwise we are doing the wrong thing below. */
5028 /* This is analogous to the code for l_const above. */
5029 if (rcode != wanted_code)
5031 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5040 /* After this point all optimizations will generate bit-field
5041 references, which we might not want. */
5042 if (! lang_hooks.can_use_bit_fields_p ())
5045 /* See if we can find a mode that contains both fields being compared on
5046 the left. If we can't, fail. Otherwise, update all constants and masks
5047 to be relative to a field of that size. */
5048 first_bit = MIN (ll_bitpos, rl_bitpos);
5049 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5050 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5051 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5053 if (lnmode == VOIDmode)
5056 lnbitsize = GET_MODE_BITSIZE (lnmode);
5057 lnbitpos = first_bit & ~ (lnbitsize - 1);
5058 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5059 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5061 if (BYTES_BIG_ENDIAN)
5063 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5064 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5067 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5068 size_int (xll_bitpos), 0);
5069 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5070 size_int (xrl_bitpos), 0);
5074 l_const = fold_convert (lntype, l_const);
5075 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5076 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5077 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5078 fold_build1 (BIT_NOT_EXPR,
5082 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5084 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5089 r_const = fold_convert (lntype, r_const);
5090 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5091 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5092 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5093 fold_build1 (BIT_NOT_EXPR,
5097 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5099 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5103 /* If the right sides are not constant, do the same for them. Also,
5104 disallow this optimization if a size or signedness mismatch occurs
5105 between the left and right sides. */
5108 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5109 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5110 /* Make sure the two fields on the right
5111 correspond to the left without being swapped. */
5112 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5115 first_bit = MIN (lr_bitpos, rr_bitpos);
5116 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5117 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5118 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5120 if (rnmode == VOIDmode)
5123 rnbitsize = GET_MODE_BITSIZE (rnmode);
5124 rnbitpos = first_bit & ~ (rnbitsize - 1);
5125 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5126 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5128 if (BYTES_BIG_ENDIAN)
5130 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5131 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5134 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5135 size_int (xlr_bitpos), 0);
5136 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5137 size_int (xrr_bitpos), 0);
5139 /* Make a mask that corresponds to both fields being compared.
5140 Do this for both items being compared. If the operands are the
5141 same size and the bits being compared are in the same position
5142 then we can do this by masking both and comparing the masked results. */
5144 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5145 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5146 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5148 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5149 ll_unsignedp || rl_unsignedp);
5150 if (! all_ones_mask_p (ll_mask, lnbitsize))
5151 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5153 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5154 lr_unsignedp || rr_unsignedp);
5155 if (! all_ones_mask_p (lr_mask, rnbitsize))
5156 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5158 return build2 (wanted_code, truth_type, lhs, rhs);
5161 /* There is still another way we can do something: If both pairs of
5162 fields being compared are adjacent, we may be able to make a wider
5163 field containing them both.
5165 Note that we still must mask the lhs/rhs expressions. Furthermore,
5166 the mask must be shifted to account for the shift done by
5167 make_bit_field_ref. */
5168 if ((ll_bitsize + ll_bitpos == rl_bitpos
5169 && lr_bitsize + lr_bitpos == rr_bitpos)
5170 || (ll_bitpos == rl_bitpos + rl_bitsize
5171 && lr_bitpos == rr_bitpos + rr_bitsize))
5175 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5176 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5177 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5178 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5180 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5181 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5182 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5183 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5185 /* Convert to the smaller type before masking out unwanted bits. */
5187 if (lntype != rntype)
5189 if (lnbitsize > rnbitsize)
5191 lhs = fold_convert (rntype, lhs);
5192 ll_mask = fold_convert (rntype, ll_mask);
5195 else if (lnbitsize < rnbitsize)
5197 rhs = fold_convert (lntype, rhs);
5198 lr_mask = fold_convert (lntype, lr_mask);
5203 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5204 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5206 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5207 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5209 return build2 (wanted_code, truth_type, lhs, rhs);
5215 /* Handle the case of comparisons with constants. If there is something in
5216 common between the masks, those bits of the constants must be the same.
5217 If not, the condition is always false. Test for this to avoid generating
5218 incorrect code below. */
5219 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5220 if (! integer_zerop (result)
5221 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5222 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5224 if (wanted_code == NE_EXPR)
5226 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5227 return constant_boolean_node (true, truth_type);
5231 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5232 return constant_boolean_node (false, truth_type);
5236 /* Construct the expression we will return. First get the component
5237 reference we will make. Unless the mask is all ones the width of
5238 that field, perform the mask operation. Then compare with the merged constant. */
5240 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5241 ll_unsignedp || rl_unsignedp);
5243 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5244 if (! all_ones_mask_p (ll_mask, lnbitsize))
5245 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5247 return build2 (wanted_code, truth_type, result,
5248 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5251 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5255 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5258 enum tree_code op_code;
5259 tree comp_const = op1;
5261 int consts_equal, consts_lt;
5264 STRIP_SIGN_NOPS (arg0);
5266 op_code = TREE_CODE (arg0);
5267 minmax_const = TREE_OPERAND (arg0, 1);
5268 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5269 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5270 inner = TREE_OPERAND (arg0, 0);
5272 /* If something does not permit us to optimize, return the original tree. */
5273 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5274 || TREE_CODE (comp_const) != INTEGER_CST
5275 || TREE_CONSTANT_OVERFLOW (comp_const)
5276 || TREE_CODE (minmax_const) != INTEGER_CST
5277 || TREE_CONSTANT_OVERFLOW (minmax_const))
5280 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5281 and GT_EXPR, doing the rest with recursive calls using logical
5285 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5287 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5290 return invert_truthvalue (tem);
5296 fold_build2 (TRUTH_ORIF_EXPR, type,
5297 optimize_minmax_comparison
5298 (EQ_EXPR, type, arg0, comp_const),
5299 optimize_minmax_comparison
5300 (GT_EXPR, type, arg0, comp_const));
5303 if (op_code == MAX_EXPR && consts_equal)
5304 /* MAX (X, 0) == 0 -> X <= 0 */
5305 return fold_build2 (LE_EXPR, type, inner, comp_const);
5307 else if (op_code == MAX_EXPR && consts_lt)
5308 /* MAX (X, 0) == 5 -> X == 5 */
5309 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5311 else if (op_code == MAX_EXPR)
5312 /* MAX (X, 0) == -1 -> false */
5313 return omit_one_operand (type, integer_zero_node, inner);
5315 else if (consts_equal)
5316 /* MIN (X, 0) == 0 -> X >= 0 */
5317 return fold_build2 (GE_EXPR, type, inner, comp_const);
5320 /* MIN (X, 0) == 5 -> false */
5321 return omit_one_operand (type, integer_zero_node, inner);
5324 /* MIN (X, 0) == -1 -> X == -1 */
5325 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5328 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5329 /* MAX (X, 0) > 0 -> X > 0
5330 MAX (X, 0) > 5 -> X > 5 */
5331 return fold_build2 (GT_EXPR, type, inner, comp_const);
5333 else if (op_code == MAX_EXPR)
5334 /* MAX (X, 0) > -1 -> true */
5335 return omit_one_operand (type, integer_one_node, inner);
5337 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5338 /* MIN (X, 0) > 0 -> false
5339 MIN (X, 0) > 5 -> false */
5340 return omit_one_operand (type, integer_zero_node, inner);
5343 /* MIN (X, 0) > -1 -> X > -1 */
5344 return fold_build2 (GT_EXPR, type, inner, comp_const);
5351 /* T is an integer expression that is being multiplied, divided, or taken a
5352 modulus (CODE says which and what kind of divide or modulus) by a
5353 constant C. See if we can eliminate that operation by folding it with
5354 other operations already in T. WIDE_TYPE, if non-null, is a type that
5355 should be used for the computation if wider than our type.
5357 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5358 (X * 2) + (Y * 4). We must, however, be assured that either the original
5359 expression would not overflow or that overflow is undefined for the type
5360 in the language in question.
5362 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5363 the machine has a multiply-accumulate insn or that this is part of an
5364 addressing calculation.
5366 If we return a non-null expression, it is an equivalent form of the
5367 original computation, but need not be in the original type. */
5370 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5372 /* To avoid exponential search depth, refuse to allow recursion past
5373 three levels. Beyond that (1) it's highly unlikely that we'll find
5374 something interesting and (2) we've probably processed it before
5375 when we built the inner expression. */
5384 ret = extract_muldiv_1 (t, c, code, wide_type);
5391 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5393 tree type = TREE_TYPE (t);
5394 enum tree_code tcode = TREE_CODE (t);
5395 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5396 > GET_MODE_SIZE (TYPE_MODE (type)))
5397 ? wide_type : type);
5399 int same_p = tcode == code;
5400 tree op0 = NULL_TREE, op1 = NULL_TREE;
5402 /* Don't deal with constants of zero here; they confuse the code below. */
5403 if (integer_zerop (c))
5406 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5407 op0 = TREE_OPERAND (t, 0);
5409 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5410 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5412 /* Note that we need not handle conditional operations here since fold
5413 already handles those cases. So just do arithmetic here. */
5417 /* For a constant, we can always simplify if we are a multiply
5418 or (for divide and modulus) if it is a multiple of our constant. */
5419 if (code == MULT_EXPR
5420 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5421 return const_binop (code, fold_convert (ctype, t),
5422 fold_convert (ctype, c), 0);
5425 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5426 /* If op0 is an expression ... */
5427 if ((COMPARISON_CLASS_P (op0)
5428 || UNARY_CLASS_P (op0)
5429 || BINARY_CLASS_P (op0)
5430 || EXPRESSION_CLASS_P (op0))
5431 /* ... and is unsigned, and its type is smaller than ctype,
5432 then we cannot pass through as widening. */
5433 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5434 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5435 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5436 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5437 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5438 /* ... or this is a truncation (t is narrower than op0),
5439 then we cannot pass through this narrowing. */
5440 || (GET_MODE_SIZE (TYPE_MODE (type))
5441 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5442 /* ... or signedness changes for division or modulus,
5443 then we cannot pass through this conversion. */
5444 || (code != MULT_EXPR
5445 && (TYPE_UNSIGNED (ctype)
5446 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5449 /* Pass the constant down and see if we can make a simplification. If
5450 we can, replace this expression with the inner simplification for
5451 possible later conversion to our or some other type. */
5452 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5453 && TREE_CODE (t2) == INTEGER_CST
5454 && ! TREE_CONSTANT_OVERFLOW (t2)
5455 && (0 != (t1 = extract_muldiv (op0, t2, code,
5457 ? ctype : NULL_TREE))))
5462 /* If widening the type changes it from signed to unsigned, then we
5463 must avoid building ABS_EXPR itself as unsigned. */
5464 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5466 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5467 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5469 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5470 return fold_convert (ctype, t1);
5476 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5477 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5480 case MIN_EXPR: case MAX_EXPR:
5481 /* If widening the type changes the signedness, then we can't perform
5482 this optimization as that changes the result. */
5483 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5486 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5487 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5488 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5490 if (tree_int_cst_sgn (c) < 0)
5491 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5493 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5494 fold_convert (ctype, t2));
5498 case LSHIFT_EXPR: case RSHIFT_EXPR:
5499 /* If the second operand is constant, this is a multiplication
5500 or floor division by a power of two, so we can treat it that
5501 way unless the multiplier or divisor overflows. Signed
5502 left-shift overflow is implementation-defined rather than
5503 undefined in C90, so do not convert signed left shift into multiplication. */
5505 if (TREE_CODE (op1) == INTEGER_CST
5506 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5507 /* const_binop may not detect overflow correctly,
5508 so check for it explicitly here. */
5509 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5510 && TREE_INT_CST_HIGH (op1) == 0
5511 && 0 != (t1 = fold_convert (ctype,
5512 const_binop (LSHIFT_EXPR,
5515 && ! TREE_OVERFLOW (t1))
5516 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5517 ? MULT_EXPR : FLOOR_DIV_EXPR,
5518 ctype, fold_convert (ctype, op0), t1),
5519 c, code, wide_type);
5522 case PLUS_EXPR: case MINUS_EXPR:
5523 /* See if we can eliminate the operation on both sides. If we can, we
5524 can return a new PLUS or MINUS. If we can't, the only remaining
5525 cases where we can do anything are if the second operand is a constant. */
5527 t1 = extract_muldiv (op0, c, code, wide_type);
5528 t2 = extract_muldiv (op1, c, code, wide_type);
5529 if (t1 != 0 && t2 != 0
5530 && (code == MULT_EXPR
5531 /* If not multiplication, we can only do this if both operands
5532 are divisible by c. */
5533 || (multiple_of_p (ctype, op0, c)
5534 && multiple_of_p (ctype, op1, c))))
5535 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5536 fold_convert (ctype, t2));
5538 /* If this was a subtraction, negate OP1 and set it to be an addition.
5539 This simplifies the logic below. */
5540 if (tcode == MINUS_EXPR)
5541 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5543 if (TREE_CODE (op1) != INTEGER_CST)
5546 /* If either OP1 or C is negative, this optimization is not safe for
5547 some of the division and remainder types, while for others we need
5548 to change the code. */
5549 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5551 if (code == CEIL_DIV_EXPR)
5552 code = FLOOR_DIV_EXPR;
5553 else if (code == FLOOR_DIV_EXPR)
5554 code = CEIL_DIV_EXPR;
5555 else if (code != MULT_EXPR
5556 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5560 /* If it's a multiply or a division/modulus operation of a multiple
5561 of our constant, do the operation and verify it doesn't overflow. */
5562 if (code == MULT_EXPR
5563 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5565 op1 = const_binop (code, fold_convert (ctype, op1),
5566 fold_convert (ctype, c), 0);
5567 /* We allow the constant to overflow with wrapping semantics. */
5569 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5575 /* If we have an unsigned type that is not a sizetype, we cannot widen
5576 the operation since it will change the result if the original
5577 computation overflowed. */
5578 if (TYPE_UNSIGNED (ctype)
5579 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5583 /* If we were able to eliminate our operation from the first side,
5584 apply our operation to the second side and reform the PLUS. */
5585 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5586 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5588 /* The last case is if we are a multiply. In that case, we can
5589 apply the distributive law to commute the multiply and addition
5590 if the multiplication of the constants doesn't overflow. */
5591 if (code == MULT_EXPR)
5592 return fold_build2 (tcode, ctype,
5593 fold_build2 (code, ctype,
5594 fold_convert (ctype, op0),
5595 fold_convert (ctype, c)),
5601 /* We have a special case here if we are doing something like
5602 (C * 8) % 4 since we know that's zero. */
5603 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5604 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5605 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5606 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5607 return omit_one_operand (type, integer_zero_node, op0);
5609 /* ... fall through ... */
5611 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5612 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5613 /* If we can extract our operation from the LHS, do so and return a
5614 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5615 do something only if the second operand is a constant. */
5617 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5618 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5619 fold_convert (ctype, op1));
5620 else if (tcode == MULT_EXPR && code == MULT_EXPR
5621 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5622 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5623 fold_convert (ctype, t1));
5624 else if (TREE_CODE (op1) != INTEGER_CST)
5627 /* If these are the same operation types, we can associate them
5628 assuming no overflow. */
5630 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5631 fold_convert (ctype, c), 0))
5632 && ! TREE_OVERFLOW (t1))
5633 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5635 /* If these operations "cancel" each other, we have the main
5636 optimizations of this pass, which occur when either constant is a
5637 multiple of the other, in which case we replace this with either an
5638 operation of CODE or TCODE.
5640 If we have an unsigned type that is not a sizetype, we cannot do
5641 this since it will change the result if the original computation
5643 if ((! TYPE_UNSIGNED (ctype)
5644 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5646 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5647 || (tcode == MULT_EXPR
5648 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5649 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5651 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5652 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5653 fold_convert (ctype,
5654 const_binop (TRUNC_DIV_EXPR,
5656 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5657 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5658 fold_convert (ctype,
5659 const_binop (TRUNC_DIV_EXPR,
5671 /* Return a node which has the indicated constant VALUE (either 0 or
5672 1), and is of the indicated TYPE. */
5675 constant_boolean_node (int value, tree type)
5677 if (type == integer_type_node)
5678 return value ? integer_one_node : integer_zero_node;
5679 else if (type == boolean_type_node)
5680 return value ? boolean_true_node : boolean_false_node;
5682 return build_int_cst (type, value);
5686 /* Return true if expr looks like an ARRAY_REF and set base and
5687 offset to the appropriate trees. If there is no offset,
5688 offset is set to NULL_TREE. Base will be canonicalized to
5689 something you can get the element type from using
5690 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5691 in bytes to the base. */
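/* For example, &a[i] yields base "a" and offset i times the element
   size, while a plain pointer variable p yields base "p" and a
   NULL_TREE offset.  */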
5694 extract_array_ref (tree expr, tree *base, tree *offset)
5696 /* One canonical form is a PLUS_EXPR with the first
5697 argument being an ADDR_EXPR with a possible NOP_EXPR
5699 if (TREE_CODE (expr) == PLUS_EXPR)
5701 tree op0 = TREE_OPERAND (expr, 0);
5702 tree inner_base, dummy1;
5703 /* Strip NOP_EXPRs here because the C frontends and/or
5704 folders may present us with (int *)&x.a + 4B.
5706 if (extract_array_ref (op0, &inner_base, &dummy1))
5709 if (dummy1 == NULL_TREE)
5710 *offset = TREE_OPERAND (expr, 1);
5712 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5713 dummy1, TREE_OPERAND (expr, 1));
5717 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5718 which we transform into an ADDR_EXPR with appropriate
5719 offset. For other arguments to the ADDR_EXPR we assume
5720 zero offset and as such do not care about the ADDR_EXPR
5721 type and strip possible nops from it. */
5722 else if (TREE_CODE (expr) == ADDR_EXPR)
5724 tree op0 = TREE_OPERAND (expr, 0);
5725 if (TREE_CODE (op0) == ARRAY_REF)
5727 tree idx = TREE_OPERAND (op0, 1);
5728 *base = TREE_OPERAND (op0, 0);
5729 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5730 array_ref_element_size (op0));
5734 /* Handle array-to-pointer decay as &a. */
5735 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5736 *base = TREE_OPERAND (expr, 0);
5739 *offset = NULL_TREE;
5743 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5744 else if (SSA_VAR_P (expr)
5745 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5748 *offset = NULL_TREE;
5756 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5757 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5758 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5759 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5760 COND is the first argument to CODE; otherwise (as in the example
5761 given here), it is the second argument. TYPE is the type of the
5762 original expression. Return NULL_TREE if no simplification is
5766 fold_binary_op_with_conditional_arg (enum tree_code code,
5767 tree type, tree op0, tree op1,
5768 tree cond, tree arg, int cond_first_p)
5770 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5771 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5772 tree test, true_value, false_value;
5773 tree lhs = NULL_TREE;
5774 tree rhs = NULL_TREE;
5776 /* This transformation is only worthwhile if we don't have to wrap
5777 arg in a SAVE_EXPR, and the operation can be simplified on at least
5778 one of the branches once it is pushed inside the COND_EXPR.
5779 if (!TREE_CONSTANT (arg))
5782 if (TREE_CODE (cond) == COND_EXPR)
5784 test = TREE_OPERAND (cond, 0);
5785 true_value = TREE_OPERAND (cond, 1);
5786 false_value = TREE_OPERAND (cond, 2);
5787 /* If this operand is an expression that throws (its type is void),
5788 it does not make sense to try to perform a logical or arithmetic operation on it. */
5790 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5792 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5797 tree testtype = TREE_TYPE (cond);
5799 true_value = constant_boolean_node (true, testtype);
5800 false_value = constant_boolean_node (false, testtype);
5803 arg = fold_convert (arg_type, arg);
5806 true_value = fold_convert (cond_type, true_value);
5808 lhs = fold_build2 (code, type, true_value, arg);
5810 lhs = fold_build2 (code, type, arg, true_value);
5814 false_value = fold_convert (cond_type, false_value);
5816 rhs = fold_build2 (code, type, false_value, arg);
5818 rhs = fold_build2 (code, type, arg, false_value);
5821 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5822 return fold_convert (type, test);
5826 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5828 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5829 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5830 ADDEND is the same as X.
5832 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5833 and finite. The problematic cases are when X is zero, and its mode
5834 has signed zeros. In the case of rounding towards -infinity,
5835 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5836 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5839 fold_real_zero_addition_p (tree type, tree addend, int negate)
5841 if (!real_zerop (addend))
5844 /* Don't allow the fold with -fsignaling-nans. */
5845 if (HONOR_SNANS (TYPE_MODE (type)))
5848 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5849 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5852 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5853 if (TREE_CODE (addend) == REAL_CST
5854 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5857 /* The mode has signed zeros, and we have to honor their sign.
5858 In this situation, there is only one case we can return true for.
5859 X - 0 is the same as X unless rounding towards -infinity is
5861 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5864 /* Subroutine of fold() that checks comparisons of built-in math
5865 functions against real constants.
5867 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5868 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5869 is the type of the result and ARG0 and ARG1 are the operands of the
5870 comparison. ARG1 must be a TREE_REAL_CST.
5872 The function returns the constant folded tree if a simplification
5873 can be made, and NULL_TREE otherwise. */
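/* For instance, assuming c*c is representable in the mode,
   sqrt(x) > 2.0 can be folded to x > 4.0, and sqrt(x) < -1.0 is
   always false.  */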
5876 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5877 tree type, tree arg0, tree arg1)
5881 if (BUILTIN_SQRT_P (fcode))
5883 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5884 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5886 c = TREE_REAL_CST (arg1);
5887 if (REAL_VALUE_NEGATIVE (c))
5889 /* sqrt(x) < y is always false, if y is negative. */
5890 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5891 return omit_one_operand (type, integer_zero_node, arg);
5893 /* sqrt(x) > y is always true, if y is negative and we
5894 don't care about NaNs, i.e. negative values of x. */
5895 if (code == NE_EXPR || !HONOR_NANS (mode))
5896 return omit_one_operand (type, integer_one_node, arg);
5898 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5899 return fold_build2 (GE_EXPR, type, arg,
5900 build_real (TREE_TYPE (arg), dconst0));
5902 else if (code == GT_EXPR || code == GE_EXPR)
5906 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5907 real_convert (&c2, mode, &c2);
5909 if (REAL_VALUE_ISINF (c2))
5911 /* sqrt(x) > y is x == +Inf, when y is very large. */
5912 if (HONOR_INFINITIES (mode))
5913 return fold_build2 (EQ_EXPR, type, arg,
5914 build_real (TREE_TYPE (arg), c2));
5916 /* sqrt(x) > y is always false, when y is very large
5917 and we don't care about infinities. */
5918 return omit_one_operand (type, integer_zero_node, arg);
5921 /* sqrt(x) > c is the same as x > c*c. */
5922 return fold_build2 (code, type, arg,
5923 build_real (TREE_TYPE (arg), c2));
5925 else if (code == LT_EXPR || code == LE_EXPR)
5929 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5930 real_convert (&c2, mode, &c2);
5932 if (REAL_VALUE_ISINF (c2))
5934 /* sqrt(x) < y is always true, when y is a very large
5935 value and we don't care about NaNs or Infinities. */
5936 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5937 return omit_one_operand (type, integer_one_node, arg);
5939 /* sqrt(x) < y is x != +Inf when y is very large and we
5940 don't care about NaNs. */
5941 if (! HONOR_NANS (mode))
5942 return fold_build2 (NE_EXPR, type, arg,
5943 build_real (TREE_TYPE (arg), c2));
5945 /* sqrt(x) < y is x >= 0 when y is very large and we
5946 don't care about Infinities. */
5947 if (! HONOR_INFINITIES (mode))
5948 return fold_build2 (GE_EXPR, type, arg,
5949 build_real (TREE_TYPE (arg), dconst0));
5951 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5952 if (lang_hooks.decls.global_bindings_p () != 0
5953 || CONTAINS_PLACEHOLDER_P (arg))
5956 arg = save_expr (arg);
5957 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5958 fold_build2 (GE_EXPR, type, arg,
5959 build_real (TREE_TYPE (arg),
5961 fold_build2 (NE_EXPR, type, arg,
5962 build_real (TREE_TYPE (arg),
5966 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5967 if (! HONOR_NANS (mode))
5968 return fold_build2 (code, type, arg,
5969 build_real (TREE_TYPE (arg), c2));
5971 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5972 if (lang_hooks.decls.global_bindings_p () == 0
5973 && ! CONTAINS_PLACEHOLDER_P (arg))
5975 arg = save_expr (arg);
5976 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5977 fold_build2 (GE_EXPR, type, arg,
5978 build_real (TREE_TYPE (arg),
5980 fold_build2 (code, type, arg,
5981 build_real (TREE_TYPE (arg),
5990 /* Subroutine of fold() that optimizes comparisons against Infinities,
5991 either +Inf or -Inf.
5993 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5994 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5995 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5997 The function returns the constant folded tree if a simplification
5998 can be made, and NULL_TREE otherwise. */
6001 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6003 enum machine_mode mode;
6004 REAL_VALUE_TYPE max;
6008 mode = TYPE_MODE (TREE_TYPE (arg0));
6010 /* For negative infinity swap the sense of the comparison. */
6011 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6013 code = swap_tree_comparison (code);
6018 /* x > +Inf is always false, if we ignore sNaNs. */
6019 if (HONOR_SNANS (mode))
6021 return omit_one_operand (type, integer_zero_node, arg0);
6024 /* x <= +Inf is always true, if we don't care about NaNs. */
6025 if (! HONOR_NANS (mode))
6026 return omit_one_operand (type, integer_one_node, arg0);
6028 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6029 if (lang_hooks.decls.global_bindings_p () == 0
6030 && ! CONTAINS_PLACEHOLDER_P (arg0))
6032 arg0 = save_expr (arg0);
6033 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6039 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6040 real_maxval (&max, neg, mode);
6041 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6042 arg0, build_real (TREE_TYPE (arg0), max));
6045 /* x < +Inf is always equal to x <= DBL_MAX. */
6046 real_maxval (&max, neg, mode);
6047 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6048 arg0, build_real (TREE_TYPE (arg0), max));
6051 /* x != +Inf is always equal to !(x > DBL_MAX). */
6052 real_maxval (&max, neg, mode);
6053 if (! HONOR_NANS (mode))
6054 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6055 arg0, build_real (TREE_TYPE (arg0), max));
6057 /* The transformation below creates non-gimple code and thus is
6058 not appropriate if we are in gimple form. */
6062 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6063 arg0, build_real (TREE_TYPE (arg0), max));
6064 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6073 /* Subroutine of fold() that optimizes comparisons of a division by
6074 a nonzero integer constant against an integer constant, i.e.
6077 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6078 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6079 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6081 The function returns the constant folded tree if a simplification
6082 can be made, and NULL_TREE otherwise. */
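/* Rough example, assuming unsigned arithmetic with no overflow: for
   x/4 == 2 the bounds are lo = 8 and hi = 8 + 3 = 11, so the
   comparison folds to the range check 8 <= x && x <= 11.  */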
6085 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6087 tree prod, tmp, hi, lo;
6088 tree arg00 = TREE_OPERAND (arg0, 0);
6089 tree arg01 = TREE_OPERAND (arg0, 1);
6090 unsigned HOST_WIDE_INT lpart;
6091 HOST_WIDE_INT hpart;
6095 /* We have to do this the hard way to detect unsigned overflow.
6096 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6097 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6098 TREE_INT_CST_HIGH (arg01),
6099 TREE_INT_CST_LOW (arg1),
6100 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6101 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6102 prod = force_fit_type (prod, -1, overflow, false);
6103 neg_overflow = false;
6105 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6107 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6110 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6111 overflow = add_double (TREE_INT_CST_LOW (prod),
6112 TREE_INT_CST_HIGH (prod),
6113 TREE_INT_CST_LOW (tmp),
6114 TREE_INT_CST_HIGH (tmp),
6116 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6117 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6118 TREE_CONSTANT_OVERFLOW (prod));
6120 else if (tree_int_cst_sgn (arg01) >= 0)
6122 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6123 switch (tree_int_cst_sgn (arg1))
6126 neg_overflow = true;
6127 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6132 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6137 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6147 /* A negative divisor reverses the relational operators. */
6148 code = swap_tree_comparison (code);
6150 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6151 switch (tree_int_cst_sgn (arg1))
6154 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6159 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6164 neg_overflow = true;
6165 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6177 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6178 return omit_one_operand (type, integer_zero_node, arg00);
6179 if (TREE_OVERFLOW (hi))
6180 return fold_build2 (GE_EXPR, type, arg00, lo);
6181 if (TREE_OVERFLOW (lo))
6182 return fold_build2 (LE_EXPR, type, arg00, hi);
6183 return build_range_check (type, arg00, 1, lo, hi);
6186 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6187 return omit_one_operand (type, integer_one_node, arg00);
6188 if (TREE_OVERFLOW (hi))
6189 return fold_build2 (LT_EXPR, type, arg00, lo);
6190 if (TREE_OVERFLOW (lo))
6191 return fold_build2 (GT_EXPR, type, arg00, hi);
6192 return build_range_check (type, arg00, 0, lo, hi);
6195 if (TREE_OVERFLOW (lo))
6197 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6198 return omit_one_operand (type, tmp, arg00);
6200 return fold_build2 (LT_EXPR, type, arg00, lo);
6203 if (TREE_OVERFLOW (hi))
6205 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6206 return omit_one_operand (type, tmp, arg00);
6208 return fold_build2 (LE_EXPR, type, arg00, hi);
6211 if (TREE_OVERFLOW (hi))
6213 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6214 return omit_one_operand (type, tmp, arg00);
6216 return fold_build2 (GT_EXPR, type, arg00, hi);
6219 if (TREE_OVERFLOW (lo))
6221 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6222 return omit_one_operand (type, tmp, arg00);
6224 return fold_build2 (GE_EXPR, type, arg00, lo);
6234 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6235 equality/inequality test, then return a simplified form of the test
6236 using a sign test. Otherwise return NULL. TYPE is the desired
6240 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6243 /* If this is testing a single bit, we can optimize the test. */
6244 if ((code == NE_EXPR || code == EQ_EXPR)
6245 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6246 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6248 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6249 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6250 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6252 if (arg00 != NULL_TREE
6253 /* This is only a win if casting to a signed type is cheap,
6254 i.e. when arg00's type is not a partial mode. */
6255 && TYPE_PRECISION (TREE_TYPE (arg00))
6256 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6258 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6259 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6260 result_type, fold_convert (stype, arg00),
6261 build_int_cst (stype, 0));
6268 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6269 equality/inequality test, then return a simplified form of
6270 the test using shifts and logical operations. Otherwise return
6271 NULL. TYPE is the desired result type. */
6274 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6277 /* If this is testing a single bit, we can optimize the test. */
6278 if ((code == NE_EXPR || code == EQ_EXPR)
6279 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6280 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6282 tree inner = TREE_OPERAND (arg0, 0);
6283 tree type = TREE_TYPE (arg0);
6284 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6285 enum machine_mode operand_mode = TYPE_MODE (type);
6287 tree signed_type, unsigned_type, intermediate_type;
6290 /* First, see if we can fold the single bit test into a sign-bit
6292 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6297 /* Otherwise we have (A & C) != 0 where C is a single bit,
6298 convert that into ((A >> C2) & 1), where C2 = log2(C).
6299 Similarly for (A & C) == 0. */
6301 /* If INNER is a right shift of a constant and it plus BITNUM does
6302 not overflow, adjust BITNUM and INNER. */
6303 if (TREE_CODE (inner) == RSHIFT_EXPR
6304 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6305 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6306 && bitnum < TYPE_PRECISION (type)
6307 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6308 bitnum - TYPE_PRECISION (type)))
6310 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6311 inner = TREE_OPERAND (inner, 0);
6314 /* If we are going to be able to omit the AND below, we must do our
6315 operations as unsigned. If we must use the AND, we have a choice.
6316 Normally unsigned is faster, but for some machines signed is. */
6317 #ifdef LOAD_EXTEND_OP
6318 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6319 && !flag_syntax_only) ? 0 : 1;
6324 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6325 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6326 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6327 inner = fold_convert (intermediate_type, inner);
6330 inner = build2 (RSHIFT_EXPR, intermediate_type,
6331 inner, size_int (bitnum));
6333 if (code == EQ_EXPR)
6334 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6335 inner, integer_one_node);
6337 /* Put the AND last so it can combine with more things. */
6338 inner = build2 (BIT_AND_EXPR, intermediate_type,
6339 inner, integer_one_node);
6341 /* Make sure to return the proper type. */
6342 inner = fold_convert (result_type, inner);
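/* Illustrative sketch, not part of the folder: the shift form produced
   by fold_single_bit_test above.  The function name is hypothetical;
   K is assumed to be smaller than the width of unsigned int.  */

static int
single_bit_shift_example (unsigned int a, unsigned int k)
{
  /* (A & (1 << K)) != 0 has the same value as (A >> K) & 1.  */
  return ((a & (1u << k)) != 0) == (int) ((a >> k) & 1);
}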
6349 /* Check whether we are allowed to reorder operands arg0 and arg1,
6350 such that the evaluation of arg1 occurs before arg0. */
6353 reorder_operands_p (tree arg0, tree arg1)
6355 if (! flag_evaluation_order)
6357 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6359 return ! TREE_SIDE_EFFECTS (arg0)
6360 && ! TREE_SIDE_EFFECTS (arg1);
6363 /* Test whether it is preferable to swap two operands, ARG0 and
6364 ARG1, for example because ARG0 is an integer constant and ARG1
6365 isn't. If REORDER is true, only recommend swapping if we can
6366 evaluate the operands in reverse order. */
6369 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6371 STRIP_SIGN_NOPS (arg0);
6372 STRIP_SIGN_NOPS (arg1);
6374 if (TREE_CODE (arg1) == INTEGER_CST)
6376 if (TREE_CODE (arg0) == INTEGER_CST)
6379 if (TREE_CODE (arg1) == REAL_CST)
6381 if (TREE_CODE (arg0) == REAL_CST)
6384 if (TREE_CODE (arg1) == COMPLEX_CST)
6386 if (TREE_CODE (arg0) == COMPLEX_CST)
6389 if (TREE_CONSTANT (arg1))
6391 if (TREE_CONSTANT (arg0))
6397 if (reorder && flag_evaluation_order
6398 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6406 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6407 for commutative and comparison operators. Ensuring a canonical
6408 form allows the optimizers to find additional redundancies without
6409 having to explicitly check for both orderings. */
6410 if (TREE_CODE (arg0) == SSA_NAME
6411 && TREE_CODE (arg1) == SSA_NAME
6412 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6418 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6419 ARG0 is extended to a wider type. */
6422 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6424 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6426 tree shorter_type, outer_type;
6430 if (arg0_unw == arg0)
6432 shorter_type = TREE_TYPE (arg0_unw);
6434 #ifdef HAVE_canonicalize_funcptr_for_compare
6435 /* Disable this optimization if we're casting a function pointer
6436 type on targets that require function pointer canonicalization. */
6437 if (HAVE_canonicalize_funcptr_for_compare
6438 && TREE_CODE (shorter_type) == POINTER_TYPE
6439 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6443 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6446 arg1_unw = get_unwidened (arg1, shorter_type);
6448 /* If possible, express the comparison in the shorter mode. */
6449 if ((code == EQ_EXPR || code == NE_EXPR
6450 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6451 && (TREE_TYPE (arg1_unw) == shorter_type
6452 || (TREE_CODE (arg1_unw) == INTEGER_CST
6453 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6454 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6455 && int_fits_type_p (arg1_unw, shorter_type))))
6456 return fold_build2 (code, type, arg0_unw,
6457 fold_convert (shorter_type, arg1_unw));
6459 if (TREE_CODE (arg1_unw) != INTEGER_CST
6460 || TREE_CODE (shorter_type) != INTEGER_TYPE
6461 || !int_fits_type_p (arg1_unw, shorter_type))
6464 /* If we are comparing with an integer that does not fit into the range
6465 of the shorter type, the result is known. */
6466 outer_type = TREE_TYPE (arg1_unw);
6467 min = lower_bound_in_type (outer_type, shorter_type);
6468 max = upper_bound_in_type (outer_type, shorter_type);
6470 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6472 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6479 return omit_one_operand (type, integer_zero_node, arg0);
6484 return omit_one_operand (type, integer_one_node, arg0);
6490 return omit_one_operand (type, integer_one_node, arg0);
6492 return omit_one_operand (type, integer_zero_node, arg0);
6497 return omit_one_operand (type, integer_zero_node, arg0);
6499 return omit_one_operand (type, integer_one_node, arg0);
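/* Illustrative sketch, not part of the folder: the two situations
   handled by fold_widened_comparison above, assuming 8-bit unsigned
   char and 32-bit int.  The function name is hypothetical.  */

static int
widened_comparison_example (unsigned char c)
{
  /* When the constant fits in the narrow type, the comparison can be
     done in that type; when it does not, the result is known.  */
  int narrowed = ((int) c == 200) == (c == 200);	/* always true */
  int known = ((int) c == 300);				/* always false */
  return narrowed && !known;
}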
6508 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6509 ARG0 just the signedness is changed. */
6512 fold_sign_changed_comparison (enum tree_code code, tree type,
6513 tree arg0, tree arg1)
6515 tree arg0_inner, tmp;
6516 tree inner_type, outer_type;
6518 if (TREE_CODE (arg0) != NOP_EXPR
6519 && TREE_CODE (arg0) != CONVERT_EXPR)
6522 outer_type = TREE_TYPE (arg0);
6523 arg0_inner = TREE_OPERAND (arg0, 0);
6524 inner_type = TREE_TYPE (arg0_inner);
6526 #ifdef HAVE_canonicalize_funcptr_for_compare
6527 /* Disable this optimization if we're casting a function pointer
6528 type on targets that require function pointer canonicalization. */
6529 if (HAVE_canonicalize_funcptr_for_compare
6530 && TREE_CODE (inner_type) == POINTER_TYPE
6531 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6535 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6538 if (TREE_CODE (arg1) != INTEGER_CST
6539 && !((TREE_CODE (arg1) == NOP_EXPR
6540 || TREE_CODE (arg1) == CONVERT_EXPR)
6541 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6544 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6549 if (TREE_CODE (arg1) == INTEGER_CST)
6551 tmp = build_int_cst_wide (inner_type,
6552 TREE_INT_CST_LOW (arg1),
6553 TREE_INT_CST_HIGH (arg1));
6554 arg1 = force_fit_type (tmp, 0,
6555 TREE_OVERFLOW (arg1),
6556 TREE_CONSTANT_OVERFLOW (arg1));
6559 arg1 = fold_convert (inner_type, arg1);
6561 return fold_build2 (code, type, arg0_inner, arg1);
6564 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6565 the step of the array. Reconstructs s and delta in the case of s * delta
6566 being an integer constant (and thus already folded).
6567 ADDR is the address. OP1 is the multiplicative expression.
6568 If the function succeeds, the new address expression is returned. Otherwise
6569 NULL_TREE is returned. */
6572 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6574 tree s, delta, step;
6575 tree ref = TREE_OPERAND (addr, 0), pref;
6579 /* Canonicalize op1 into a possibly non-constant delta
6580 and an INTEGER_CST s. */
6581 if (TREE_CODE (op1) == MULT_EXPR)
6583 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6588 if (TREE_CODE (arg0) == INTEGER_CST)
6593 else if (TREE_CODE (arg1) == INTEGER_CST)
6601 else if (TREE_CODE (op1) == INTEGER_CST)
6608 /* Treat OP1 as delta * 1. */
6610 s = integer_one_node;
6613 for (;; ref = TREE_OPERAND (ref, 0))
6615 if (TREE_CODE (ref) == ARRAY_REF)
6617 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6621 step = array_ref_element_size (ref);
6622 if (TREE_CODE (step) != INTEGER_CST)
6627 if (! tree_int_cst_equal (step, s))
6632 /* Check whether delta is a multiple of step. */
6633 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6642 if (!handled_component_p (ref))
6646 /* We found a suitable array reference. Copy everything up to it,
6647 and replace the index. */
6649 pref = TREE_OPERAND (addr, 0);
6650 ret = copy_node (pref);
6655 pref = TREE_OPERAND (pref, 0);
6656 TREE_OPERAND (pos, 0) = copy_node (pref);
6657 pos = TREE_OPERAND (pos, 0);
6660 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6661 fold_convert (itype,
6662 TREE_OPERAND (pos, 1)),
6663 fold_convert (itype, delta));
6665 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
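/* Illustrative sketch, not part of the folder: the address identity
   exploited by try_move_mult_to_index above.  The function name is
   hypothetical; both indices are assumed to stay inside the array.  */

static int
move_mult_to_index_example (int *a, int i, int d)
{
  /* &a[i] plus d * sizeof (int) bytes is the same address as &a[i + d].  */
  return (char *) &a[i] + d * (int) sizeof (int) == (char *) &a[i + d];
}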
6669 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6670 means A >= Y && A != MAX, but in this case we know that
6671 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6674 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6676 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6678 if (TREE_CODE (bound) == LT_EXPR)
6679 a = TREE_OPERAND (bound, 0);
6680 else if (TREE_CODE (bound) == GT_EXPR)
6681 a = TREE_OPERAND (bound, 1);
6685 typea = TREE_TYPE (a);
6686 if (!INTEGRAL_TYPE_P (typea)
6687 && !POINTER_TYPE_P (typea))
6690 if (TREE_CODE (ineq) == LT_EXPR)
6692 a1 = TREE_OPERAND (ineq, 1);
6693 y = TREE_OPERAND (ineq, 0);
6695 else if (TREE_CODE (ineq) == GT_EXPR)
6697 a1 = TREE_OPERAND (ineq, 0);
6698 y = TREE_OPERAND (ineq, 1);
6703 if (TREE_TYPE (a1) != typea)
6706 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6707 if (!integer_onep (diff))
6710 return fold_build2 (GE_EXPR, type, a, y);
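/* Illustrative sketch, not part of the folder: the reasoning used by
   fold_to_nonsharp_ineq_using_bound above.  The function name is
   hypothetical; the bound A < X guarantees that A + 1 does not
   overflow.  */

static int
nonsharp_ineq_example (int a, int x, int y)
{
  if (! (a < x))
    return 1;			/* The bound must hold for the rewrite.  */
  /* Since a + 1 cannot overflow here, a + 1 > y is equivalent to a >= y.  */
  return (a + 1 > y) == (a >= y);
}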
6713 /* Fold a sum or difference of at least one multiplication.
6714 Returns the folded tree or NULL if no simplification could be made. */
6717 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6719 tree arg00, arg01, arg10, arg11;
6720 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6722 /* (A * C) +- (B * C) -> (A+-B) * C.
6723 (A * C) +- A -> A * (C+-1).
6724 We are most concerned about the case where C is a constant,
6725 but other combinations show up during loop reduction. Since
6726 it is not difficult, try all four possibilities. */
6728 if (TREE_CODE (arg0) == MULT_EXPR)
6730 arg00 = TREE_OPERAND (arg0, 0);
6731 arg01 = TREE_OPERAND (arg0, 1);
6736 arg01 = fold_convert (type, integer_one_node);
6738 if (TREE_CODE (arg1) == MULT_EXPR)
6740 arg10 = TREE_OPERAND (arg1, 0);
6741 arg11 = TREE_OPERAND (arg1, 1);
6746 arg11 = fold_convert (type, integer_one_node);
6750 if (operand_equal_p (arg01, arg11, 0))
6751 same = arg01, alt0 = arg00, alt1 = arg10;
6752 else if (operand_equal_p (arg00, arg10, 0))
6753 same = arg00, alt0 = arg01, alt1 = arg11;
6754 else if (operand_equal_p (arg00, arg11, 0))
6755 same = arg00, alt0 = arg01, alt1 = arg10;
6756 else if (operand_equal_p (arg01, arg10, 0))
6757 same = arg01, alt0 = arg00, alt1 = arg11;
6759 /* No identical multiplicands; see if we can find a common
6760 power-of-two factor in non-power-of-two multiplies. This
6761 can help in multi-dimensional array access. */
6762 else if (host_integerp (arg01, 0)
6763 && host_integerp (arg11, 0))
6765 HOST_WIDE_INT int01, int11, tmp;
6768 int01 = TREE_INT_CST_LOW (arg01);
6769 int11 = TREE_INT_CST_LOW (arg11);
6771 /* Move min of absolute values to int11. */
6772 if ((int01 >= 0 ? int01 : -int01)
6773 < (int11 >= 0 ? int11 : -int11))
6775 tmp = int01, int01 = int11, int11 = tmp;
6776 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6783 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6785 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6786 build_int_cst (TREE_TYPE (arg00),
6791 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6796 return fold_build2 (MULT_EXPR, type,
6797 fold_build2 (code, type,
6798 fold_convert (type, alt0),
6799 fold_convert (type, alt1)),
6800 fold_convert (type, same));
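/* Illustrative sketch, not part of the folder: the distributive
   identities used by fold_plusminus_mult_expr above, written with
   unsigned arithmetic so wrap-around is well defined.  The function
   name is hypothetical.  */

static int
plusminus_mult_example (unsigned int a, unsigned int b, unsigned int c)
{
  /* (A * C) + (B * C) == (A + B) * C  and  (A * C) + A == A * (C + 1).  */
  return (a * c + b * c == (a + b) * c) && (a * c + a == a * (c + 1));
}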
6805 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6806 specified by EXPR into the buffer PTR of length LEN bytes.
6807 Return the number of bytes placed in the buffer, or zero
6811 native_encode_int (tree expr, unsigned char *ptr, int len)
6813 tree type = TREE_TYPE (expr);
6814 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6815 int byte, offset, word, words;
6816 unsigned char value;
6818 if (total_bytes > len)
6820 words = total_bytes / UNITS_PER_WORD;
6822 for (byte = 0; byte < total_bytes; byte++)
6824 int bitpos = byte * BITS_PER_UNIT;
6825 if (bitpos < HOST_BITS_PER_WIDE_INT)
6826 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6828 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6829 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6831 if (total_bytes > UNITS_PER_WORD)
6833 word = byte / UNITS_PER_WORD;
6834 if (WORDS_BIG_ENDIAN)
6835 word = (words - 1) - word;
6836 offset = word * UNITS_PER_WORD;
6837 if (BYTES_BIG_ENDIAN)
6838 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6840 offset += byte % UNITS_PER_WORD;
6843 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6844 ptr[offset] = value;
6850 /* Subroutine of native_encode_expr. Encode the REAL_CST
6851 specified by EXPR into the buffer PTR of length LEN bytes.
6852 Return the number of bytes placed in the buffer, or zero
6856 native_encode_real (tree expr, unsigned char *ptr, int len)
6858 tree type = TREE_TYPE (expr);
6859 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6860 int byte, offset, word, words;
6861 unsigned char value;
6863 /* There are always 32 bits in each long, no matter the size of
6864 the host's long. We handle floating point representations with
6868 if (total_bytes > len)
6870 words = total_bytes / UNITS_PER_WORD;
6872 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6874 for (byte = 0; byte < total_bytes; byte++)
6876 int bitpos = byte * BITS_PER_UNIT;
6877 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6879 if (total_bytes > UNITS_PER_WORD)
6881 word = byte / UNITS_PER_WORD;
6882 if (FLOAT_WORDS_BIG_ENDIAN)
6883 word = (words - 1) - word;
6884 offset = word * UNITS_PER_WORD;
6885 if (BYTES_BIG_ENDIAN)
6886 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6888 offset += byte % UNITS_PER_WORD;
6891 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6892 ptr[offset] = value;
6897 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6898 specified by EXPR into the buffer PTR of length LEN bytes.
6899 Return the number of bytes placed in the buffer, or zero
6903 native_encode_complex (tree expr, unsigned char *ptr, int len)
6908 part = TREE_REALPART (expr);
6909 rsize = native_encode_expr (part, ptr, len);
6912 part = TREE_IMAGPART (expr);
6913 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6916 return rsize + isize;
6920 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6921 specified by EXPR into the buffer PTR of length LEN bytes.
6922 Return the number of bytes placed in the buffer, or zero
6926 native_encode_vector (tree expr, unsigned char *ptr, int len)
6928 int i, size, offset, count;
6929 tree elem, elements;
6933 elements = TREE_VECTOR_CST_ELTS (expr);
6934 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6935 for (i = 0; i < count; i++)
6939 elem = TREE_VALUE (elements);
6940 elements = TREE_CHAIN (elements);
6947 size = native_encode_expr (elem, ptr+offset, len-offset);
6953 if (offset + size > len)
6955 memset (ptr+offset, 0, size);
6965 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6966 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6967 buffer PTR of length LEN bytes. Return the number of bytes
6968 placed in the buffer, or zero upon failure. */
6971 native_encode_expr (tree expr, unsigned char *ptr, int len)
6973 switch (TREE_CODE (expr))
6976 return native_encode_int (expr, ptr, len);
6979 return native_encode_real (expr, ptr, len);
6982 return native_encode_complex (expr, ptr, len);
6985 return native_encode_vector (expr, ptr, len);
6993 /* Subroutine of native_interpret_expr. Interpret the contents of
6994 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6995 If the buffer cannot be interpreted, return NULL_TREE. */
6998 native_interpret_int (tree type, unsigned char *ptr, int len)
7000 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7001 int byte, offset, word, words;
7002 unsigned char value;
7003 unsigned int HOST_WIDE_INT lo = 0;
7004 HOST_WIDE_INT hi = 0;
7006 if (total_bytes > len)
7008 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7010 words = total_bytes / UNITS_PER_WORD;
7012 for (byte = 0; byte < total_bytes; byte++)
7014 int bitpos = byte * BITS_PER_UNIT;
7015 if (total_bytes > UNITS_PER_WORD)
7017 word = byte / UNITS_PER_WORD;
7018 if (WORDS_BIG_ENDIAN)
7019 word = (words - 1) - word;
7020 offset = word * UNITS_PER_WORD;
7021 if (BYTES_BIG_ENDIAN)
7022 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7024 offset += byte % UNITS_PER_WORD;
7027 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7028 value = ptr[offset];
7030 if (bitpos < HOST_BITS_PER_WIDE_INT)
7031 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7033 hi |= (unsigned HOST_WIDE_INT) value
7034 << (bitpos - HOST_BITS_PER_WIDE_INT);
7037 return force_fit_type (build_int_cst_wide (type, lo, hi),
7042 /* Subroutine of native_interpret_expr. Interpret the contents of
7043 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7044 If the buffer cannot be interpreted, return NULL_TREE. */
7047 native_interpret_real (tree type, unsigned char *ptr, int len)
7049 enum machine_mode mode = TYPE_MODE (type);
7050 int total_bytes = GET_MODE_SIZE (mode);
7051 int byte, offset, word, words;
7052 unsigned char value;
7053 /* There are always 32 bits in each long, no matter the size of
7054 the host's long. We handle floating point representations with
7059 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7060 if (total_bytes > len || total_bytes > 24)
7062 words = total_bytes / UNITS_PER_WORD;
7064 memset (tmp, 0, sizeof (tmp));
7065 for (byte = 0; byte < total_bytes; byte++)
7067 int bitpos = byte * BITS_PER_UNIT;
7068 if (total_bytes > UNITS_PER_WORD)
7070 word = byte / UNITS_PER_WORD;
7071 if (FLOAT_WORDS_BIG_ENDIAN)
7072 word = (words - 1) - word;
7073 offset = word * UNITS_PER_WORD;
7074 if (BYTES_BIG_ENDIAN)
7075 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7077 offset += byte % UNITS_PER_WORD;
7080 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7081 value = ptr[offset];
7083 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7086 real_from_target (&r, tmp, mode);
7087 return build_real (type, r);
7091 /* Subroutine of native_interpret_expr. Interpret the contents of
7092 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7093 If the buffer cannot be interpreted, return NULL_TREE. */
7096 native_interpret_complex (tree type, unsigned char *ptr, int len)
7098 tree etype, rpart, ipart;
7101 etype = TREE_TYPE (type);
7102 size = GET_MODE_SIZE (TYPE_MODE (etype));
7105 rpart = native_interpret_expr (etype, ptr, size);
7108 ipart = native_interpret_expr (etype, ptr+size, size);
7111 return build_complex (type, rpart, ipart);
7115 /* Subroutine of native_interpret_expr. Interpret the contents of
7116 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7117 If the buffer cannot be interpreted, return NULL_TREE. */
7120 native_interpret_vector (tree type, unsigned char *ptr, int len)
7122 tree etype, elem, elements;
7125 etype = TREE_TYPE (type);
7126 size = GET_MODE_SIZE (TYPE_MODE (etype));
7127 count = TYPE_VECTOR_SUBPARTS (type);
7128 if (size * count > len)
7131 elements = NULL_TREE;
7132 for (i = count - 1; i >= 0; i--)
7134 elem = native_interpret_expr (etype, ptr+(i*size), size);
7137 elements = tree_cons (NULL_TREE, elem, elements);
7139 return build_vector (type, elements);
7143 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7144 the buffer PTR of length LEN as a constant of type TYPE. For
7145 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7146 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7147 return NULL_TREE. */
7150 native_interpret_expr (tree type, unsigned char *ptr, int len)
7152 switch (TREE_CODE (type))
7157 return native_interpret_int (type, ptr, len);
7160 return native_interpret_real (type, ptr, len);
7163 return native_interpret_complex (type, ptr, len);
7166 return native_interpret_vector (type, ptr, len);
7174 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7175 TYPE at compile-time. If we're unable to perform the conversion
7176 return NULL_TREE. */
7179 fold_view_convert_expr (tree type, tree expr)
7181 /* We support up to 512-bit values (for V8DFmode). */
7182 unsigned char buffer[64];
7185 /* Check that the host and target are sane. */
7186 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7189 len = native_encode_expr (expr, buffer, sizeof (buffer));
7193 return native_interpret_expr (type, buffer, len);
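/* Illustrative sketch, not part of the folder: the effect of
   fold_view_convert_expr above, i.e. reinterpreting the bytes of one
   constant as another type.  Assumes 32-bit float and unsigned int and
   uses a union instead of GCC's internal byte buffer; the function
   name is hypothetical.  */

static unsigned int
view_convert_example (float f)
{
  union { float f; unsigned int u; } pun;
  pun.f = f;
  return pun.u;		/* e.g. 1.0f yields 0x3f800000 on IEEE targets.  */
}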
7197 /* Fold a unary expression of code CODE and type TYPE with operand
7198 OP0. Return the folded expression if folding is successful.
7199 Otherwise, return NULL_TREE. */
7202 fold_unary (enum tree_code code, tree type, tree op0)
7206 enum tree_code_class kind = TREE_CODE_CLASS (code);
7208 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7209 && TREE_CODE_LENGTH (code) == 1);
7214 if (code == NOP_EXPR || code == CONVERT_EXPR
7215 || code == FLOAT_EXPR || code == ABS_EXPR)
7217 /* Don't use STRIP_NOPS, because signedness of argument type
7219 STRIP_SIGN_NOPS (arg0);
7223 /* Strip any conversions that don't change the mode. This
7224 is safe for every expression, except for a comparison
7225 expression because its signedness is derived from its
7228 Note that this is done as an internal manipulation within
7229 the constant folder, in order to find the simplest
7230 representation of the arguments so that their form can be
7231 studied. In any case, the appropriate type conversions
7232 should be put back in the tree that will get out of the
7238 if (TREE_CODE_CLASS (code) == tcc_unary)
7240 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7241 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7242 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7243 else if (TREE_CODE (arg0) == COND_EXPR)
7245 tree arg01 = TREE_OPERAND (arg0, 1);
7246 tree arg02 = TREE_OPERAND (arg0, 2);
7247 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7248 arg01 = fold_build1 (code, type, arg01);
7249 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7250 arg02 = fold_build1 (code, type, arg02);
7251 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7254 /* If this was a conversion, and all we did was to move it
7255 inside the COND_EXPR, bring it back out. But leave it if
7256 it is a conversion from integer to integer and the
7257 result precision is no wider than a word since such a
7258 conversion is cheap and may be optimized away by combine,
7259 while it couldn't if it were outside the COND_EXPR. Then return
7260 so we don't get into an infinite recursion loop taking the
7261 conversion out and then back in. */
7263 if ((code == NOP_EXPR || code == CONVERT_EXPR
7264 || code == NON_LVALUE_EXPR)
7265 && TREE_CODE (tem) == COND_EXPR
7266 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7267 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7268 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7269 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7270 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7271 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7272 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7274 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7275 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7276 || flag_syntax_only))
7277 tem = build1 (code, type,
7279 TREE_TYPE (TREE_OPERAND
7280 (TREE_OPERAND (tem, 1), 0)),
7281 TREE_OPERAND (tem, 0),
7282 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7283 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7286 else if (COMPARISON_CLASS_P (arg0))
7288 if (TREE_CODE (type) == BOOLEAN_TYPE)
7290 arg0 = copy_node (arg0);
7291 TREE_TYPE (arg0) = type;
7294 else if (TREE_CODE (type) != INTEGER_TYPE)
7295 return fold_build3 (COND_EXPR, type, arg0,
7296 fold_build1 (code, type,
7298 fold_build1 (code, type,
7299 integer_zero_node));
7308 case FIX_TRUNC_EXPR:
7310 case FIX_FLOOR_EXPR:
7311 case FIX_ROUND_EXPR:
7312 if (TREE_TYPE (op0) == type)
7315 /* If we have (type) (a CMP b) and type is an integral type, return
7316 a new expression involving the new type. */
7317 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7318 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7319 TREE_OPERAND (op0, 1));
7321 /* Handle cases of two conversions in a row. */
7322 if (TREE_CODE (op0) == NOP_EXPR
7323 || TREE_CODE (op0) == CONVERT_EXPR)
7325 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7326 tree inter_type = TREE_TYPE (op0);
7327 int inside_int = INTEGRAL_TYPE_P (inside_type);
7328 int inside_ptr = POINTER_TYPE_P (inside_type);
7329 int inside_float = FLOAT_TYPE_P (inside_type);
7330 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7331 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7332 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7333 int inter_int = INTEGRAL_TYPE_P (inter_type);
7334 int inter_ptr = POINTER_TYPE_P (inter_type);
7335 int inter_float = FLOAT_TYPE_P (inter_type);
7336 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7337 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7338 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7339 int final_int = INTEGRAL_TYPE_P (type);
7340 int final_ptr = POINTER_TYPE_P (type);
7341 int final_float = FLOAT_TYPE_P (type);
7342 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7343 unsigned int final_prec = TYPE_PRECISION (type);
7344 int final_unsignedp = TYPE_UNSIGNED (type);
7346 /* In addition to the cases of two conversions in a row
7347 handled below, if we are converting something to its own
7348 type via an object of identical or wider precision, neither
7349 conversion is needed. */
7350 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7351 && (((inter_int || inter_ptr) && final_int)
7352 || (inter_float && final_float))
7353 && inter_prec >= final_prec)
7354 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7356 /* Likewise, if the intermediate and final types are either both
7357 float or both integer, we don't need the middle conversion if
7358 it is wider than the final type and doesn't change the signedness
7359 (for integers). Avoid this if the final type is a pointer
7360 since then we sometimes need the inner conversion. Likewise if
7361 the outer has a precision not equal to the size of its mode. */
7362 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7363 || (inter_float && inside_float)
7364 || (inter_vec && inside_vec))
7365 && inter_prec >= inside_prec
7366 && (inter_float || inter_vec
7367 || inter_unsignedp == inside_unsignedp)
7368 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7369 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7371 && (! final_vec || inter_prec == inside_prec))
7372 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7374 /* If we have a sign-extension of a zero-extended value, we can
7375 replace that by a single zero-extension. */
7376 if (inside_int && inter_int && final_int
7377 && inside_prec < inter_prec && inter_prec < final_prec
7378 && inside_unsignedp && !inter_unsignedp)
7379 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7381 /* Two conversions in a row are not needed unless:
7382 - some conversion is floating-point (overstrict for now), or
7383 - some conversion is a vector (overstrict for now), or
7384 - the intermediate type is narrower than both initial and
7386 - the intermediate type and innermost type differ in signedness,
7387 and the outermost type is wider than the intermediate, or
7388 - the initial type is a pointer type and the precisions of the
7389 intermediate and final types differ, or
7390 - the final type is a pointer type and the precisions of the
7391 initial and intermediate types differ.
7392 - the final type is a pointer type and the initial type not
7393 - the initial type is a pointer to an array and the final type
7395 if (! inside_float && ! inter_float && ! final_float
7396 && ! inside_vec && ! inter_vec && ! final_vec
7397 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7398 && ! (inside_int && inter_int
7399 && inter_unsignedp != inside_unsignedp
7400 && inter_prec < final_prec)
7401 && ((inter_unsignedp && inter_prec > inside_prec)
7402 == (final_unsignedp && final_prec > inter_prec))
7403 && ! (inside_ptr && inter_prec != final_prec)
7404 && ! (final_ptr && inside_prec != inter_prec)
7405 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7406 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7407 && final_ptr == inside_ptr
7409 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7410 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7411 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
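#if 0
      /* Illustrative sketch only (never compiled): the identity behind the
         sign-extension-of-a-zero-extension rule above, assuming 8-bit
         unsigned char, 16-bit short and 32-bit int.  */
      {
        unsigned char c = 200;
        int chained = (int) (short) c;	/* zero-extend, then sign-extend */
        int direct = (int) c;		/* a single zero-extension */
        /* chained == direct for every value of C.  */
      }
#endif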
7414 /* Handle (T *)&A.B.C for A being of type T and B and C
7415 living at offset zero. This occurs frequently in
7416 C++ upcasting and then accessing the base. */
7417 if (TREE_CODE (op0) == ADDR_EXPR
7418 && POINTER_TYPE_P (type)
7419 && handled_component_p (TREE_OPERAND (op0, 0)))
7421 HOST_WIDE_INT bitsize, bitpos;
7423 enum machine_mode mode;
7424 int unsignedp, volatilep;
7425 tree base = TREE_OPERAND (op0, 0);
7426 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7427 &mode, &unsignedp, &volatilep, false);
7428 /* If the reference was to a (constant) zero offset, we can use
7429 the address of the base if it has the same base type
7430 as the result type. */
7431 if (! offset && bitpos == 0
7432 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7433 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7434 return fold_convert (type, build_fold_addr_expr (base));
7437 if (TREE_CODE (op0) == MODIFY_EXPR
7438 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7439 /* Detect assigning a bitfield. */
7440 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7441 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7443 /* Don't leave an assignment inside a conversion
7444 unless assigning a bitfield. */
7445 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7446 /* First do the assignment, then return converted constant. */
7447 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7448 TREE_NO_WARNING (tem) = 1;
7449 TREE_USED (tem) = 1;
7453 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7454 constant (if x has signed type, the sign bit cannot be set
7455 in c). This folds the extension into the BIT_AND_EXPR. */
7456 if (INTEGRAL_TYPE_P (type)
7457 && TREE_CODE (type) != BOOLEAN_TYPE
7458 && TREE_CODE (op0) == BIT_AND_EXPR
7459 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7462 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7465 if (TYPE_UNSIGNED (TREE_TYPE (and))
7466 || (TYPE_PRECISION (type)
7467 <= TYPE_PRECISION (TREE_TYPE (and))))
7469 else if (TYPE_PRECISION (TREE_TYPE (and1))
7470 <= HOST_BITS_PER_WIDE_INT
7471 && host_integerp (and1, 1))
7473 unsigned HOST_WIDE_INT cst;
7475 cst = tree_low_cst (and1, 1);
7476 cst &= (HOST_WIDE_INT) -1
7477 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7478 change = (cst == 0);
7479 #ifdef LOAD_EXTEND_OP
7481 && !flag_syntax_only
7482 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7485 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7486 and0 = fold_convert (uns, and0);
7487 and1 = fold_convert (uns, and1);
7493 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7494 TREE_INT_CST_HIGH (and1));
7495 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7496 TREE_CONSTANT_OVERFLOW (and1));
7497 return fold_build2 (BIT_AND_EXPR, type,
7498 fold_convert (type, and0), tem);
7502 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7503 T2 being pointers to types of the same size. */
7504 if (POINTER_TYPE_P (type)
7505 && BINARY_CLASS_P (arg0)
7506 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7507 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7509 tree arg00 = TREE_OPERAND (arg0, 0);
7511 tree t1 = TREE_TYPE (arg00);
7512 tree tt0 = TREE_TYPE (t0);
7513 tree tt1 = TREE_TYPE (t1);
7514 tree s0 = TYPE_SIZE (tt0);
7515 tree s1 = TYPE_SIZE (tt1);
7517 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7518 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7519 TREE_OPERAND (arg0, 1));
7522 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7523 of the same precision, and X is an integer type not narrower than
7524 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7525 if (INTEGRAL_TYPE_P (type)
7526 && TREE_CODE (op0) == BIT_NOT_EXPR
7527 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7528 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7529 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7530 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7532 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7533 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7534 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7535 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7538 tem = fold_convert_const (code, type, arg0);
7539 return tem ? tem : NULL_TREE;
7541 case VIEW_CONVERT_EXPR:
7542 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7543 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7544 return fold_view_convert_expr (type, op0);
7547 if (negate_expr_p (arg0))
7548 return fold_convert (type, negate_expr (arg0));
7552 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7553 return fold_abs_const (arg0, type);
7554 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7555 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7556 /* Convert fabs((double)float) into (double)fabsf(float). */
7557 else if (TREE_CODE (arg0) == NOP_EXPR
7558 && TREE_CODE (type) == REAL_TYPE)
7560 tree targ0 = strip_float_extensions (arg0);
7562 return fold_convert (type, fold_build1 (ABS_EXPR,
7566 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7567 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7570 /* Strip sign ops from argument. */
7571 if (TREE_CODE (type) == REAL_TYPE)
7573 tem = fold_strip_sign_ops (arg0);
7575 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7580 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7581 return fold_convert (type, arg0);
7582 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7584 tree itype = TREE_TYPE (type);
7585 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7586 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7587 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7589 if (TREE_CODE (arg0) == COMPLEX_CST)
7591 tree itype = TREE_TYPE (type);
7592 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7593 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7594 return build_complex (type, rpart, negate_expr (ipart));
7596 if (TREE_CODE (arg0) == CONJ_EXPR)
7597 return fold_convert (type, TREE_OPERAND (arg0, 0));
7601 if (TREE_CODE (arg0) == INTEGER_CST)
7602 return fold_not_const (arg0, type);
7603 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7604 return TREE_OPERAND (arg0, 0);
7605 /* Convert ~ (-A) to A - 1. */
7606 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7607 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7608 build_int_cst (type, 1));
7609 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7610 else if (INTEGRAL_TYPE_P (type)
7611 && ((TREE_CODE (arg0) == MINUS_EXPR
7612 && integer_onep (TREE_OPERAND (arg0, 1)))
7613 || (TREE_CODE (arg0) == PLUS_EXPR
7614 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7615 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7616 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7617 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7618 && (tem = fold_unary (BIT_NOT_EXPR, type,
7620 TREE_OPERAND (arg0, 0)))))
7621 return fold_build2 (BIT_XOR_EXPR, type, tem,
7622 fold_convert (type, TREE_OPERAND (arg0, 1)));
7623 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7624 && (tem = fold_unary (BIT_NOT_EXPR, type,
7626 TREE_OPERAND (arg0, 1)))))
7627 return fold_build2 (BIT_XOR_EXPR, type,
7628 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
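#if 0
      /* Illustrative sketch only (never compiled): the two's-complement
         identities used above, written with unsigned arithmetic so the
         wrap-around is well defined.  */
      {
        unsigned int a = 42;
        /* ~(-A) == A - 1  and  ~(A - 1) == -A  (mod 2^32).  */
        int ok = (~-a == a - 1) && (~(a - 1) == -a);
        (void) ok;
      }
#endif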
7632 case TRUTH_NOT_EXPR:
7633 /* The argument to invert_truthvalue must have Boolean type. */
7634 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7635 arg0 = fold_convert (boolean_type_node, arg0);
7637 /* Note that the operand of this must be an int
7638 and its values must be 0 or 1.
7639 ("true" is a fixed value perhaps depending on the language,
7640 but we don't handle values other than 1 correctly yet.) */
7641 tem = fold_truth_not_expr (arg0);
7644 return fold_convert (type, tem);
7647 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7648 return fold_convert (type, arg0);
7649 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7650 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7651 TREE_OPERAND (arg0, 1));
7652 if (TREE_CODE (arg0) == COMPLEX_CST)
7653 return fold_convert (type, TREE_REALPART (arg0));
7654 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7656 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7657 tem = fold_build2 (TREE_CODE (arg0), itype,
7658 fold_build1 (REALPART_EXPR, itype,
7659 TREE_OPERAND (arg0, 0)),
7660 fold_build1 (REALPART_EXPR, itype,
7661 TREE_OPERAND (arg0, 1)));
7662 return fold_convert (type, tem);
7664 if (TREE_CODE (arg0) == CONJ_EXPR)
7666 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7667 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7668 return fold_convert (type, tem);
7673 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7674 return fold_convert (type, integer_zero_node);
7675 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7676 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7677 TREE_OPERAND (arg0, 0));
7678 if (TREE_CODE (arg0) == COMPLEX_CST)
7679 return fold_convert (type, TREE_IMAGPART (arg0));
7680 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7682 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7683 tem = fold_build2 (TREE_CODE (arg0), itype,
7684 fold_build1 (IMAGPART_EXPR, itype,
7685 TREE_OPERAND (arg0, 0)),
7686 fold_build1 (IMAGPART_EXPR, itype,
7687 TREE_OPERAND (arg0, 1)));
7688 return fold_convert (type, tem);
7690 if (TREE_CODE (arg0) == CONJ_EXPR)
7692 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7693 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7694 return fold_convert (type, negate_expr (tem));
7700 } /* switch (code) */
7703 /* Fold a binary expression of code CODE and type TYPE with operands
7704 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7705 Return the folded expression if folding is successful. Otherwise,
7706 return NULL_TREE. */
7709 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7711 enum tree_code compl_code;
7713 if (code == MIN_EXPR)
7714 compl_code = MAX_EXPR;
7715 else if (code == MAX_EXPR)
7716 compl_code = MIN_EXPR;
7720 /* MIN (MAX (a, b), b) == b. */
7721 if (TREE_CODE (op0) == compl_code
7722 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7723 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7725 /* MIN (MAX (b, a), b) == b. */
7726 if (TREE_CODE (op0) == compl_code
7727 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7728 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7729 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7731 /* MIN (a, MAX (a, b)) == a. */
7732 if (TREE_CODE (op1) == compl_code
7733 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7734 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7735 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7737 /* MIN (a, MAX (b, a)) == a. */
7738 if (TREE_CODE (op1) == compl_code
7739 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7740 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7741 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
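/* Illustrative sketch, not part of the folder: the identity behind the
   MIN/MAX simplifications in fold_minmax above.  The function name is
   hypothetical.  */

static int
minmax_example (int a, int b)
{
  int max_ab = a > b ? a : b;
  int min_of_max = max_ab < b ? max_ab : b;	/* MIN (MAX (a, b), b) */
  return min_of_max == b;			/* always true */
}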
7746 /* Subroutine of fold_binary. This routine performs all of the
7747 transformations that are common to the equality/inequality
7748 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7749 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7750 fold_binary itself should call fold_binary instead. Fold a comparison with
7751 tree code CODE and type TYPE with operands OP0 and OP1. Return
7752 the folded comparison or NULL_TREE. */
7755 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7757 tree arg0, arg1, tem;
7762 STRIP_SIGN_NOPS (arg0);
7763 STRIP_SIGN_NOPS (arg1);
7765 tem = fold_relational_const (code, type, arg0, arg1);
7766 if (tem != NULL_TREE)
7769 /* If one arg is a real or integer constant, put it last. */
7770 if (tree_swap_operands_p (arg0, arg1, true))
7771 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7773 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7774 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7775 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7776 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7777 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7778 && !(flag_wrapv || flag_trapv))
7779 && (TREE_CODE (arg1) == INTEGER_CST
7780 && !TREE_OVERFLOW (arg1)))
7782 tree const1 = TREE_OPERAND (arg0, 1);
7784 tree variable = TREE_OPERAND (arg0, 0);
7787 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7789 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7790 TREE_TYPE (arg1), const2, const1);
7791 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7792 && (TREE_CODE (lhs) != INTEGER_CST
7793 || !TREE_OVERFLOW (lhs)))
7794 return fold_build2 (code, type, variable, lhs);
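#if 0
      /* Illustrative sketch only (never compiled): with signed arithmetic
         and no overflow in X + 3, the rewrite above turns X + 3 < 10 into
         X < 7.  Assumes a 32-bit int.  */
      {
        int x = 5;
        int unfolded = (x + 3 < 10);
        int folded = (x < 7);
        /* unfolded == folded whenever x + 3 does not overflow.  */
      }
#endif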
7797 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7798 same object, then we can fold this to a comparison of the two offsets in
7799 signed size type. This is possible because pointer arithmetic is
7800 restricted to remain within an object and overflow on pointer differences
7801 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7802 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7803 && !flag_wrapv && !flag_trapv)
7805 tree base0, offset0, base1, offset1;
7807 if (extract_array_ref (arg0, &base0, &offset0)
7808 && extract_array_ref (arg1, &base1, &offset1)
7809 && operand_equal_p (base0, base1, 0))
7811 tree signed_size_type_node;
7812 signed_size_type_node = signed_type_for (size_type_node);
7814 /* By converting to signed size type we cover middle-end pointer
7815 arithmetic which operates on unsigned pointer types of size
7816 type size and ARRAY_REF offsets which are properly sign or
7817 zero extended from their type in case it is narrower than
7819 if (offset0 == NULL_TREE)
7820 offset0 = build_int_cst (signed_size_type_node, 0);
7822 offset0 = fold_convert (signed_size_type_node, offset0);
7823 if (offset1 == NULL_TREE)
7824 offset1 = build_int_cst (signed_size_type_node, 0);
7826 offset1 = fold_convert (signed_size_type_node, offset1);
7828 return fold_build2 (code, type, offset0, offset1);
7832 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7834 tree targ0 = strip_float_extensions (arg0);
7835 tree targ1 = strip_float_extensions (arg1);
7836 tree newtype = TREE_TYPE (targ0);
7838 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7839 newtype = TREE_TYPE (targ1);
7841 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7842 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7843 return fold_build2 (code, type, fold_convert (newtype, targ0),
7844 fold_convert (newtype, targ1));
7846 /* (-a) CMP (-b) -> b CMP a */
7847 if (TREE_CODE (arg0) == NEGATE_EXPR
7848 && TREE_CODE (arg1) == NEGATE_EXPR)
7849 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7850 TREE_OPERAND (arg0, 0));
7852 if (TREE_CODE (arg1) == REAL_CST)
7854 REAL_VALUE_TYPE cst;
7855 cst = TREE_REAL_CST (arg1);
7857 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7858 if (TREE_CODE (arg0) == NEGATE_EXPR)
7859 return fold_build2 (swap_tree_comparison (code), type,
7860 TREE_OPERAND (arg0, 0),
7861 build_real (TREE_TYPE (arg1),
7862 REAL_VALUE_NEGATE (cst)));
7864 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7865 /* a CMP (-0) -> a CMP 0 */
7866 if (REAL_VALUE_MINUS_ZERO (cst))
7867 return fold_build2 (code, type, arg0,
7868 build_real (TREE_TYPE (arg1), dconst0));
7870 /* x != NaN is always true, other ops are always false. */
7871 if (REAL_VALUE_ISNAN (cst)
7872 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7874 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7875 return omit_one_operand (type, tem, arg0);
7878 /* Fold comparisons against infinity. */
7879 if (REAL_VALUE_ISINF (cst))
7881 tem = fold_inf_compare (code, type, arg0, arg1);
7882 if (tem != NULL_TREE)
7887 /* If this is a comparison of a real constant with a PLUS_EXPR
7888 or a MINUS_EXPR of a real constant, we can convert it into a
7889 comparison with a revised real constant as long as no overflow
7890 occurs when unsafe_math_optimizations are enabled. */
7891 if (flag_unsafe_math_optimizations
7892 && TREE_CODE (arg1) == REAL_CST
7893 && (TREE_CODE (arg0) == PLUS_EXPR
7894 || TREE_CODE (arg0) == MINUS_EXPR)
7895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7896 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7897 ? MINUS_EXPR : PLUS_EXPR,
7898 arg1, TREE_OPERAND (arg0, 1), 0))
7899 && ! TREE_CONSTANT_OVERFLOW (tem))
7900 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7902 /* Likewise, we can simplify a comparison of a real constant with
7903 a MINUS_EXPR whose first operand is also a real constant, i.e.
7904 (c1 - x) < c2 becomes x > c1-c2. */
7905 if (flag_unsafe_math_optimizations
7906 && TREE_CODE (arg1) == REAL_CST
7907 && TREE_CODE (arg0) == MINUS_EXPR
7908 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7909 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7911 && ! TREE_CONSTANT_OVERFLOW (tem))
7912 return fold_build2 (swap_tree_comparison (code), type,
7913 TREE_OPERAND (arg0, 1), tem);
7915 /* Fold comparisons against built-in math functions. */
7916 if (TREE_CODE (arg1) == REAL_CST
7917 && flag_unsafe_math_optimizations
7918 && ! flag_errno_math)
7920 enum built_in_function fcode = builtin_mathfn_code (arg0);
7922 if (fcode != END_BUILTINS)
7924 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7925 if (tem != NULL_TREE)
7931 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7932 if (TREE_CONSTANT (arg1)
7933 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7934 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7935 /* This optimization is invalid for ordered comparisons
7936 if CONST+INCR overflows or if foo+incr might overflow.
7937 This optimization is invalid for floating point due to rounding.
7938 For pointer types we assume overflow doesn't happen. */
7939 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7940 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7941 && (code == EQ_EXPR || code == NE_EXPR))))
7943 tree varop, newconst;
7945 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7947 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7948 arg1, TREE_OPERAND (arg0, 1));
7949 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7950 TREE_OPERAND (arg0, 0),
7951 TREE_OPERAND (arg0, 1));
7955 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7956 arg1, TREE_OPERAND (arg0, 1));
7957 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7958 TREE_OPERAND (arg0, 0),
7959 TREE_OPERAND (arg0, 1));
7963 /* If VAROP is a reference to a bitfield, we must mask
7964 the constant by the width of the field. */
7965 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7966 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7967 && host_integerp (DECL_SIZE (TREE_OPERAND
7968 (TREE_OPERAND (varop, 0), 1)), 1))
7970 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7971 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7972 tree folded_compare, shift;
7974 /* First check whether the comparison would come out
7975 always the same. If we don't do that we would
7976 change the meaning with the masking. */
7977 folded_compare = fold_build2 (code, type,
7978 TREE_OPERAND (varop, 0), arg1);
7979 if (TREE_CODE (folded_compare) == INTEGER_CST)
7980 return omit_one_operand (type, folded_compare, varop);
7982 shift = build_int_cst (NULL_TREE,
7983 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7984 shift = fold_convert (TREE_TYPE (varop), shift);
7985 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7987 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7991 return fold_build2 (code, type, varop, newconst);
7994 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7995 && (TREE_CODE (arg0) == NOP_EXPR
7996 || TREE_CODE (arg0) == CONVERT_EXPR))
7998 /* If we are widening one operand of an integer comparison,
7999 see if the other operand is similarly being widened. Perhaps we
8000 can do the comparison in the narrower type. */
8001 tem = fold_widened_comparison (code, type, arg0, arg1);
8005 /* Or if we are changing signedness. */
8006 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8011 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8012 constant, we can simplify it. */
8013 if (TREE_CODE (arg1) == INTEGER_CST
8014 && (TREE_CODE (arg0) == MIN_EXPR
8015 || TREE_CODE (arg0) == MAX_EXPR)
8016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8018 tem = optimize_minmax_comparison (code, type, op0, op1);
8023 /* Simplify comparison of something with itself. (For IEEE
8024 floating-point, we can only do some of these simplifications.) */
8025 if (operand_equal_p (arg0, arg1, 0))
8030 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8031 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8032 return constant_boolean_node (1, type);
8037 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8038 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8039 return constant_boolean_node (1, type);
8040 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8043 /* For NE, we can only do this simplification if integer
8044 or we don't honor IEEE floating point NaNs. */
8045 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8046 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8048 /* ... fall through ... */
8051 return constant_boolean_node (0, type);
8057 /* If we are comparing an expression that just has comparisons
8058 of two integer values, arithmetic expressions of those comparisons,
8059 and constants, we can simplify it. There are only three cases
8060 to check: the two values can either be equal, the first can be
8061 greater, or the second can be greater. Fold the expression for
8062 those three values. Since each value must be 0 or 1, we have
8063 eight possibilities, each of which corresponds to the constant 0
8064 or 1 or one of the six possible comparisons.
8066 This handles common cases like (a > b) == 0 but also handles
8067 expressions like ((x > y) - (y > x)) > 0, which supposedly
8068 occur in macroized code. */
8070 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8072 tree cval1 = 0, cval2 = 0;
8075 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8076 /* Don't handle degenerate cases here; they should already
8077 have been handled anyway. */
8078 && cval1 != 0 && cval2 != 0
8079 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8080 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8081 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8082 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8083 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8084 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8085 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8087 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8088 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8090 /* We can't just pass T to eval_subst in case cval1 or cval2
8091 was the same as ARG1. */
8094 = fold_build2 (code, type,
8095 eval_subst (arg0, cval1, maxval,
8099 = fold_build2 (code, type,
8100 eval_subst (arg0, cval1, maxval,
8104 = fold_build2 (code, type,
8105 eval_subst (arg0, cval1, minval,
8109 /* All three of these results should be 0 or 1. Confirm they are.
8110 Then use those values to select the proper code to use. */
8112 if (TREE_CODE (high_result) == INTEGER_CST
8113 && TREE_CODE (equal_result) == INTEGER_CST
8114 && TREE_CODE (low_result) == INTEGER_CST)
8116 /* Make a 3-bit mask with the high-order bit being the
8117 value for `>', the next for '=', and the low for '<'. */
8118 switch ((integer_onep (high_result) * 4)
8119 + (integer_onep (equal_result) * 2)
8120 + integer_onep (low_result))
8124 return omit_one_operand (type, integer_zero_node, arg0);
8145 return omit_one_operand (type, integer_one_node, arg0);
8149 return save_expr (build2 (code, type, cval1, cval2));
8150 return fold_build2 (code, type, cval1, cval2);
8155 /* Fold a comparison of the address of COMPONENT_REFs with the same
8156 type and component to a comparison of the address of the base
8157 object. In short, &x->a OP &y->a to x OP y and
8158 &x->a OP &y.a to x OP &y */
8159 if (TREE_CODE (arg0) == ADDR_EXPR
8160 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8161 && TREE_CODE (arg1) == ADDR_EXPR
8162 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8164 tree cref0 = TREE_OPERAND (arg0, 0);
8165 tree cref1 = TREE_OPERAND (arg1, 0);
8166 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8168 tree op0 = TREE_OPERAND (cref0, 0);
8169 tree op1 = TREE_OPERAND (cref1, 0);
8170 return fold_build2 (code, type,
8171 build_fold_addr_expr (op0),
8172 build_fold_addr_expr (op1));
8176 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8177 into a single range test. */
8178 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8179 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8180 && TREE_CODE (arg1) == INTEGER_CST
8181 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8182 && !integer_zerop (TREE_OPERAND (arg0, 1))
8183 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8184 && !TREE_OVERFLOW (arg1))
8186 tem = fold_div_compare (code, type, arg0, arg1);
8187 if (tem != NULL_TREE)
8195 /* Subroutine of fold_binary. Optimize complex multiplications of the
8196 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8197 argument EXPR represents the expression "z" of type TYPE. */
8200 fold_mult_zconjz (tree type, tree expr)
8202 tree itype = TREE_TYPE (type);
8203 tree rpart, ipart, tem;
8205 if (TREE_CODE (expr) == COMPLEX_EXPR)
8207 rpart = TREE_OPERAND (expr, 0);
8208 ipart = TREE_OPERAND (expr, 1);
8210 else if (TREE_CODE (expr) == COMPLEX_CST)
8212 rpart = TREE_REALPART (expr);
8213 ipart = TREE_IMAGPART (expr);
8217 expr = save_expr (expr);
8218 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8219 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8222 rpart = save_expr (rpart);
8223 ipart = save_expr (ipart);
8224 tem = fold_build2 (PLUS_EXPR, itype,
8225 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8226 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8227 return fold_build2 (COMPLEX_EXPR, type, tem,
8228 fold_convert (itype, integer_zero_node));
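/* Illustrative sketch, not part of the folder: the algebra behind
   fold_mult_zconjz above, written with plain doubles for the real and
   imaginary parts.  The function name is hypothetical; overflow and
   NaNs are ignored.  */

static int
mult_zconjz_example (double re, double im)
{
  /* (re + im*i) * (re - im*i) has real part re*re + im*im and a zero
     imaginary part.  */
  double real_part = re * re - im * -im;
  double imag_part = re * -im + im * re;
  return real_part == re * re + im * im && imag_part == 0.0;
}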
8232 /* Fold a binary expression of code CODE and type TYPE with operands
8233 OP0 and OP1. Return the folded expression if folding is
8234 successful. Otherwise, return NULL_TREE. */
8237 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8239 enum tree_code_class kind = TREE_CODE_CLASS (code);
8240 tree arg0, arg1, tem;
8241 tree t1 = NULL_TREE;
8243 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8244 && TREE_CODE_LENGTH (code) == 2
8246 && op1 != NULL_TREE);
8251 /* Strip any conversions that don't change the mode. This is
8252 safe for every expression, except for a comparison expression
8253 because its signedness is derived from its operands. So, in
8254 the latter case, only strip conversions that don't change the
8257 Note that this is done as an internal manipulation within the
8258 constant folder, in order to find the simplest representation
8259 of the arguments so that their form can be studied. In any
8260 case, the appropriate type conversions should be put back in
8261 the tree that will get out of the constant folder. */
8263 if (kind == tcc_comparison)
8265 STRIP_SIGN_NOPS (arg0);
8266 STRIP_SIGN_NOPS (arg1);
8274 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8275 constant but we can't do arithmetic on them. */
8276 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8277 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8278 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8279 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8281 if (kind == tcc_binary)
8282 tem = const_binop (code, arg0, arg1, 0);
8283 else if (kind == tcc_comparison)
8284 tem = fold_relational_const (code, type, arg0, arg1);
8288 if (tem != NULL_TREE)
8290 if (TREE_TYPE (tem) != type)
8291 tem = fold_convert (type, tem);
8296 /* If this is a commutative operation, and ARG0 is a constant, move it
8297 to ARG1 to reduce the number of tests below. */
8298 if (commutative_tree_code (code)
8299 && tree_swap_operands_p (arg0, arg1, true))
8300 return fold_build2 (code, type, op1, op0);
8302 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8304 First check for cases where an arithmetic operation is applied to a
8305 compound, conditional, or comparison operation. Push the arithmetic
8306 operation inside the compound or conditional to see if any folding
8307 can then be done. Convert comparison to conditional for this purpose.
8308 This also optimizes non-constant cases that used to be done in expand_expr.
8311 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
8312 one of the operands is a truth value (such as a comparison) and the other is
8313 a truth value or a BIT_AND_EXPR with the constant 1.  In that case, the
8314 code below would make the expression more complex. Change it to a
8315 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8316 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
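/* As a hypothetical illustration of the conversion above: for int operands
   a, b, c and d, (a < b) & (c < d) is rewritten as a TRUTH_AND_EXPR of the
   two comparisons (a non-short-circuit logical AND), (a < b) != (c < d)
   becomes a TRUTH_XOR_EXPR, and (a < b) == (c < d) becomes the inversion of
   that TRUTH_XOR_EXPR; the result is computed in boolean_type_node and then
   converted back to TYPE.  */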
8318 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8319 || code == EQ_EXPR || code == NE_EXPR)
8320 && ((truth_value_p (TREE_CODE (arg0))
8321 && (truth_value_p (TREE_CODE (arg1))
8322 || (TREE_CODE (arg1) == BIT_AND_EXPR
8323 && integer_onep (TREE_OPERAND (arg1, 1)))))
8324 || (truth_value_p (TREE_CODE (arg1))
8325 && (truth_value_p (TREE_CODE (arg0))
8326 || (TREE_CODE (arg0) == BIT_AND_EXPR
8327 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8329 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8330 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8333 fold_convert (boolean_type_node, arg0),
8334 fold_convert (boolean_type_node, arg1));
8336 if (code == EQ_EXPR)
8337 tem = invert_truthvalue (tem);
8339 return fold_convert (type, tem);
8342 if (TREE_CODE_CLASS (code) == tcc_binary
8343 || TREE_CODE_CLASS (code) == tcc_comparison)
8345 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8346 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8347 fold_build2 (code, type,
8348 TREE_OPERAND (arg0, 1), op1));
8349 if (TREE_CODE (arg1) == COMPOUND_EXPR
8350 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8351 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8352 fold_build2 (code, type,
8353 op0, TREE_OPERAND (arg1, 1)));
8355 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8357 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8359 /*cond_first_p=*/1);
8360 if (tem != NULL_TREE)
8364 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8366 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8368 /*cond_first_p=*/0);
8369 if (tem != NULL_TREE)
8377 /* A + (-B) -> A - B */
8378 if (TREE_CODE (arg1) == NEGATE_EXPR)
8379 return fold_build2 (MINUS_EXPR, type,
8380 fold_convert (type, arg0),
8381 fold_convert (type, TREE_OPERAND (arg1, 0)));
8382 /* (-A) + B -> B - A */
8383 if (TREE_CODE (arg0) == NEGATE_EXPR
8384 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8385 return fold_build2 (MINUS_EXPR, type,
8386 fold_convert (type, arg1),
8387 fold_convert (type, TREE_OPERAND (arg0, 0)));
8388 /* Convert ~A + 1 to -A. */
8389 if (INTEGRAL_TYPE_P (type)
8390 && TREE_CODE (arg0) == BIT_NOT_EXPR
8391 && integer_onep (arg1))
8392 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8394 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or one.  */
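/* Hypothetical examples of what fold_plusminus_mult_expr can produce for an
   integral TYPE (floating-point types additionally require
   -funsafe-math-optimizations):

	x*3 + x*5   becomes   x*8
	x*z - y*z   becomes   (x - y)*z

   The operands and multipliers are illustrative only.  */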
8396 if ((TREE_CODE (arg0) == MULT_EXPR
8397 || TREE_CODE (arg1) == MULT_EXPR)
8398 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8400 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8405 if (! FLOAT_TYPE_P (type))
8407 if (integer_zerop (arg1))
8408 return non_lvalue (fold_convert (type, arg0));
8410 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8411 with a constant, and the two constants have no bits in common,
8412 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
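/* For instance (assuming 32-bit ints): 0xF0 and 0x0F share no bits, so

	(a & 0xF0) + (b & 0x0F)

   can never carry and is handled as (a & 0xF0) | (b & 0x0F), which the
   BIT_IOR_EXPR code may simplify further.  */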
8414 if (TREE_CODE (arg0) == BIT_AND_EXPR
8415 && TREE_CODE (arg1) == BIT_AND_EXPR
8416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8417 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8418 && integer_zerop (const_binop (BIT_AND_EXPR,
8419 TREE_OPERAND (arg0, 1),
8420 TREE_OPERAND (arg1, 1), 0)))
8422 code = BIT_IOR_EXPR;
8426 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8427 (plus (plus (mult) (mult)) (foo)) so that we can
8428 take advantage of the factoring cases below. */
8429 if (((TREE_CODE (arg0) == PLUS_EXPR
8430 || TREE_CODE (arg0) == MINUS_EXPR)
8431 && TREE_CODE (arg1) == MULT_EXPR)
8432 || ((TREE_CODE (arg1) == PLUS_EXPR
8433 || TREE_CODE (arg1) == MINUS_EXPR)
8434 && TREE_CODE (arg0) == MULT_EXPR))
8436 tree parg0, parg1, parg, marg;
8437 enum tree_code pcode;
8439 if (TREE_CODE (arg1) == MULT_EXPR)
8440 parg = arg0, marg = arg1;
8442 parg = arg1, marg = arg0;
8443 pcode = TREE_CODE (parg);
8444 parg0 = TREE_OPERAND (parg, 0);
8445 parg1 = TREE_OPERAND (parg, 1);
8449 if (TREE_CODE (parg0) == MULT_EXPR
8450 && TREE_CODE (parg1) != MULT_EXPR)
8451 return fold_build2 (pcode, type,
8452 fold_build2 (PLUS_EXPR, type,
8453 fold_convert (type, parg0),
8454 fold_convert (type, marg)),
8455 fold_convert (type, parg1));
8456 if (TREE_CODE (parg0) != MULT_EXPR
8457 && TREE_CODE (parg1) == MULT_EXPR)
8458 return fold_build2 (PLUS_EXPR, type,
8459 fold_convert (type, parg0),
8460 fold_build2 (pcode, type,
8461 fold_convert (type, marg),
8466 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8467 of the array.  The loop optimizer sometimes produces this kind of expression.  */
8469 if (TREE_CODE (arg0) == ADDR_EXPR)
8471 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8473 return fold_convert (type, tem);
8475 else if (TREE_CODE (arg1) == ADDR_EXPR)
8477 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8479 return fold_convert (type, tem);
8484 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8485 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8486 return non_lvalue (fold_convert (type, arg0));
8488 /* Likewise if the operands are reversed. */
8489 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8490 return non_lvalue (fold_convert (type, arg1));
8492 /* Convert X + -C into X - C. */
8493 if (TREE_CODE (arg1) == REAL_CST
8494 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8496 tem = fold_negate_const (arg1, type);
8497 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8498 return fold_build2 (MINUS_EXPR, type,
8499 fold_convert (type, arg0),
8500 fold_convert (type, tem));
8503 if (flag_unsafe_math_optimizations
8504 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8505 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8506 && (tem = distribute_real_division (code, type, arg0, arg1)))
8509 /* Convert x+x into x*2.0. */
8510 if (operand_equal_p (arg0, arg1, 0)
8511 && SCALAR_FLOAT_TYPE_P (type))
8512 return fold_build2 (MULT_EXPR, type, arg0,
8513 build_real (type, dconst2));
8515 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8516 if (flag_unsafe_math_optimizations
8517 && TREE_CODE (arg1) == PLUS_EXPR
8518 && TREE_CODE (arg0) != MULT_EXPR)
8520 tree tree10 = TREE_OPERAND (arg1, 0);
8521 tree tree11 = TREE_OPERAND (arg1, 1);
8522 if (TREE_CODE (tree11) == MULT_EXPR
8523 && TREE_CODE (tree10) == MULT_EXPR)
8526 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8527 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8530 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8531 if (flag_unsafe_math_optimizations
8532 && TREE_CODE (arg0) == PLUS_EXPR
8533 && TREE_CODE (arg1) != MULT_EXPR)
8535 tree tree00 = TREE_OPERAND (arg0, 0);
8536 tree tree01 = TREE_OPERAND (arg0, 1);
8537 if (TREE_CODE (tree01) == MULT_EXPR
8538 && TREE_CODE (tree00) == MULT_EXPR)
8541 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8542 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8548 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8549 is a rotate of A by C1 bits. */
8550 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8551 is a rotate of A by B bits. */
8553 enum tree_code code0, code1;
8554 code0 = TREE_CODE (arg0);
8555 code1 = TREE_CODE (arg1);
8556 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8557 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8558 && operand_equal_p (TREE_OPERAND (arg0, 0),
8559 TREE_OPERAND (arg1, 0), 0)
8560 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8562 tree tree01, tree11;
8563 enum tree_code code01, code11;
8565 tree01 = TREE_OPERAND (arg0, 1);
8566 tree11 = TREE_OPERAND (arg1, 1);
8567 STRIP_NOPS (tree01);
8568 STRIP_NOPS (tree11);
8569 code01 = TREE_CODE (tree01);
8570 code11 = TREE_CODE (tree11);
8571 if (code01 == INTEGER_CST
8572 && code11 == INTEGER_CST
8573 && TREE_INT_CST_HIGH (tree01) == 0
8574 && TREE_INT_CST_HIGH (tree11) == 0
8575 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8576 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8577 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8578 code0 == LSHIFT_EXPR ? tree01 : tree11);
8579 else if (code11 == MINUS_EXPR)
8581 tree tree110, tree111;
8582 tree110 = TREE_OPERAND (tree11, 0);
8583 tree111 = TREE_OPERAND (tree11, 1);
8584 STRIP_NOPS (tree110);
8585 STRIP_NOPS (tree111);
8586 if (TREE_CODE (tree110) == INTEGER_CST
8587 && 0 == compare_tree_int (tree110,
8589 (TREE_TYPE (TREE_OPERAND
8591 && operand_equal_p (tree01, tree111, 0))
8592 return build2 ((code0 == LSHIFT_EXPR
8595 type, TREE_OPERAND (arg0, 0), tree01);
8597 else if (code01 == MINUS_EXPR)
8599 tree tree010, tree011;
8600 tree010 = TREE_OPERAND (tree01, 0);
8601 tree011 = TREE_OPERAND (tree01, 1);
8602 STRIP_NOPS (tree010);
8603 STRIP_NOPS (tree011);
8604 if (TREE_CODE (tree010) == INTEGER_CST
8605 && 0 == compare_tree_int (tree010,
8607 (TREE_TYPE (TREE_OPERAND
8609 && operand_equal_p (tree11, tree011, 0))
8610 return build2 ((code0 != LSHIFT_EXPR
8613 type, TREE_OPERAND (arg0, 0), tree11);
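/* Source-level sketch (hypothetical functions, assuming a 32-bit unsigned
   int and 0 < b < 32):

	unsigned int rotl3 (unsigned int x) { return (x << 3) + (x >> 29); }
	unsigned int rotlb (unsigned int x, int b)
	{ return (x << b) + (x >> (32 - b)); }

   Both additions match the patterns above and are folded into a single
   LROTATE_EXPR of x, by 3 and by b respectively.  */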
8619 /* In most languages, can't associate operations on floats through
8620 parentheses. Rather than remember where the parentheses were, we
8621 don't associate floats at all, unless the user has specified
8622 -funsafe-math-optimizations. */
8624 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8626 tree var0, con0, lit0, minus_lit0;
8627 tree var1, con1, lit1, minus_lit1;
8629 /* Split both trees into variables, constants, and literals. Then
8630 associate each group together, the constants with literals,
8631 then the result with variables. This increases the chances of
8632 literals being recombined later and of generating relocatable
8633 expressions for the sum of a constant and literal. */
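/* A small hypothetical example of the regrouping described above:
   (x + 4) + (y + 5) splits into the variables x and y and the literals 4
   and 5, which are reassociated into (x + y) + 9.  For floating-point types
   this is only attempted under -funsafe-math-optimizations, per the test
   above.  */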
8634 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8635 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8636 code == MINUS_EXPR);
8638 /* Only do something if we found more than two objects. Otherwise,
8639 nothing has changed and we risk infinite recursion. */
8640 if (2 < ((var0 != 0) + (var1 != 0)
8641 + (con0 != 0) + (con1 != 0)
8642 + (lit0 != 0) + (lit1 != 0)
8643 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8645 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8646 if (code == MINUS_EXPR)
8649 var0 = associate_trees (var0, var1, code, type);
8650 con0 = associate_trees (con0, con1, code, type);
8651 lit0 = associate_trees (lit0, lit1, code, type);
8652 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8654 /* Preserve the MINUS_EXPR if the negative part of the literal is
8655 greater than the positive part. Otherwise, the multiplicative
8656 folding code (i.e. extract_muldiv) may be fooled when
8657 unsigned constants are subtracted, as in the following
8658 example: ((X*2 + 4) - 8U)/2. */
8659 if (minus_lit0 && lit0)
8661 if (TREE_CODE (lit0) == INTEGER_CST
8662 && TREE_CODE (minus_lit0) == INTEGER_CST
8663 && tree_int_cst_lt (lit0, minus_lit0))
8665 minus_lit0 = associate_trees (minus_lit0, lit0,
8671 lit0 = associate_trees (lit0, minus_lit0,
8679 return fold_convert (type,
8680 associate_trees (var0, minus_lit0,
8684 con0 = associate_trees (con0, minus_lit0,
8686 return fold_convert (type,
8687 associate_trees (var0, con0,
8692 con0 = associate_trees (con0, lit0, code, type);
8693 return fold_convert (type, associate_trees (var0, con0,
8701 /* A - (-B) -> A + B */
8702 if (TREE_CODE (arg1) == NEGATE_EXPR)
8703 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8704 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8705 if (TREE_CODE (arg0) == NEGATE_EXPR
8706 && (FLOAT_TYPE_P (type)
8707 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8708 && negate_expr_p (arg1)
8709 && reorder_operands_p (arg0, arg1))
8710 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8711 TREE_OPERAND (arg0, 0));
8712 /* Convert -A - 1 to ~A. */
8713 if (INTEGRAL_TYPE_P (type)
8714 && TREE_CODE (arg0) == NEGATE_EXPR
8715 && integer_onep (arg1))
8716 return fold_build1 (BIT_NOT_EXPR, type,
8717 fold_convert (type, TREE_OPERAND (arg0, 0)));
8719 /* Convert -1 - A to ~A. */
8720 if (INTEGRAL_TYPE_P (type)
8721 && integer_all_onesp (arg0))
8722 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8724 if (! FLOAT_TYPE_P (type))
8726 if (integer_zerop (arg0))
8727 return negate_expr (fold_convert (type, arg1));
8728 if (integer_zerop (arg1))
8729 return non_lvalue (fold_convert (type, arg0));
8731 /* Fold A - (A & B) into ~B & A. */
8732 if (!TREE_SIDE_EFFECTS (arg0)
8733 && TREE_CODE (arg1) == BIT_AND_EXPR)
8735 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8736 return fold_build2 (BIT_AND_EXPR, type,
8737 fold_build1 (BIT_NOT_EXPR, type,
8738 TREE_OPERAND (arg1, 0)),
8740 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8741 return fold_build2 (BIT_AND_EXPR, type,
8742 fold_build1 (BIT_NOT_EXPR, type,
8743 TREE_OPERAND (arg1, 1)),
8747 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8748 any power of 2 minus 1. */
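/* Concrete instance (illustrative only), with B = 15, a power of 2 minus 1:

	(a & ~15) - (a & 15)   becomes   (a ^ 15) - 15

   Both sides are equal to a - 2*(a & 15) because 15 masks exactly the low
   four bits.  */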
8749 if (TREE_CODE (arg0) == BIT_AND_EXPR
8750 && TREE_CODE (arg1) == BIT_AND_EXPR
8751 && operand_equal_p (TREE_OPERAND (arg0, 0),
8752 TREE_OPERAND (arg1, 0), 0))
8754 tree mask0 = TREE_OPERAND (arg0, 1);
8755 tree mask1 = TREE_OPERAND (arg1, 1);
8756 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8758 if (operand_equal_p (tem, mask1, 0))
8760 tem = fold_build2 (BIT_XOR_EXPR, type,
8761 TREE_OPERAND (arg0, 0), mask1);
8762 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8767 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8768 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8769 return non_lvalue (fold_convert (type, arg0));
8771 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8772 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8773 (-ARG1 + ARG0) reduces to -ARG1. */
8774 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8775 return negate_expr (fold_convert (type, arg1));
8777 /* Fold &x - &x. This can happen from &x.foo - &x.
8778 This is unsafe for certain floats even in non-IEEE formats.
8779 In IEEE, it is unsafe because it does wrong for NaNs.
8780 Also note that operand_equal_p is always false if an operand is volatile.  */
8783 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8784 && operand_equal_p (arg0, arg1, 0))
8785 return fold_convert (type, integer_zero_node);
8787 /* A - B -> A + (-B) if B is easily negatable. */
8788 if (negate_expr_p (arg1)
8789 && ((FLOAT_TYPE_P (type)
8790 /* Avoid this transformation if B is a positive REAL_CST. */
8791 && (TREE_CODE (arg1) != REAL_CST
8792 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8793 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8794 return fold_build2 (PLUS_EXPR, type,
8795 fold_convert (type, arg0),
8796 fold_convert (type, negate_expr (arg1)));
8798 /* Try folding difference of addresses. */
8802 if ((TREE_CODE (arg0) == ADDR_EXPR
8803 || TREE_CODE (arg1) == ADDR_EXPR)
8804 && ptr_difference_const (arg0, arg1, &diff))
8805 return build_int_cst_type (type, diff);
8808 /* Fold &a[i] - &a[j] to i-j. */
8809 if (TREE_CODE (arg0) == ADDR_EXPR
8810 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8811 && TREE_CODE (arg1) == ADDR_EXPR
8812 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8814 tree aref0 = TREE_OPERAND (arg0, 0);
8815 tree aref1 = TREE_OPERAND (arg1, 0);
8816 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8817 TREE_OPERAND (aref1, 0), 0))
8819 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8820 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8821 tree esz = array_ref_element_size (aref0);
8822 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8823 return fold_build2 (MULT_EXPR, type, diff,
8824 fold_convert (type, esz));
8829 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8830 of the array.  The loop optimizer sometimes produces this kind of expression.  */
8832 if (TREE_CODE (arg0) == ADDR_EXPR)
8834 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8836 return fold_convert (type, tem);
8839 if (flag_unsafe_math_optimizations
8840 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8841 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8842 && (tem = distribute_real_division (code, type, arg0, arg1)))
8845 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or one.  */
8847 if ((TREE_CODE (arg0) == MULT_EXPR
8848 || TREE_CODE (arg1) == MULT_EXPR)
8849 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8851 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8859 /* (-A) * (-B) -> A * B */
8860 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8861 return fold_build2 (MULT_EXPR, type,
8862 TREE_OPERAND (arg0, 0),
8863 negate_expr (arg1));
8864 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8865 return fold_build2 (MULT_EXPR, type,
8867 TREE_OPERAND (arg1, 0));
8869 if (! FLOAT_TYPE_P (type))
8871 if (integer_zerop (arg1))
8872 return omit_one_operand (type, arg1, arg0);
8873 if (integer_onep (arg1))
8874 return non_lvalue (fold_convert (type, arg0));
8875 /* Transform x * -1 into -x. */
8876 if (integer_all_onesp (arg1))
8877 return fold_convert (type, negate_expr (arg0));
8879 /* (a * (1 << b)) is (a << b) */
8880 if (TREE_CODE (arg1) == LSHIFT_EXPR
8881 && integer_onep (TREE_OPERAND (arg1, 0)))
8882 return fold_build2 (LSHIFT_EXPR, type, arg0,
8883 TREE_OPERAND (arg1, 1));
8884 if (TREE_CODE (arg0) == LSHIFT_EXPR
8885 && integer_onep (TREE_OPERAND (arg0, 0)))
8886 return fold_build2 (LSHIFT_EXPR, type, arg1,
8887 TREE_OPERAND (arg0, 1));
8889 if (TREE_CODE (arg1) == INTEGER_CST
8890 && 0 != (tem = extract_muldiv (op0,
8891 fold_convert (type, arg1),
8893 return fold_convert (type, tem);
8895 /* Optimize z * conj(z) for integer complex numbers. */
8896 if (TREE_CODE (arg0) == CONJ_EXPR
8897 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8898 return fold_mult_zconjz (type, arg1);
8899 if (TREE_CODE (arg1) == CONJ_EXPR
8900 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8901 return fold_mult_zconjz (type, arg0);
8905 /* Maybe fold x * 0 to 0. The expressions aren't the same
8906 when x is NaN, since x * 0 is also NaN. Nor are they the
8907 same in modes with signed zeros, since multiplying a
8908 negative value by 0 gives -0, not +0. */
8909 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8910 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8911 && real_zerop (arg1))
8912 return omit_one_operand (type, arg1, arg0);
8913 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8914 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8915 && real_onep (arg1))
8916 return non_lvalue (fold_convert (type, arg0));
8918 /* Transform x * -1.0 into -x. */
8919 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8920 && real_minus_onep (arg1))
8921 return fold_convert (type, negate_expr (arg0));
8923 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8924 if (flag_unsafe_math_optimizations
8925 && TREE_CODE (arg0) == RDIV_EXPR
8926 && TREE_CODE (arg1) == REAL_CST
8927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8929 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8932 return fold_build2 (RDIV_EXPR, type, tem,
8933 TREE_OPERAND (arg0, 1));
8936 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8937 if (operand_equal_p (arg0, arg1, 0))
8939 tree tem = fold_strip_sign_ops (arg0);
8940 if (tem != NULL_TREE)
8942 tem = fold_convert (type, tem);
8943 return fold_build2 (MULT_EXPR, type, tem, tem);
8947 /* Optimize z * conj(z) for floating point complex numbers.
8948 Guarded by flag_unsafe_math_optimizations as non-finite
8949 imaginary components don't produce scalar results. */
8950 if (flag_unsafe_math_optimizations
8951 && TREE_CODE (arg0) == CONJ_EXPR
8952 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8953 return fold_mult_zconjz (type, arg1);
8954 if (flag_unsafe_math_optimizations
8955 && TREE_CODE (arg1) == CONJ_EXPR
8956 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8957 return fold_mult_zconjz (type, arg0);
8959 if (flag_unsafe_math_optimizations)
8961 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8962 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8964 /* Optimizations of root(...)*root(...). */
8965 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8967 tree rootfn, arg, arglist;
8968 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8969 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8971 /* Optimize sqrt(x)*sqrt(x) as x. */
8972 if (BUILTIN_SQRT_P (fcode0)
8973 && operand_equal_p (arg00, arg10, 0)
8974 && ! HONOR_SNANS (TYPE_MODE (type)))
8977 /* Optimize root(x)*root(y) as root(x*y). */
8978 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8979 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8980 arglist = build_tree_list (NULL_TREE, arg);
8981 return build_function_call_expr (rootfn, arglist);
8984 /* Optimize expN(x)*expN(y) as expN(x+y). */
8985 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8987 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8988 tree arg = fold_build2 (PLUS_EXPR, type,
8989 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8990 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8991 tree arglist = build_tree_list (NULL_TREE, arg);
8992 return build_function_call_expr (expfn, arglist);
8995 /* Optimizations of pow(...)*pow(...). */
8996 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8997 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8998 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9000 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9001 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9003 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9004 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9007 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9008 if (operand_equal_p (arg01, arg11, 0))
9010 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9011 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9012 tree arglist = tree_cons (NULL_TREE, arg,
9013 build_tree_list (NULL_TREE,
9015 return build_function_call_expr (powfn, arglist);
9018 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9019 if (operand_equal_p (arg00, arg10, 0))
9021 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9022 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9023 tree arglist = tree_cons (NULL_TREE, arg00,
9024 build_tree_list (NULL_TREE,
9026 return build_function_call_expr (powfn, arglist);
9030 /* Optimize tan(x)*cos(x) as sin(x). */
9031 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9032 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9033 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9034 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9035 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9036 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9037 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9038 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9040 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9042 if (sinfn != NULL_TREE)
9043 return build_function_call_expr (sinfn,
9044 TREE_OPERAND (arg0, 1));
9047 /* Optimize x*pow(x,c) as pow(x,c+1). */
9048 if (fcode1 == BUILT_IN_POW
9049 || fcode1 == BUILT_IN_POWF
9050 || fcode1 == BUILT_IN_POWL)
9052 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9053 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9055 if (TREE_CODE (arg11) == REAL_CST
9056 && ! TREE_CONSTANT_OVERFLOW (arg11)
9057 && operand_equal_p (arg0, arg10, 0))
9059 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9063 c = TREE_REAL_CST (arg11);
9064 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9065 arg = build_real (type, c);
9066 arglist = build_tree_list (NULL_TREE, arg);
9067 arglist = tree_cons (NULL_TREE, arg0, arglist);
9068 return build_function_call_expr (powfn, arglist);
9072 /* Optimize pow(x,c)*x as pow(x,c+1). */
9073 if (fcode0 == BUILT_IN_POW
9074 || fcode0 == BUILT_IN_POWF
9075 || fcode0 == BUILT_IN_POWL)
9077 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9078 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9080 if (TREE_CODE (arg01) == REAL_CST
9081 && ! TREE_CONSTANT_OVERFLOW (arg01)
9082 && operand_equal_p (arg1, arg00, 0))
9084 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9088 c = TREE_REAL_CST (arg01);
9089 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9090 arg = build_real (type, c);
9091 arglist = build_tree_list (NULL_TREE, arg);
9092 arglist = tree_cons (NULL_TREE, arg1, arglist);
9093 return build_function_call_expr (powfn, arglist);
9097 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9099 && operand_equal_p (arg0, arg1, 0))
9101 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9105 tree arg = build_real (type, dconst2);
9106 tree arglist = build_tree_list (NULL_TREE, arg);
9107 arglist = tree_cons (NULL_TREE, arg0, arglist);
9108 return build_function_call_expr (powfn, arglist);
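/* Summary of the rewrites above, all guarded by
   flag_unsafe_math_optimizations; x, y, z are hypothetical double arguments
   and c a real constant:

	sqrt (x) * sqrt (x)      ->  x                 (no sNaNs honored)
	sqrt (x) * sqrt (y)      ->  sqrt (x * y)
	exp (x) * exp (y)        ->  exp (x + y)
	pow (x, y) * pow (z, y)  ->  pow (x * z, y)
	pow (x, y) * pow (x, z)  ->  pow (x, y + z)
	tan (x) * cos (x)        ->  sin (x)
	x * pow (x, c)           ->  pow (x, c + 1)
	x * x                    ->  pow (x, 2.0), expanded back as x * x  */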
9117 if (integer_all_onesp (arg1))
9118 return omit_one_operand (type, arg1, arg0);
9119 if (integer_zerop (arg1))
9120 return non_lvalue (fold_convert (type, arg0));
9121 if (operand_equal_p (arg0, arg1, 0))
9122 return non_lvalue (fold_convert (type, arg0));
9125 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9126 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9128 t1 = build_int_cst (type, -1);
9129 t1 = force_fit_type (t1, 0, false, false);
9130 return omit_one_operand (type, t1, arg1);
9134 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9135 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9137 t1 = build_int_cst (type, -1);
9138 t1 = force_fit_type (t1, 0, false, false);
9139 return omit_one_operand (type, t1, arg0);
9142 /* Canonicalize (X & C1) | C2. */
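/* Hypothetical examples for a 32-bit unsigned type:

	(x & 0x0F) | 0x3F              ->  0x3F              (C1 & C2 == C1)
	(x & 0xFFFF0000) | 0x0000FFFF  ->  x | 0x0000FFFF    (C1 | C2 == ~0)
	(x & 0x00FF) | 0x00F0          ->  (x & 0x000F) | 0x00F0

   In the first case any side effects in X are preserved by
   omit_one_operand.  */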
9143 if (TREE_CODE (arg0) == BIT_AND_EXPR
9144 && TREE_CODE (arg1) == INTEGER_CST
9145 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9147 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9148 int width = TYPE_PRECISION (type);
9149 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9150 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9151 hi2 = TREE_INT_CST_HIGH (arg1);
9152 lo2 = TREE_INT_CST_LOW (arg1);
9154 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9155 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9156 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9158 if (width > HOST_BITS_PER_WIDE_INT)
9160 mhi = (unsigned HOST_WIDE_INT) -1
9161 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9167 mlo = (unsigned HOST_WIDE_INT) -1
9168 >> (HOST_BITS_PER_WIDE_INT - width);
9171 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9172 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9173 return fold_build2 (BIT_IOR_EXPR, type,
9174 TREE_OPERAND (arg0, 0), arg1);
9176 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9179 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9180 return fold_build2 (BIT_IOR_EXPR, type,
9181 fold_build2 (BIT_AND_EXPR, type,
9182 TREE_OPERAND (arg0, 0),
9183 build_int_cst_wide (type,
9189 /* (X & Y) | Y is (X, Y). */
9190 if (TREE_CODE (arg0) == BIT_AND_EXPR
9191 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9192 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9193 /* (X & Y) | X is (Y, X). */
9194 if (TREE_CODE (arg0) == BIT_AND_EXPR
9195 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9196 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9197 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9198 /* X | (X & Y) is (Y, X). */
9199 if (TREE_CODE (arg1) == BIT_AND_EXPR
9200 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9201 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9202 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9203 /* X | (Y & X) is (Y, X). */
9204 if (TREE_CODE (arg1) == BIT_AND_EXPR
9205 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9206 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9207 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9209 t1 = distribute_bit_expr (code, type, arg0, arg1);
9210 if (t1 != NULL_TREE)
9213 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9215 This results in more efficient code for machines without a NAND
9216 instruction. Combine will canonicalize to the first form
9217 which will allow use of NAND instructions provided by the
9218 backend if they exist. */
9219 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9220 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9222 return fold_build1 (BIT_NOT_EXPR, type,
9223 build2 (BIT_AND_EXPR, type,
9224 TREE_OPERAND (arg0, 0),
9225 TREE_OPERAND (arg1, 0)));
9228 /* See if this can be simplified into a rotate first. If that
9229 is unsuccessful continue in the association code. */
9233 if (integer_zerop (arg1))
9234 return non_lvalue (fold_convert (type, arg0));
9235 if (integer_all_onesp (arg1))
9236 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9237 if (operand_equal_p (arg0, arg1, 0))
9238 return omit_one_operand (type, integer_zero_node, arg0);
9241 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9242 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9244 t1 = build_int_cst (type, -1);
9245 t1 = force_fit_type (t1, 0, false, false);
9246 return omit_one_operand (type, t1, arg1);
9250 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9251 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9253 t1 = build_int_cst (type, -1);
9254 t1 = force_fit_type (t1, 0, false, false);
9255 return omit_one_operand (type, t1, arg0);
9258 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9259 with a constant, and the two constants have no bits in common,
9260 we should treat this as a BIT_IOR_EXPR since this may produce more
9262 if (TREE_CODE (arg0) == BIT_AND_EXPR
9263 && TREE_CODE (arg1) == BIT_AND_EXPR
9264 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9265 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9266 && integer_zerop (const_binop (BIT_AND_EXPR,
9267 TREE_OPERAND (arg0, 1),
9268 TREE_OPERAND (arg1, 1), 0)))
9270 code = BIT_IOR_EXPR;
9274 /* (X | Y) ^ X -> Y & ~X.  */
9275 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9278 tree t2 = TREE_OPERAND (arg0, 1);
9279 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9281 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9282 fold_convert (type, t1));
9286 /* (Y | X) ^ X -> Y & ~X.  */
9287 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9288 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9290 tree t2 = TREE_OPERAND (arg0, 0);
9291 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9293 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9294 fold_convert (type, t1));
9298 /* X ^ (X | Y) -> Y & ~X.  */
9299 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9300 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9302 tree t2 = TREE_OPERAND (arg1, 1);
9303 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9305 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9306 fold_convert (type, t1));
9310 /* X ^ (Y | X) -> Y & ~X.  */
9311 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9312 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9314 tree t2 = TREE_OPERAND (arg1, 0);
9315 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9317 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9318 fold_convert (type, t1));
9322 /* Convert ~X ^ ~Y to X ^ Y. */
9323 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9324 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9325 return fold_build2 (code, type,
9326 fold_convert (type, TREE_OPERAND (arg0, 0)),
9327 fold_convert (type, TREE_OPERAND (arg1, 0)));
9329 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9330 if (TREE_CODE (arg0) == BIT_AND_EXPR
9331 && integer_onep (TREE_OPERAND (arg0, 1))
9332 && integer_onep (arg1))
9333 return fold_build2 (EQ_EXPR, type, arg0,
9334 build_int_cst (TREE_TYPE (arg0), 0));
9336 /* Fold (X & Y) ^ Y as ~X & Y. */
9337 if (TREE_CODE (arg0) == BIT_AND_EXPR
9338 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9340 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9341 return fold_build2 (BIT_AND_EXPR, type,
9342 fold_build1 (BIT_NOT_EXPR, type, tem),
9343 fold_convert (type, arg1));
9345 /* Fold (X & Y) ^ X as ~Y & X. */
9346 if (TREE_CODE (arg0) == BIT_AND_EXPR
9347 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9348 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9350 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9351 return fold_build2 (BIT_AND_EXPR, type,
9352 fold_build1 (BIT_NOT_EXPR, type, tem),
9353 fold_convert (type, arg1));
9355 /* Fold X ^ (X & Y) as X & ~Y. */
9356 if (TREE_CODE (arg1) == BIT_AND_EXPR
9357 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9359 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9360 return fold_build2 (BIT_AND_EXPR, type,
9361 fold_convert (type, arg0),
9362 fold_build1 (BIT_NOT_EXPR, type, tem));
9364 /* Fold X ^ (Y & X) as ~Y & X. */
9365 if (TREE_CODE (arg1) == BIT_AND_EXPR
9366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9367 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9369 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9370 return fold_build2 (BIT_AND_EXPR, type,
9371 fold_build1 (BIT_NOT_EXPR, type, tem),
9372 fold_convert (type, arg0));
9375 /* See if this can be simplified into a rotate first. If that
9376 is unsuccessful continue in the association code. */
9380 if (integer_all_onesp (arg1))
9381 return non_lvalue (fold_convert (type, arg0));
9382 if (integer_zerop (arg1))
9383 return omit_one_operand (type, arg1, arg0);
9384 if (operand_equal_p (arg0, arg1, 0))
9385 return non_lvalue (fold_convert (type, arg0));
9387 /* ~X & X is always zero. */
9388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9389 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9390 return omit_one_operand (type, integer_zero_node, arg1);
9392 /* X & ~X is always zero. */
9393 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9394 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9395 return omit_one_operand (type, integer_zero_node, arg0);
9397 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9398 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9399 && TREE_CODE (arg1) == INTEGER_CST
9400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9401 return fold_build2 (BIT_IOR_EXPR, type,
9402 fold_build2 (BIT_AND_EXPR, type,
9403 TREE_OPERAND (arg0, 0), arg1),
9404 fold_build2 (BIT_AND_EXPR, type,
9405 TREE_OPERAND (arg0, 1), arg1));
9407 /* (X | Y) & Y is (X, Y). */
9408 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9409 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9410 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9411 /* (X | Y) & X is (Y, X). */
9412 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9413 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9414 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9415 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9416 /* X & (X | Y) is (Y, X). */
9417 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9418 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9419 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9420 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9421 /* X & (Y | X) is (Y, X). */
9422 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9423 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9424 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9425 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9427 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9428 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9429 && integer_onep (TREE_OPERAND (arg0, 1))
9430 && integer_onep (arg1))
9432 tem = TREE_OPERAND (arg0, 0);
9433 return fold_build2 (EQ_EXPR, type,
9434 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9435 build_int_cst (TREE_TYPE (tem), 1)),
9436 build_int_cst (TREE_TYPE (tem), 0));
9438 /* Fold ~X & 1 as (X & 1) == 0. */
9439 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9440 && integer_onep (arg1))
9442 tem = TREE_OPERAND (arg0, 0);
9443 return fold_build2 (EQ_EXPR, type,
9444 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9445 build_int_cst (TREE_TYPE (tem), 1)),
9446 build_int_cst (TREE_TYPE (tem), 0));
9449 /* Fold (X ^ Y) & Y as ~X & Y. */
9450 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9451 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9453 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9454 return fold_build2 (BIT_AND_EXPR, type,
9455 fold_build1 (BIT_NOT_EXPR, type, tem),
9456 fold_convert (type, arg1));
9458 /* Fold (X ^ Y) & X as ~Y & X. */
9459 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9460 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9461 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9463 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9464 return fold_build2 (BIT_AND_EXPR, type,
9465 fold_build1 (BIT_NOT_EXPR, type, tem),
9466 fold_convert (type, arg1));
9468 /* Fold X & (X ^ Y) as X & ~Y. */
9469 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9470 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9472 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9473 return fold_build2 (BIT_AND_EXPR, type,
9474 fold_convert (type, arg0),
9475 fold_build1 (BIT_NOT_EXPR, type, tem));
9477 /* Fold X & (Y ^ X) as ~Y & X. */
9478 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9479 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9480 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9482 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9483 return fold_build2 (BIT_AND_EXPR, type,
9484 fold_build1 (BIT_NOT_EXPR, type, tem),
9485 fold_convert (type, arg0));
9488 t1 = distribute_bit_expr (code, type, arg0, arg1);
9489 if (t1 != NULL_TREE)
9491 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9492 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9493 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9496 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9498 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9499 && (~TREE_INT_CST_LOW (arg1)
9500 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9501 return fold_convert (type, TREE_OPERAND (arg0, 0));
9504 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9506 This results in more efficient code for machines without a NOR
9507 instruction. Combine will canonicalize to the first form
9508 which will allow use of NOR instructions provided by the
9509 backend if they exist. */
9510 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9511 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9513 return fold_build1 (BIT_NOT_EXPR, type,
9514 build2 (BIT_IOR_EXPR, type,
9515 TREE_OPERAND (arg0, 0),
9516 TREE_OPERAND (arg1, 0)));
9522 /* Don't touch a floating-point divide by zero unless the mode
9523 of the constant can represent infinity. */
9524 if (TREE_CODE (arg1) == REAL_CST
9525 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9526 && real_zerop (arg1))
9529 /* Optimize A / A to 1.0 if we don't care about
9530 NaNs or Infinities. Skip the transformation
9531 for non-real operands. */
9532 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9533 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9534 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9535 && operand_equal_p (arg0, arg1, 0))
9537 tree r = build_real (TREE_TYPE (arg0), dconst1);
9539 return omit_two_operands (type, r, arg0, arg1);
9542 /* The complex version of the above A / A optimization. */
9543 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9544 && operand_equal_p (arg0, arg1, 0))
9546 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9547 if (! HONOR_NANS (TYPE_MODE (elem_type))
9548 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9550 tree r = build_real (elem_type, dconst1);
9551 /* omit_two_operands will call fold_convert for us. */
9552 return omit_two_operands (type, r, arg0, arg1);
9556 /* (-A) / (-B) -> A / B */
9557 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9558 return fold_build2 (RDIV_EXPR, type,
9559 TREE_OPERAND (arg0, 0),
9560 negate_expr (arg1));
9561 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9562 return fold_build2 (RDIV_EXPR, type,
9564 TREE_OPERAND (arg1, 0));
9566 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9567 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9568 && real_onep (arg1))
9569 return non_lvalue (fold_convert (type, arg0));
9571 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9572 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9573 && real_minus_onep (arg1))
9574 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9576 /* If ARG1 is a constant, we can convert this to a multiply by the
9577 reciprocal. This does not have the same rounding properties,
9578 so only do this if -funsafe-math-optimizations. We can actually
9579 always safely do it if ARG1 is a power of two, but it's hard to
9580 tell if it is or not in a portable manner. */
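/* For example (illustrative): x / 2.0 becomes x * 0.5 whenever we are
   optimizing, since the reciprocal of 2.0 is exact, whereas x / 3.0 becomes
   x * (1.0/3.0) only under -funsafe-math-optimizations, because that
   product can round differently from the division.  */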
9581 if (TREE_CODE (arg1) == REAL_CST)
9583 if (flag_unsafe_math_optimizations
9584 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9586 return fold_build2 (MULT_EXPR, type, arg0, tem);
9587 /* Find the reciprocal if optimizing and the result is exact. */
9591 r = TREE_REAL_CST (arg1);
9592 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9594 tem = build_real (type, r);
9595 return fold_build2 (MULT_EXPR, type,
9596 fold_convert (type, arg0), tem);
9600 /* Convert A/B/C to A/(B*C). */
9601 if (flag_unsafe_math_optimizations
9602 && TREE_CODE (arg0) == RDIV_EXPR)
9603 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9604 fold_build2 (MULT_EXPR, type,
9605 TREE_OPERAND (arg0, 1), arg1));
9607 /* Convert A/(B/C) to (A/B)*C. */
9608 if (flag_unsafe_math_optimizations
9609 && TREE_CODE (arg1) == RDIV_EXPR)
9610 return fold_build2 (MULT_EXPR, type,
9611 fold_build2 (RDIV_EXPR, type, arg0,
9612 TREE_OPERAND (arg1, 0)),
9613 TREE_OPERAND (arg1, 1));
9615 /* Convert C1/(X*C2) into (C1/C2)/X. */
9616 if (flag_unsafe_math_optimizations
9617 && TREE_CODE (arg1) == MULT_EXPR
9618 && TREE_CODE (arg0) == REAL_CST
9619 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9621 tree tem = const_binop (RDIV_EXPR, arg0,
9622 TREE_OPERAND (arg1, 1), 0);
9624 return fold_build2 (RDIV_EXPR, type, tem,
9625 TREE_OPERAND (arg1, 0));
9628 if (flag_unsafe_math_optimizations)
9630 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9631 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9633 /* Optimize sin(x)/cos(x) as tan(x). */
9634 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9635 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9636 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9637 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9638 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9640 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9642 if (tanfn != NULL_TREE)
9643 return build_function_call_expr (tanfn,
9644 TREE_OPERAND (arg0, 1));
9647 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9648 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9649 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9650 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9651 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9652 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9654 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9656 if (tanfn != NULL_TREE)
9658 tree tmp = TREE_OPERAND (arg0, 1);
9659 tmp = build_function_call_expr (tanfn, tmp);
9660 return fold_build2 (RDIV_EXPR, type,
9661 build_real (type, dconst1), tmp);
9665 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9666 NaNs or Infinities. */
9667 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9668 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9669 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9671 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9672 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9674 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9675 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9676 && operand_equal_p (arg00, arg01, 0))
9678 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9680 if (cosfn != NULL_TREE)
9681 return build_function_call_expr (cosfn,
9682 TREE_OPERAND (arg0, 1));
9686 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9687 NaNs or Infinities. */
9688 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9689 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9690 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9692 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9693 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9695 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9696 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9697 && operand_equal_p (arg00, arg01, 0))
9699 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9701 if (cosfn != NULL_TREE)
9703 tree tmp = TREE_OPERAND (arg0, 1);
9704 tmp = build_function_call_expr (cosfn, tmp);
9705 return fold_build2 (RDIV_EXPR, type,
9706 build_real (type, dconst1),
9712 /* Optimize pow(x,c)/x as pow(x,c-1). */
9713 if (fcode0 == BUILT_IN_POW
9714 || fcode0 == BUILT_IN_POWF
9715 || fcode0 == BUILT_IN_POWL)
9717 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9718 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9719 if (TREE_CODE (arg01) == REAL_CST
9720 && ! TREE_CONSTANT_OVERFLOW (arg01)
9721 && operand_equal_p (arg1, arg00, 0))
9723 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9727 c = TREE_REAL_CST (arg01);
9728 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9729 arg = build_real (type, c);
9730 arglist = build_tree_list (NULL_TREE, arg);
9731 arglist = tree_cons (NULL_TREE, arg1, arglist);
9732 return build_function_call_expr (powfn, arglist);
9736 /* Optimize x/expN(y) into x*expN(-y). */
9737 if (BUILTIN_EXPONENT_P (fcode1))
9739 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9740 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9741 tree arglist = build_tree_list (NULL_TREE,
9742 fold_convert (type, arg));
9743 arg1 = build_function_call_expr (expfn, arglist);
9744 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9747 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9748 if (fcode1 == BUILT_IN_POW
9749 || fcode1 == BUILT_IN_POWF
9750 || fcode1 == BUILT_IN_POWL)
9752 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9753 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9754 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9755 tree neg11 = fold_convert (type, negate_expr (arg11));
9756 tree arglist = tree_cons(NULL_TREE, arg10,
9757 build_tree_list (NULL_TREE, neg11));
9758 arg1 = build_function_call_expr (powfn, arglist);
9759 return fold_build2 (MULT_EXPR, type, arg0, arg1);
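/* Summary of the rewrites above, all guarded by
   flag_unsafe_math_optimizations; x, y, z are hypothetical double arguments
   and c a real constant:

	sin (x) / cos (x)  ->  tan (x)
	cos (x) / sin (x)  ->  1.0 / tan (x)
	sin (x) / tan (x)  ->  cos (x)        (no NaNs/infinities honored)
	tan (x) / sin (x)  ->  1.0 / cos (x)  (likewise)
	pow (x, c) / x     ->  pow (x, c - 1)
	x / exp (y)        ->  x * exp (-y)
	x / pow (y, z)     ->  x * pow (y, -z)  */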
9764 case TRUNC_DIV_EXPR:
9765 case FLOOR_DIV_EXPR:
9766 /* Simplify A / (B << N) where A and B are positive and B is
9767 a power of 2, to A >> (N + log2(B)). */
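/* Hypothetical instance: for unsigned x and n,

	x / (4u << n)   becomes   x >> (n + 2)

   since 4 is a power of 2 and log2(4) == 2; ARG0 must be unsigned or known
   non-negative, per the test below.  */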
9768 if (TREE_CODE (arg1) == LSHIFT_EXPR
9769 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9771 tree sval = TREE_OPERAND (arg1, 0);
9772 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9774 tree sh_cnt = TREE_OPERAND (arg1, 1);
9775 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9777 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9778 sh_cnt, build_int_cst (NULL_TREE, pow2));
9779 return fold_build2 (RSHIFT_EXPR, type,
9780 fold_convert (type, arg0), sh_cnt);
9785 case ROUND_DIV_EXPR:
9787 case EXACT_DIV_EXPR:
9788 if (integer_onep (arg1))
9789 return non_lvalue (fold_convert (type, arg0));
9790 if (integer_zerop (arg1))
9793 if (!TYPE_UNSIGNED (type)
9794 && TREE_CODE (arg1) == INTEGER_CST
9795 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9796 && TREE_INT_CST_HIGH (arg1) == -1)
9797 return fold_convert (type, negate_expr (arg0));
9799 /* Convert -A / -B to A / B when the type is signed and overflow is undefined.  */
9801 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9802 && TREE_CODE (arg0) == NEGATE_EXPR
9803 && negate_expr_p (arg1))
9804 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9805 negate_expr (arg1));
9806 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9807 && TREE_CODE (arg1) == NEGATE_EXPR
9808 && negate_expr_p (arg0))
9809 return fold_build2 (code, type, negate_expr (arg0),
9810 TREE_OPERAND (arg1, 0));
9812 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9813 operation, EXACT_DIV_EXPR.
9815 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9816 At one time others generated faster code; it's not clear whether they still do
9817 after the last round of changes to the DIV code in expmed.c.  */
9818 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9819 && multiple_of_p (type, arg0, arg1))
9820 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9822 if (TREE_CODE (arg1) == INTEGER_CST
9823 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9824 return fold_convert (type, tem);
9829 case FLOOR_MOD_EXPR:
9830 case ROUND_MOD_EXPR:
9831 case TRUNC_MOD_EXPR:
9832 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
9834 if (integer_onep (arg1))
9835 return omit_one_operand (type, integer_zero_node, arg0);
9837 /* X % 0, return X % 0 unchanged so that we can get the
9838 proper warnings and errors. */
9839 if (integer_zerop (arg1))
9842 /* 0 % X is always zero, but be sure to preserve any side
9843 effects in X. Place this after checking for X == 0. */
9844 if (integer_zerop (arg0))
9845 return omit_one_operand (type, integer_zero_node, arg1);
9847 /* X % -1 is zero. */
9848 if (!TYPE_UNSIGNED (type)
9849 && TREE_CODE (arg1) == INTEGER_CST
9850 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9851 && TREE_INT_CST_HIGH (arg1) == -1)
9852 return omit_one_operand (type, integer_zero_node, arg0);
9854 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9855 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9856 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9857 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9860 /* Also optimize A % (C << N) where C is a power of 2,
9861 to A & ((C << N) - 1). */
9862 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9863 c = TREE_OPERAND (arg1, 0);
9865 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9867 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9868 arg1, integer_one_node);
9869 return fold_build2 (BIT_AND_EXPR, type,
9870 fold_convert (type, arg0),
9871 fold_convert (type, mask));
9875 /* X % -C is the same as X % C. */
9876 if (code == TRUNC_MOD_EXPR
9877 && !TYPE_UNSIGNED (type)
9878 && TREE_CODE (arg1) == INTEGER_CST
9879 && !TREE_CONSTANT_OVERFLOW (arg1)
9880 && TREE_INT_CST_HIGH (arg1) < 0
9882 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9883 && !sign_bit_p (arg1, arg1))
9884 return fold_build2 (code, type, fold_convert (type, arg0),
9885 fold_convert (type, negate_expr (arg1)));
9887 /* X % -Y is the same as X % Y. */
9888 if (code == TRUNC_MOD_EXPR
9889 && !TYPE_UNSIGNED (type)
9890 && TREE_CODE (arg1) == NEGATE_EXPR
9892 return fold_build2 (code, type, fold_convert (type, arg0),
9893 fold_convert (type, TREE_OPERAND (arg1, 0)));
9895 if (TREE_CODE (arg1) == INTEGER_CST
9896 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9897 return fold_convert (type, tem);
9903 if (integer_all_onesp (arg0))
9904 return omit_one_operand (type, arg0, arg1);
9908 /* Optimize -1 >> x for arithmetic right shifts. */
9909 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9910 return omit_one_operand (type, arg0, arg1);
9911 /* ... fall through ... */
9915 if (integer_zerop (arg1))
9916 return non_lvalue (fold_convert (type, arg0));
9917 if (integer_zerop (arg0))
9918 return omit_one_operand (type, arg0, arg1);
9920 /* Since a negative shift count is not well-defined,
9921 don't try to compute it in the compiler.  */
9922 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9925 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
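/* Hypothetical instances for a 32-bit type:

	(x << 3) << 5     becomes   x << 8
	(x << 20) << 20   becomes   0      (combined count >= precision
					    for a left shift)

   Rotates instead reduce the combined count modulo the precision.  */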
9926 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9927 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9928 && host_integerp (TREE_OPERAND (arg0, 1), false)
9929 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9931 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9932 + TREE_INT_CST_LOW (arg1));
9934 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9935 being well defined. */
9936 if (low >= TYPE_PRECISION (type))
9938 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9939 low = low % TYPE_PRECISION (type);
9940 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9941 return build_int_cst (type, 0);
9943 low = TYPE_PRECISION (type) - 1;
9946 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9947 build_int_cst (type, low));
9950 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9951 into x & ((unsigned)-1 >> c) for unsigned types. */
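/* For example, assuming a 32-bit type:

	(x >> 4) << 4   becomes   x & 0xFFFFFFF0
	(x << 4) >> 4   becomes   x & 0x0FFFFFFF   (unsigned x only)

   so the double shift is replaced by a single mask.  */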
9952 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9953 || (TYPE_UNSIGNED (type)
9954 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9955 && host_integerp (arg1, false)
9956 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9957 && host_integerp (TREE_OPERAND (arg0, 1), false)
9958 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9960 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9961 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9967 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9969 lshift = build_int_cst (type, -1);
9970 lshift = int_const_binop (code, lshift, arg1, 0);
9972 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9976 /* Rewrite an LROTATE_EXPR by a constant into an
9977 RROTATE_EXPR by a new constant. */
9978 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9980 tree tem = build_int_cst (NULL_TREE,
9981 GET_MODE_BITSIZE (TYPE_MODE (type)));
9982 tem = fold_convert (TREE_TYPE (arg1), tem);
9983 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9984 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
9987 /* If we have a rotate of a bit operation with the rotate count and
9988 the second operand of the bit operation both constant,
9989 permute the two operations. */
9990 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9991 && (TREE_CODE (arg0) == BIT_AND_EXPR
9992 || TREE_CODE (arg0) == BIT_IOR_EXPR
9993 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9995 return fold_build2 (TREE_CODE (arg0), type,
9996 fold_build2 (code, type,
9997 TREE_OPERAND (arg0, 0), arg1),
9998 fold_build2 (code, type,
9999 TREE_OPERAND (arg0, 1), arg1));
10001 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
10003 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10004 && TREE_CODE (arg0) == RROTATE_EXPR
10005 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10006 && TREE_INT_CST_HIGH (arg1) == 0
10007 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10008 && ((TREE_INT_CST_LOW (arg1)
10009 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10010 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10011 return TREE_OPERAND (arg0, 0);
10016 if (operand_equal_p (arg0, arg1, 0))
10017 return omit_one_operand (type, arg0, arg1);
10018 if (INTEGRAL_TYPE_P (type)
10019 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10020 return omit_one_operand (type, arg1, arg0);
10021 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10027 if (operand_equal_p (arg0, arg1, 0))
10028 return omit_one_operand (type, arg0, arg1);
10029 if (INTEGRAL_TYPE_P (type)
10030 && TYPE_MAX_VALUE (type)
10031 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10032 return omit_one_operand (type, arg1, arg0);
10033 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10038 case TRUTH_ANDIF_EXPR:
10039 /* Note that the operands of this must be ints
10040 and their values must be 0 or 1.
10041 ("true" is a fixed value perhaps depending on the language.) */
10042 /* If first arg is constant zero, return it. */
10043 if (integer_zerop (arg0))
10044 return fold_convert (type, arg0);
10045 case TRUTH_AND_EXPR:
10046 /* If either arg is constant true, drop it. */
10047 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10048 return non_lvalue (fold_convert (type, arg1));
10049 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10050 /* Preserve sequence points. */
10051 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10052 return non_lvalue (fold_convert (type, arg0));
10053 /* If second arg is constant zero, result is zero, but first arg
10054 must be evaluated. */
10055 if (integer_zerop (arg1))
10056 return omit_one_operand (type, arg1, arg0);
10057 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10058 case will be handled here. */
10059 if (integer_zerop (arg0))
10060 return omit_one_operand (type, arg0, arg1);
10062 /* !X && X is always false. */
10063 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10065 return omit_one_operand (type, integer_zero_node, arg1);
10066 /* X && !X is always false. */
10067 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10068 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10069 return omit_one_operand (type, integer_zero_node, arg0);
10071 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10072 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
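/* For example, "i < n && i + 1 > j" can be rewritten as
   "i < n && i >= j".  */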
10075 if (!TREE_SIDE_EFFECTS (arg0)
10076 && !TREE_SIDE_EFFECTS (arg1))
10078 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10079 if (tem && !operand_equal_p (tem, arg0, 0))
10080 return fold_build2 (code, type, tem, arg1);
10082 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10083 if (tem && !operand_equal_p (tem, arg1, 0))
10084 return fold_build2 (code, type, arg0, tem);
10088 /* We only do these simplifications if we are optimizing. */
10092 /* Check for things like (A || B) && (A || C). We can convert this
10093 to A || (B && C). Note that either operator can be any of the four
10094 truth and/or operations and the transformation will still be
10095 valid. Also note that we only care about order for the
10096 ANDIF and ORIF operators. If B contains side effects, this
10097 might change the truth-value of A. */
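/* For example, "(a || b) && (a || c)" folds to "a || (b && c)",
   provided b has no side effects.  */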
10098 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10099 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10100 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10101 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10102 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10103 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10105 tree a00 = TREE_OPERAND (arg0, 0);
10106 tree a01 = TREE_OPERAND (arg0, 1);
10107 tree a10 = TREE_OPERAND (arg1, 0);
10108 tree a11 = TREE_OPERAND (arg1, 1);
10109 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10110 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10111 && (code == TRUTH_AND_EXPR
10112 || code == TRUTH_OR_EXPR));
10114 if (operand_equal_p (a00, a10, 0))
10115 return fold_build2 (TREE_CODE (arg0), type, a00,
10116 fold_build2 (code, type, a01, a11));
10117 else if (commutative && operand_equal_p (a00, a11, 0))
10118 return fold_build2 (TREE_CODE (arg0), type, a00,
10119 fold_build2 (code, type, a01, a10));
10120 else if (commutative && operand_equal_p (a01, a10, 0))
10121 return fold_build2 (TREE_CODE (arg0), type, a01,
10122 fold_build2 (code, type, a00, a11));
10124 /* This case is tricky because we must either have commutative
10125 operators or else A10 must not have side-effects. */
10127 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10128 && operand_equal_p (a01, a11, 0))
10129 return fold_build2 (TREE_CODE (arg0), type,
10130 fold_build2 (code, type, a00, a10),
10134 /* See if we can build a range comparison. */
10135 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10138 /* Check for the possibility of merging component references. If our
10139 lhs is another similar operation, try to merge its rhs with our
10140 rhs. Then try to merge our lhs and rhs. */
10141 if (TREE_CODE (arg0) == code
10142 && 0 != (tem = fold_truthop (code, type,
10143 TREE_OPERAND (arg0, 1), arg1)))
10144 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10146 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10151 case TRUTH_ORIF_EXPR:
10152 /* Note that the operands of this must be ints
10153 and their values must be 0 or true.
10154 ("true" is a fixed value perhaps depending on the language.) */
10155 /* If first arg is constant true, return it. */
10156 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10157 return fold_convert (type, arg0);
10158 case TRUTH_OR_EXPR:
10159 /* If either arg is constant zero, drop it. */
10160 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10161 return non_lvalue (fold_convert (type, arg1));
10162 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10163 /* Preserve sequence points. */
10164 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10165 return non_lvalue (fold_convert (type, arg0));
10166 /* If second arg is constant true, result is true, but we must
10167 evaluate first arg. */
10168 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10169 return omit_one_operand (type, arg1, arg0);
10170 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
10172 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10173 return omit_one_operand (type, arg0, arg1);
10175 /* !X || X is always true. */
10176 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10177 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10178 return omit_one_operand (type, integer_one_node, arg1);
10179 /* X || !X is always true. */
10180 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10181 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10182 return omit_one_operand (type, integer_one_node, arg0);
10186 case TRUTH_XOR_EXPR:
10187 /* If the second arg is constant zero, drop it. */
10188 if (integer_zerop (arg1))
10189 return non_lvalue (fold_convert (type, arg0));
10190 /* If the second arg is constant true, this is a logical inversion. */
10191 if (integer_onep (arg1))
10193 /* Only call invert_truthvalue if operand is a truth value. */
10194 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10195 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10197 tem = invert_truthvalue (arg0);
10198 return non_lvalue (fold_convert (type, tem));
10200 /* Identical arguments cancel to zero. */
10201 if (operand_equal_p (arg0, arg1, 0))
10202 return omit_one_operand (type, integer_zero_node, arg0);
10204 /* !X ^ X is always true. */
10205 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10206 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10207 return omit_one_operand (type, integer_one_node, arg1);
10209 /* X ^ !X is always true. */
10210 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10212 return omit_one_operand (type, integer_one_node, arg0);
10218 tem = fold_comparison (code, type, op0, op1);
10219 if (tem != NULL_TREE)
10222 /* bool_var != 0 becomes bool_var. */
10223 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10224 && code == NE_EXPR)
10225 return non_lvalue (fold_convert (type, arg0));
10227 /* bool_var == 1 becomes bool_var. */
10228 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10229 && code == EQ_EXPR)
10230 return non_lvalue (fold_convert (type, arg0));
10232 /* bool_var != 1 becomes !bool_var. */
10233 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10234 && code == NE_EXPR)
10235 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10237 /* bool_var == 0 becomes !bool_var. */
10238 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10239 && code == EQ_EXPR)
10240 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10242 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
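/* For example, for a 32-bit unsigned x, ~x == 0xff folds to
   x == 0xffffff00.  */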
10243 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10244 && TREE_CODE (arg1) == INTEGER_CST)
10245 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10246 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10249 /* If this is an equality comparison of the address of a non-weak
10250 object against zero, then we know the result. */
10251 if (TREE_CODE (arg0) == ADDR_EXPR
10252 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10253 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10254 && integer_zerop (arg1))
10255 return constant_boolean_node (code != EQ_EXPR, type);
10257 /* If this is an equality comparison of the address of two non-weak,
10258 unaliased symbols neither of which are extern (since we do not
10259 have access to attributes for externs), then we know the result. */
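/* For example, given "static int a, b;", "&a == &b" folds to 0 and
   "&a != &b" folds to 1, since the two DECLs are distinct, while
   "&a == &a" folds to 1.  */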
10260 if (TREE_CODE (arg0) == ADDR_EXPR
10261 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10262 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10263 && ! lookup_attribute ("alias",
10264 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10265 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10266 && TREE_CODE (arg1) == ADDR_EXPR
10267 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10268 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10269 && ! lookup_attribute ("alias",
10270 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10271 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10273 /* We know that we're looking at the address of two
10274 non-weak, unaliased, static _DECL nodes.
10276 It is both wasteful and incorrect to call operand_equal_p
10277 to compare the two ADDR_EXPR nodes. It is wasteful in that
10278 all we need to do is test pointer equality for the arguments
10279 to the two ADDR_EXPR nodes. It is incorrect to use
10280 operand_equal_p as that function is NOT equivalent to a
10281 C equality test. It can in fact return false for two
10282 objects which would test as equal using the C equality operator.  */
10284 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10285 return constant_boolean_node (equal
10286 ? code == EQ_EXPR : code != EQ_EXPR,
10290 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10291 a MINUS_EXPR of a constant, we can convert it into a comparison with
10292 a revised constant as long as no overflow occurs. */
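/* For example, "x + 3 == 10" folds to "x == 7" and "x - 3 == 10"
   folds to "x == 13", provided the adjusted constant does not
   overflow.  */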
10293 if (TREE_CODE (arg1) == INTEGER_CST
10294 && (TREE_CODE (arg0) == PLUS_EXPR
10295 || TREE_CODE (arg0) == MINUS_EXPR)
10296 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10297 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10298 ? MINUS_EXPR : PLUS_EXPR,
10299 arg1, TREE_OPERAND (arg0, 1), 0))
10300 && ! TREE_CONSTANT_OVERFLOW (tem))
10301 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10303 /* Similarly for a NEGATE_EXPR. */
10304 if (TREE_CODE (arg0) == NEGATE_EXPR
10305 && TREE_CODE (arg1) == INTEGER_CST
10306 && 0 != (tem = negate_expr (arg1))
10307 && TREE_CODE (tem) == INTEGER_CST
10308 && ! TREE_CONSTANT_OVERFLOW (tem))
10309 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10311 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10312 for !=. Don't do this for ordered comparisons due to overflow. */
10313 if (TREE_CODE (arg0) == MINUS_EXPR
10314 && integer_zerop (arg1))
10315 return fold_build2 (code, type,
10316 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10318 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10319 if (TREE_CODE (arg0) == ABS_EXPR
10320 && (integer_zerop (arg1) || real_zerop (arg1)))
10321 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10323 /* If this is an EQ or NE comparison with zero and ARG0 is
10324 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10325 two operations, but the latter can be done in one less insn
10326 on machines that have only two-operand insns or on which a
10327 constant cannot be the first operand. */
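/* For example, "((1 << n) & flags) != 0" is rewritten as
   "((flags >> n) & 1) != 0".  */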
10328 if (TREE_CODE (arg0) == BIT_AND_EXPR
10329 && integer_zerop (arg1))
10331 tree arg00 = TREE_OPERAND (arg0, 0);
10332 tree arg01 = TREE_OPERAND (arg0, 1);
10333 if (TREE_CODE (arg00) == LSHIFT_EXPR
10334 && integer_onep (TREE_OPERAND (arg00, 0)))
10336 fold_build2 (code, type,
10337 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10338 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10339 arg01, TREE_OPERAND (arg00, 1)),
10340 fold_convert (TREE_TYPE (arg0),
10341 integer_one_node)),
10343 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10344 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10346 fold_build2 (code, type,
10347 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10348 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10349 arg00, TREE_OPERAND (arg01, 1)),
10350 fold_convert (TREE_TYPE (arg0),
10351 integer_one_node)),
10355 /* If this is an NE or EQ comparison of zero against the result of a
10356 signed MOD operation whose second operand is a power of 2, make
10357 the MOD operation unsigned since it is simpler and equivalent. */
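/* For example, for signed int x, "(x % 16) != 0" is computed as
   "((unsigned int) x % 16u) != 0"; the remainders differ for negative
   x, but whether the remainder is zero does not.  */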
10358 if (integer_zerop (arg1)
10359 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10360 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10361 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10362 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10363 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10364 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10366 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10367 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10368 fold_convert (newtype,
10369 TREE_OPERAND (arg0, 0)),
10370 fold_convert (newtype,
10371 TREE_OPERAND (arg0, 1)));
10373 return fold_build2 (code, type, newmod,
10374 fold_convert (newtype, arg1));
10377 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10378 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
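/* For illustration, assuming 32-bit types: "((x >> 3) & 4) != 0" folds
   to "(x & 32) != 0", while "((x >> 31) & 2) != 0" on a signed x, where
   the shifted mask would overflow, folds to "x < 0".  */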
10380 if (TREE_CODE (arg0) == BIT_AND_EXPR
10381 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10382 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10384 && integer_pow2p (TREE_OPERAND (arg0, 1))
10385 && integer_zerop (arg1))
10387 tree itype = TREE_TYPE (arg0);
10388 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10389 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10391 /* Check for a valid shift count. */
10392 if (TREE_INT_CST_HIGH (arg001) == 0
10393 && TREE_INT_CST_LOW (arg001) < prec)
10395 tree arg01 = TREE_OPERAND (arg0, 1);
10396 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10397 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10398 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10399 can be rewritten as (X & (C2 << C1)) != 0. */
10400 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10402 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10403 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10404 return fold_build2 (code, type, tem, arg1);
10406 /* Otherwise, for signed (arithmetic) shifts,
10407 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10408 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10409 else if (!TYPE_UNSIGNED (itype))
10410 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10411 arg000, build_int_cst (itype, 0));
10412 /* Otherwise, for unsigned (logical) shifts,
10413 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10414 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10416 return omit_one_operand (type,
10417 code == EQ_EXPR ? integer_one_node
10418 : integer_zero_node,
10423 /* If this is an NE comparison of zero with an AND of one, remove the
10424 comparison since the AND will give the correct value. */
10425 if (code == NE_EXPR
10426 && integer_zerop (arg1)
10427 && TREE_CODE (arg0) == BIT_AND_EXPR
10428 && integer_onep (TREE_OPERAND (arg0, 1)))
10429 return fold_convert (type, arg0);
10431 /* If we have (A & C) == C where C is a power of 2, convert this into
10432 (A & C) != 0. Similarly for NE_EXPR. */
10433 if (TREE_CODE (arg0) == BIT_AND_EXPR
10434 && integer_pow2p (TREE_OPERAND (arg0, 1))
10435 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10436 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10437 arg0, fold_convert (TREE_TYPE (arg0),
10438 integer_zero_node));
10440 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10441 bit, then fold the expression into A < 0 or A >= 0. */
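/* For example, for a 32-bit signed x, "(x & 0x80000000) != 0" can be
   folded to "x < 0" and "(x & 0x80000000) == 0" to "x >= 0".  */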
10442 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10446 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10447 Similarly for NE_EXPR. */
10448 if (TREE_CODE (arg0) == BIT_AND_EXPR
10449 && TREE_CODE (arg1) == INTEGER_CST
10450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10452 tree notc = fold_build1 (BIT_NOT_EXPR,
10453 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10454 TREE_OPERAND (arg0, 1));
10455 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10457 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10458 if (integer_nonzerop (dandnotc))
10459 return omit_one_operand (type, rslt, arg0);
10462 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10463 Similarly for NE_EXPR. */
10464 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10465 && TREE_CODE (arg1) == INTEGER_CST
10466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10468 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10469 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10470 TREE_OPERAND (arg0, 1), notd);
10471 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10472 if (integer_nonzerop (candnotd))
10473 return omit_one_operand (type, rslt, arg0);
10476 /* If this is a comparison of a field, we may be able to simplify it. */
10477 if (((TREE_CODE (arg0) == COMPONENT_REF
10478 && lang_hooks.can_use_bit_fields_p ())
10479 || TREE_CODE (arg0) == BIT_FIELD_REF)
10480 /* Handle the constant case even without -O
10481 to make sure the warnings are given. */
10482 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10484 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10489 /* Optimize comparisons of strlen vs zero to a compare of the
10490 first character of the string vs zero. To wit,
10491 strlen(ptr) == 0 => *ptr == 0
10492 strlen(ptr) != 0 => *ptr != 0
10493 Other cases should reduce to one of these two (or a constant)
10494 due to the return value of strlen being unsigned. */
10495 if (TREE_CODE (arg0) == CALL_EXPR
10496 && integer_zerop (arg1))
10498 tree fndecl = get_callee_fndecl (arg0);
10502 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10503 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10504 && (arglist = TREE_OPERAND (arg0, 1))
10505 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10506 && ! TREE_CHAIN (arglist))
10508 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10509 return fold_build2 (code, type, iref,
10510 build_int_cst (TREE_TYPE (iref), 0));
10514 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10515 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10516 if (TREE_CODE (arg0) == RSHIFT_EXPR
10517 && integer_zerop (arg1)
10518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10520 tree arg00 = TREE_OPERAND (arg0, 0);
10521 tree arg01 = TREE_OPERAND (arg0, 1);
10522 tree itype = TREE_TYPE (arg00);
10523 if (TREE_INT_CST_HIGH (arg01) == 0
10524 && TREE_INT_CST_LOW (arg01)
10525 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10527 if (TYPE_UNSIGNED (itype))
10529 itype = lang_hooks.types.signed_type (itype);
10530 arg00 = fold_convert (itype, arg00);
10532 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10533 type, arg00, build_int_cst (itype, 0));
10537 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10538 if (integer_zerop (arg1)
10539 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10540 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10541 TREE_OPERAND (arg0, 1));
10543 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10544 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10545 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10546 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10547 build_int_cst (TREE_TYPE (arg1), 0));
10548 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10549 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10550 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10551 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10552 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10553 build_int_cst (TREE_TYPE (arg1), 0));
10555 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10556 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10557 && TREE_CODE (arg1) == INTEGER_CST
10558 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10559 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10560 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10561 TREE_OPERAND (arg0, 1), arg1));
10563 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10564 (X & C) == 0 when C is a single bit. */
10565 if (TREE_CODE (arg0) == BIT_AND_EXPR
10566 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10567 && integer_zerop (arg1)
10568 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10570 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10571 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10572 TREE_OPERAND (arg0, 1));
10573 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10577 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10578 constant C is a power of two, i.e. a single bit. */
10579 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10580 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10581 && integer_zerop (arg1)
10582 && integer_pow2p (TREE_OPERAND (arg0, 1))
10583 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10584 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10586 tree arg00 = TREE_OPERAND (arg0, 0);
10587 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10588 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10591 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10592 when C is a power of two, i.e. a single bit.  */
10593 if (TREE_CODE (arg0) == BIT_AND_EXPR
10594 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10595 && integer_zerop (arg1)
10596 && integer_pow2p (TREE_OPERAND (arg0, 1))
10597 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10598 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10600 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10601 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10602 arg000, TREE_OPERAND (arg0, 1));
10603 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10604 tem, build_int_cst (TREE_TYPE (tem), 0));
10607 if (integer_zerop (arg1)
10608 && tree_expr_nonzero_p (arg0))
10610 tree res = constant_boolean_node (code==NE_EXPR, type);
10611 return omit_one_operand (type, res, arg0);
10619 tem = fold_comparison (code, type, op0, op1);
10620 if (tem != NULL_TREE)
10623 /* Transform comparisons of the form X +- C CMP X. */
10624 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10625 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10626 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10627 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10628 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10629 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10630 && !(flag_wrapv || flag_trapv))))
10632 tree arg01 = TREE_OPERAND (arg0, 1);
10633 enum tree_code code0 = TREE_CODE (arg0);
10636 if (TREE_CODE (arg01) == REAL_CST)
10637 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10639 is_positive = tree_int_cst_sgn (arg01);
10641 /* (X - c) > X becomes false. */
10642 if (code == GT_EXPR
10643 && ((code0 == MINUS_EXPR && is_positive >= 0)
10644 || (code0 == PLUS_EXPR && is_positive <= 0)))
10645 return constant_boolean_node (0, type);
10647 /* Likewise (X + c) < X becomes false. */
10648 if (code == LT_EXPR
10649 && ((code0 == PLUS_EXPR && is_positive >= 0)
10650 || (code0 == MINUS_EXPR && is_positive <= 0)))
10651 return constant_boolean_node (0, type);
10653 /* Convert (X - c) <= X to true. */
10654 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10656 && ((code0 == MINUS_EXPR && is_positive >= 0)
10657 || (code0 == PLUS_EXPR && is_positive <= 0)))
10658 return constant_boolean_node (1, type);
10660 /* Convert (X + c) >= X to true. */
10661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10663 && ((code0 == PLUS_EXPR && is_positive >= 0)
10664 || (code0 == MINUS_EXPR && is_positive <= 0)))
10665 return constant_boolean_node (1, type);
10667 if (TREE_CODE (arg01) == INTEGER_CST)
10669 /* Convert X + c > X and X - c < X to true for integers. */
10670 if (code == GT_EXPR
10671 && ((code0 == PLUS_EXPR && is_positive > 0)
10672 || (code0 == MINUS_EXPR && is_positive < 0)))
10673 return constant_boolean_node (1, type);
10675 if (code == LT_EXPR
10676 && ((code0 == MINUS_EXPR && is_positive > 0)
10677 || (code0 == PLUS_EXPR && is_positive < 0)))
10678 return constant_boolean_node (1, type);
10680 /* Convert X + c <= X and X - c >= X to false for integers. */
10681 if (code == LE_EXPR
10682 && ((code0 == PLUS_EXPR && is_positive > 0)
10683 || (code0 == MINUS_EXPR && is_positive < 0)))
10684 return constant_boolean_node (0, type);
10686 if (code == GE_EXPR
10687 && ((code0 == MINUS_EXPR && is_positive > 0)
10688 || (code0 == PLUS_EXPR && is_positive < 0)))
10689 return constant_boolean_node (0, type);
10693 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10694 This transformation affects the cases which are handled in later
10695 optimizations involving comparisons with non-negative constants. */
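/* For example, "x >= 5" becomes "x > 4" and "x < 5" becomes
   "x <= 4".  */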
10696 if (TREE_CODE (arg1) == INTEGER_CST
10697 && TREE_CODE (arg0) != INTEGER_CST
10698 && tree_int_cst_sgn (arg1) > 0)
10700 if (code == GE_EXPR)
10702 arg1 = const_binop (MINUS_EXPR, arg1,
10703 build_int_cst (TREE_TYPE (arg1), 1), 0);
10704 return fold_build2 (GT_EXPR, type, arg0,
10705 fold_convert (TREE_TYPE (arg0), arg1));
10707 if (code == LT_EXPR)
10709 arg1 = const_binop (MINUS_EXPR, arg1,
10710 build_int_cst (TREE_TYPE (arg1), 1), 0);
10711 return fold_build2 (LE_EXPR, type, arg0,
10712 fold_convert (TREE_TYPE (arg0), arg1));
10716 /* Comparisons with the highest or lowest possible integer of
10717 the specified size will have known values. */
10719 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10721 if (TREE_CODE (arg1) == INTEGER_CST
10722 && ! TREE_CONSTANT_OVERFLOW (arg1)
10723 && width <= 2 * HOST_BITS_PER_WIDE_INT
10724 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10725 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10727 HOST_WIDE_INT signed_max_hi;
10728 unsigned HOST_WIDE_INT signed_max_lo;
10729 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10731 if (width <= HOST_BITS_PER_WIDE_INT)
10733 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10738 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10740 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10746 max_lo = signed_max_lo;
10747 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10753 width -= HOST_BITS_PER_WIDE_INT;
10754 signed_max_lo = -1;
10755 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10760 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10762 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10767 max_hi = signed_max_hi;
10768 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10772 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10773 && TREE_INT_CST_LOW (arg1) == max_lo)
10777 return omit_one_operand (type, integer_zero_node, arg0);
10780 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10783 return omit_one_operand (type, integer_one_node, arg0);
10786 return fold_build2 (NE_EXPR, type, arg0, arg1);
10788 /* The GE_EXPR and LT_EXPR cases above are not normally
10789 reached because of previous transformations. */
10794 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10796 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10800 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10801 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10803 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10804 return fold_build2 (NE_EXPR, type, arg0, arg1);
10808 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10810 && TREE_INT_CST_LOW (arg1) == min_lo)
10814 return omit_one_operand (type, integer_zero_node, arg0);
10817 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10820 return omit_one_operand (type, integer_one_node, arg0);
10823 return fold_build2 (NE_EXPR, type, op0, op1);
10828 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10830 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10834 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10835 return fold_build2 (NE_EXPR, type, arg0, arg1);
10837 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10838 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10843 else if (!in_gimple_form
10844 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10845 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10846 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10847 /* signed_type does not work on pointer types. */
10848 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10850 /* The following case also applies to X < signed_max+1
10851 and X >= signed_max+1 because of previous transformations.  */
10852 if (code == LE_EXPR || code == GT_EXPR)
10855 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10856 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10857 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10858 type, fold_convert (st0, arg0),
10859 build_int_cst (st1, 0));
10865 /* If we are comparing an ABS_EXPR with a constant, we can
10866 convert all the cases into explicit comparisons, but they may
10867 well not be faster than doing the ABS and one comparison.
10868 But ABS (X) <= C is a range comparison, which becomes a subtraction
10869 and a comparison, and is probably faster. */
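/* For example, "abs (x) <= 5" is rewritten as
   "x >= -5 && x <= 5".  */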
10870 if (code == LE_EXPR
10871 && TREE_CODE (arg1) == INTEGER_CST
10872 && TREE_CODE (arg0) == ABS_EXPR
10873 && ! TREE_SIDE_EFFECTS (arg0)
10874 && (0 != (tem = negate_expr (arg1)))
10875 && TREE_CODE (tem) == INTEGER_CST
10876 && ! TREE_CONSTANT_OVERFLOW (tem))
10877 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10878 build2 (GE_EXPR, type,
10879 TREE_OPERAND (arg0, 0), tem),
10880 build2 (LE_EXPR, type,
10881 TREE_OPERAND (arg0, 0), arg1));
10883 /* Convert ABS_EXPR<x> >= 0 to true. */
10884 if (code == GE_EXPR
10885 && tree_expr_nonnegative_p (arg0)
10886 && (integer_zerop (arg1)
10887 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10888 && real_zerop (arg1))))
10889 return omit_one_operand (type, integer_one_node, arg0);
10891 /* Convert ABS_EXPR<x> < 0 to false. */
10892 if (code == LT_EXPR
10893 && tree_expr_nonnegative_p (arg0)
10894 && (integer_zerop (arg1) || real_zerop (arg1)))
10895 return omit_one_operand (type, integer_zero_node, arg0);
10897 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10898 and similarly for >= into !=. */
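/* For example, for unsigned x, "x < (1 << y)" becomes
   "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0".  */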
10899 if ((code == LT_EXPR || code == GE_EXPR)
10900 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10901 && TREE_CODE (arg1) == LSHIFT_EXPR
10902 && integer_onep (TREE_OPERAND (arg1, 0)))
10903 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10904 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10905 TREE_OPERAND (arg1, 1)),
10906 build_int_cst (TREE_TYPE (arg0), 0));
10908 if ((code == LT_EXPR || code == GE_EXPR)
10909 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10910 && (TREE_CODE (arg1) == NOP_EXPR
10911 || TREE_CODE (arg1) == CONVERT_EXPR)
10912 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10913 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10915 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10916 fold_convert (TREE_TYPE (arg0),
10917 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10918 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10920 build_int_cst (TREE_TYPE (arg0), 0));
10924 case UNORDERED_EXPR:
10932 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10934 t1 = fold_relational_const (code, type, arg0, arg1);
10935 if (t1 != NULL_TREE)
10939 /* If the first operand is NaN, the result is constant. */
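/* For example, if the first operand is a NaN constant, UNLT, UNLE,
   UNGT, UNGE, UNEQ and UNORDERED fold to 1, while ORDERED folds to 0
   (and LTGT folds to 0 when trapping math is disabled).  */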
10940 if (TREE_CODE (arg0) == REAL_CST
10941 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10942 && (code != LTGT_EXPR || ! flag_trapping_math))
10944 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10945 ? integer_zero_node
10946 : integer_one_node;
10947 return omit_one_operand (type, t1, arg1);
10950 /* If the second operand is NaN, the result is constant. */
10951 if (TREE_CODE (arg1) == REAL_CST
10952 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10953 && (code != LTGT_EXPR || ! flag_trapping_math))
10955 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10956 ? integer_zero_node
10957 : integer_one_node;
10958 return omit_one_operand (type, t1, arg0);
10961 /* Simplify unordered comparison of something with itself. */
10962 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10963 && operand_equal_p (arg0, arg1, 0))
10964 return constant_boolean_node (1, type);
10966 if (code == LTGT_EXPR
10967 && !flag_trapping_math
10968 && operand_equal_p (arg0, arg1, 0))
10969 return constant_boolean_node (0, type);
10971 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10973 tree targ0 = strip_float_extensions (arg0);
10974 tree targ1 = strip_float_extensions (arg1);
10975 tree newtype = TREE_TYPE (targ0);
10977 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10978 newtype = TREE_TYPE (targ1);
10980 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10981 return fold_build2 (code, type, fold_convert (newtype, targ0),
10982 fold_convert (newtype, targ1));
10987 case COMPOUND_EXPR:
10988 /* When pedantic, a compound expression can be neither an lvalue
10989 nor an integer constant expression. */
10990 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10992 /* Don't let (0, 0) be null pointer constant. */
10993 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10994 : fold_convert (type, arg1);
10995 return pedantic_non_lvalue (tem);
10998 if ((TREE_CODE (arg0) == REAL_CST
10999 && TREE_CODE (arg1) == REAL_CST)
11000 || (TREE_CODE (arg0) == INTEGER_CST
11001 && TREE_CODE (arg1) == INTEGER_CST))
11002 return build_complex (type, arg0, arg1);
11006 /* An ASSERT_EXPR should never be passed to fold_binary. */
11007 gcc_unreachable ();
11011 } /* switch (code) */
11014 /* Callback for walk_tree, looking for LABEL_EXPR.
11015 Returns *TP if it is a LABEL_EXPR, otherwise returns NULL_TREE.
11016 Do not check the sub-tree of GOTO_EXPR. */
11019 contains_label_1 (tree *tp,
11020 int *walk_subtrees,
11021 void *data ATTRIBUTE_UNUSED)
11023 switch (TREE_CODE (*tp))
11028 *walk_subtrees = 0;
11035 /* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
11036 accessible from outside the sub-tree.  Returns true if such a label
11037 is found, false otherwise.  */
11040 contains_label_p (tree st)
11042 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11045 /* Fold a ternary expression of code CODE and type TYPE with operands
11046 OP0, OP1, and OP2. Return the folded expression if folding is
11047 successful. Otherwise, return NULL_TREE. */
11050 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11053 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11054 enum tree_code_class kind = TREE_CODE_CLASS (code);
11056 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11057 && TREE_CODE_LENGTH (code) == 3);
11059 /* Strip any conversions that don't change the mode. This is safe
11060 for every expression, except for a comparison expression because
11061 its signedness is derived from its operands. So, in the latter
11062 case, only strip conversions that don't change the signedness.
11064 Note that this is done as an internal manipulation within the
11065 constant folder, in order to find the simplest representation of
11066 the arguments so that their form can be studied. In any case,
11067 the appropriate type conversions should be put back in the tree
11068 that will get out of the constant folder. */
11083 case COMPONENT_REF:
11084 if (TREE_CODE (arg0) == CONSTRUCTOR
11085 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11087 unsigned HOST_WIDE_INT idx;
11089 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11096 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11097 so all simple results must be passed through pedantic_non_lvalue. */
11098 if (TREE_CODE (arg0) == INTEGER_CST)
11100 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11101 tem = integer_zerop (arg0) ? op2 : op1;
11102 /* Only optimize constant conditions when the selected branch
11103 has the same type as the COND_EXPR. This avoids optimizing
11104 away "c ? x : throw", where the throw has a void type.
11105 Avoid throwing away the operand that contains a label.  */
11106 if ((!TREE_SIDE_EFFECTS (unused_op)
11107 || !contains_label_p (unused_op))
11108 && (! VOID_TYPE_P (TREE_TYPE (tem))
11109 || VOID_TYPE_P (type)))
11110 return pedantic_non_lvalue (tem);
11113 if (operand_equal_p (arg1, op2, 0))
11114 return pedantic_omit_one_operand (type, arg1, arg0);
11116 /* If we have A op B ? A : C, we may be able to convert this to a
11117 simpler expression, depending on the operation and the values
11118 of B and C. Signed zeros prevent all of these transformations,
11119 for reasons given above each one.
11121 Also try swapping the arguments and inverting the conditional. */
11122 if (COMPARISON_CLASS_P (arg0)
11123 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11124 arg1, TREE_OPERAND (arg0, 1))
11125 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11127 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11132 if (COMPARISON_CLASS_P (arg0)
11133 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11135 TREE_OPERAND (arg0, 1))
11136 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11138 tem = fold_truth_not_expr (arg0);
11139 if (tem && COMPARISON_CLASS_P (tem))
11141 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11147 /* If the second operand is simpler than the third, swap them
11148 since that produces better jump optimization results. */
11149 if (truth_value_p (TREE_CODE (arg0))
11150 && tree_swap_operands_p (op1, op2, false))
11152 /* See if this can be inverted. If it can't, possibly because
11153 it was a floating-point inequality comparison, don't do anything.  */
11155 tem = fold_truth_not_expr (arg0);
11157 return fold_build3 (code, type, tem, op2, op1);
11160 /* Convert A ? 1 : 0 to simply A. */
11161 if (integer_onep (op1)
11162 && integer_zerop (op2)
11163 /* If we try to convert OP0 to our type, the
11164 call to fold will try to move the conversion inside
11165 a COND, which will recurse. In that case, the COND_EXPR
11166 is probably the best choice, so leave it alone. */
11167 && type == TREE_TYPE (arg0))
11168 return pedantic_non_lvalue (arg0);
11170 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11171 over COND_EXPR in cases such as floating point comparisons. */
11172 if (integer_zerop (op1)
11173 && integer_onep (op2)
11174 && truth_value_p (TREE_CODE (arg0)))
11175 return pedantic_non_lvalue (fold_convert (type,
11176 invert_truthvalue (arg0)));
11178 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11179 if (TREE_CODE (arg0) == LT_EXPR
11180 && integer_zerop (TREE_OPERAND (arg0, 1))
11181 && integer_zerop (op2)
11182 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11183 return fold_convert (type,
11184 fold_build2 (BIT_AND_EXPR,
11185 TREE_TYPE (tem), tem,
11186 fold_convert (TREE_TYPE (tem), arg1)));
11188 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11189 already handled above. */
11190 if (TREE_CODE (arg0) == BIT_AND_EXPR
11191 && integer_onep (TREE_OPERAND (arg0, 1))
11192 && integer_zerop (op2)
11193 && integer_pow2p (arg1))
11195 tree tem = TREE_OPERAND (arg0, 0);
11197 if (TREE_CODE (tem) == RSHIFT_EXPR
11198 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11199 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11200 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11201 return fold_build2 (BIT_AND_EXPR, type,
11202 TREE_OPERAND (tem, 0), arg1);
11205 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11206 is probably obsolete because the first operand should be a
11207 truth value (that's why we have the two cases above), but let's
11208 leave it in until we can confirm this for all front-ends. */
11209 if (integer_zerop (op2)
11210 && TREE_CODE (arg0) == NE_EXPR
11211 && integer_zerop (TREE_OPERAND (arg0, 1))
11212 && integer_pow2p (arg1)
11213 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11214 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11215 arg1, OEP_ONLY_CONST))
11216 return pedantic_non_lvalue (fold_convert (type,
11217 TREE_OPERAND (arg0, 0)));
11219 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11220 if (integer_zerop (op2)
11221 && truth_value_p (TREE_CODE (arg0))
11222 && truth_value_p (TREE_CODE (arg1)))
11223 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11224 fold_convert (type, arg0),
11227 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11228 if (integer_onep (op2)
11229 && truth_value_p (TREE_CODE (arg0))
11230 && truth_value_p (TREE_CODE (arg1)))
11232 /* Only perform transformation if ARG0 is easily inverted. */
11233 tem = fold_truth_not_expr (arg0);
11235 return fold_build2 (TRUTH_ORIF_EXPR, type,
11236 fold_convert (type, tem),
11240 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11241 if (integer_zerop (arg1)
11242 && truth_value_p (TREE_CODE (arg0))
11243 && truth_value_p (TREE_CODE (op2)))
11245 /* Only perform transformation if ARG0 is easily inverted. */
11246 tem = fold_truth_not_expr (arg0);
11248 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11249 fold_convert (type, tem),
11253 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11254 if (integer_onep (arg1)
11255 && truth_value_p (TREE_CODE (arg0))
11256 && truth_value_p (TREE_CODE (op2)))
11257 return fold_build2 (TRUTH_ORIF_EXPR, type,
11258 fold_convert (type, arg0),
11264 /* Check for a built-in function. */
11265 if (TREE_CODE (op0) == ADDR_EXPR
11266 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11267 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11268 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11271 case BIT_FIELD_REF:
11272 if (TREE_CODE (arg0) == VECTOR_CST
11273 && type == TREE_TYPE (TREE_TYPE (arg0))
11274 && host_integerp (arg1, 1)
11275 && host_integerp (op2, 1))
11277 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11278 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11281 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11282 && (idx % width) == 0
11283 && (idx = idx / width)
11284 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11286 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11287 while (idx-- > 0 && elements)
11288 elements = TREE_CHAIN (elements);
11290 return TREE_VALUE (elements);
11292 return fold_convert (type, integer_zero_node);
11299 } /* switch (code) */
11302 /* Perform constant folding and related simplification of EXPR.
11303 The related simplifications include x*1 => x, x*0 => 0, etc.,
11304 and application of the associative law.
11305 NOP_EXPR conversions may be removed freely (as long as we
11306 are careful not to change the type of the overall expression).
11307 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11308 but we can constant-fold them if they have constant operands. */
11310 #ifdef ENABLE_FOLD_CHECKING
11311 # define fold(x) fold_1 (x)
11312 static tree fold_1 (tree);
11318 const tree t = expr;
11319 enum tree_code code = TREE_CODE (t);
11320 enum tree_code_class kind = TREE_CODE_CLASS (code);
11323 /* Return right away if a constant. */
11324 if (kind == tcc_constant)
11327 if (IS_EXPR_CODE_CLASS (kind))
11329 tree type = TREE_TYPE (t);
11330 tree op0, op1, op2;
11332 switch (TREE_CODE_LENGTH (code))
11335 op0 = TREE_OPERAND (t, 0);
11336 tem = fold_unary (code, type, op0);
11337 return tem ? tem : expr;
11339 op0 = TREE_OPERAND (t, 0);
11340 op1 = TREE_OPERAND (t, 1);
11341 tem = fold_binary (code, type, op0, op1);
11342 return tem ? tem : expr;
11344 op0 = TREE_OPERAND (t, 0);
11345 op1 = TREE_OPERAND (t, 1);
11346 op2 = TREE_OPERAND (t, 2);
11347 tem = fold_ternary (code, type, op0, op1, op2);
11348 return tem ? tem : expr;
11357 return fold (DECL_INITIAL (t));
11361 } /* switch (code) */
11364 #ifdef ENABLE_FOLD_CHECKING
11367 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11368 static void fold_check_failed (tree, tree);
11369 void print_fold_checksum (tree);
11371 /* When --enable-checking=fold, compute a digest of expr before
11372 and after the actual fold call to verify that fold did not
11373 accidentally change the original expr.  */
11379 struct md5_ctx ctx;
11380 unsigned char checksum_before[16], checksum_after[16];
11383 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11384 md5_init_ctx (&ctx);
11385 fold_checksum_tree (expr, &ctx, ht);
11386 md5_finish_ctx (&ctx, checksum_before);
11389 ret = fold_1 (expr);
11391 md5_init_ctx (&ctx);
11392 fold_checksum_tree (expr, &ctx, ht);
11393 md5_finish_ctx (&ctx, checksum_after);
11396 if (memcmp (checksum_before, checksum_after, 16))
11397 fold_check_failed (expr, ret);
11403 print_fold_checksum (tree expr)
11405 struct md5_ctx ctx;
11406 unsigned char checksum[16], cnt;
11409 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11410 md5_init_ctx (&ctx);
11411 fold_checksum_tree (expr, &ctx, ht);
11412 md5_finish_ctx (&ctx, checksum);
11414 for (cnt = 0; cnt < 16; ++cnt)
11415 fprintf (stderr, "%02x", checksum[cnt]);
11416 putc ('\n', stderr);
11420 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11422 internal_error ("fold check: original tree changed by fold");
11426 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11429 enum tree_code code;
11430 struct tree_function_decl buf;
11435 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11436 <= sizeof (struct tree_function_decl))
11437 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11440 slot = htab_find_slot (ht, expr, INSERT);
11444 code = TREE_CODE (expr);
11445 if (TREE_CODE_CLASS (code) == tcc_declaration
11446 && DECL_ASSEMBLER_NAME_SET_P (expr))
11448 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11449 memcpy ((char *) &buf, expr, tree_size (expr));
11450 expr = (tree) &buf;
11451 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11453 else if (TREE_CODE_CLASS (code) == tcc_type
11454 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11455 || TYPE_CACHED_VALUES_P (expr)
11456 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11458 /* Allow these fields to be modified. */
11459 memcpy ((char *) &buf, expr, tree_size (expr));
11460 expr = (tree) &buf;
11461 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11462 TYPE_POINTER_TO (expr) = NULL;
11463 TYPE_REFERENCE_TO (expr) = NULL;
11464 if (TYPE_CACHED_VALUES_P (expr))
11466 TYPE_CACHED_VALUES_P (expr) = 0;
11467 TYPE_CACHED_VALUES (expr) = NULL;
11470 md5_process_bytes (expr, tree_size (expr), ctx);
11471 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11472 if (TREE_CODE_CLASS (code) != tcc_type
11473 && TREE_CODE_CLASS (code) != tcc_declaration
11474 && code != TREE_LIST)
11475 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11476 switch (TREE_CODE_CLASS (code))
11482 md5_process_bytes (TREE_STRING_POINTER (expr),
11483 TREE_STRING_LENGTH (expr), ctx);
11486 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11487 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11490 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11496 case tcc_exceptional:
11500 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11501 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11502 expr = TREE_CHAIN (expr);
11503 goto recursive_label;
11506 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11507 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11513 case tcc_expression:
11514 case tcc_reference:
11515 case tcc_comparison:
11518 case tcc_statement:
11519 len = TREE_CODE_LENGTH (code);
11520 for (i = 0; i < len; ++i)
11521 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11523 case tcc_declaration:
11524 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11525 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11526 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11528 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11529 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11530 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11531 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11532 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11534 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11535 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11537 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11539 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11540 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11541 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11545 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11546 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11547 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11548 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11549 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11550 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11551 if (INTEGRAL_TYPE_P (expr)
11552 || SCALAR_FLOAT_TYPE_P (expr))
11554 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11555 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11557 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11558 if (TREE_CODE (expr) == RECORD_TYPE
11559 || TREE_CODE (expr) == UNION_TYPE
11560 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11561 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11562 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11571 /* Fold a unary tree expression with code CODE of type TYPE with an
11572 operand OP0. Return a folded expression if successful. Otherwise,
11573 return a tree expression with code CODE of type TYPE with an operand OP0.  */
11577 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11580 #ifdef ENABLE_FOLD_CHECKING
11581 unsigned char checksum_before[16], checksum_after[16];
11582 struct md5_ctx ctx;
11585 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11586 md5_init_ctx (&ctx);
11587 fold_checksum_tree (op0, &ctx, ht);
11588 md5_finish_ctx (&ctx, checksum_before);
11592 tem = fold_unary (code, type, op0);
11594 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11596 #ifdef ENABLE_FOLD_CHECKING
11597 md5_init_ctx (&ctx);
11598 fold_checksum_tree (op0, &ctx, ht);
11599 md5_finish_ctx (&ctx, checksum_after);
11602 if (memcmp (checksum_before, checksum_after, 16))
11603 fold_check_failed (op0, tem);
11608 /* Fold a binary tree expression with code CODE of type TYPE with
11609 operands OP0 and OP1. Return a folded expression if successful.
11610 Otherwise, return a tree expression with code CODE of type TYPE
11611 with operands OP0 and OP1. */
11614 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11618 #ifdef ENABLE_FOLD_CHECKING
11619 unsigned char checksum_before_op0[16],
11620 checksum_before_op1[16],
11621 checksum_after_op0[16],
11622 checksum_after_op1[16];
11623 struct md5_ctx ctx;
11626 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11627 md5_init_ctx (&ctx);
11628 fold_checksum_tree (op0, &ctx, ht);
11629 md5_finish_ctx (&ctx, checksum_before_op0);
11632 md5_init_ctx (&ctx);
11633 fold_checksum_tree (op1, &ctx, ht);
11634 md5_finish_ctx (&ctx, checksum_before_op1);
11638 tem = fold_binary (code, type, op0, op1);
11640 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11642 #ifdef ENABLE_FOLD_CHECKING
11643 md5_init_ctx (&ctx);
11644 fold_checksum_tree (op0, &ctx, ht);
11645 md5_finish_ctx (&ctx, checksum_after_op0);
11648 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11649 fold_check_failed (op0, tem);
11651 md5_init_ctx (&ctx);
11652 fold_checksum_tree (op1, &ctx, ht);
11653 md5_finish_ctx (&ctx, checksum_after_op1);
11656 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11657 fold_check_failed (op1, tem);
11662 /* Fold a ternary tree expression with code CODE of type TYPE with
11663 operands OP0, OP1, and OP2. Return a folded expression if
11664 successful. Otherwise, return a tree expression with code CODE of
11665 type TYPE with operands OP0, OP1, and OP2. */
11668 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11672 #ifdef ENABLE_FOLD_CHECKING
11673 unsigned char checksum_before_op0[16],
11674 checksum_before_op1[16],
11675 checksum_before_op2[16],
11676 checksum_after_op0[16],
11677 checksum_after_op1[16],
11678 checksum_after_op2[16];
11679 struct md5_ctx ctx;
11682 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11683 md5_init_ctx (&ctx);
11684 fold_checksum_tree (op0, &ctx, ht);
11685 md5_finish_ctx (&ctx, checksum_before_op0);
11688 md5_init_ctx (&ctx);
11689 fold_checksum_tree (op1, &ctx, ht);
11690 md5_finish_ctx (&ctx, checksum_before_op1);
11693 md5_init_ctx (&ctx);
11694 fold_checksum_tree (op2, &ctx, ht);
11695 md5_finish_ctx (&ctx, checksum_before_op2);
11699 tem = fold_ternary (code, type, op0, op1, op2);
11701 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11703 #ifdef ENABLE_FOLD_CHECKING
11704 md5_init_ctx (&ctx);
11705 fold_checksum_tree (op0, &ctx, ht);
11706 md5_finish_ctx (&ctx, checksum_after_op0);
11709 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11710 fold_check_failed (op0, tem);
11712 md5_init_ctx (&ctx);
11713 fold_checksum_tree (op1, &ctx, ht);
11714 md5_finish_ctx (&ctx, checksum_after_op1);
11717 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11718 fold_check_failed (op1, tem);
11720 md5_init_ctx (&ctx);
11721 fold_checksum_tree (op2, &ctx, ht);
11722 md5_finish_ctx (&ctx, checksum_after_op2);
11725 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11726 fold_check_failed (op2, tem);
11731 /* Perform constant folding and related simplification of initializer
11732 expression EXPR. These behave identically to "fold_buildN" but ignore
11733 potential run-time traps and exceptions that fold must preserve. */
11735 #define START_FOLD_INIT \
11736 int saved_signaling_nans = flag_signaling_nans;\
11737 int saved_trapping_math = flag_trapping_math;\
11738 int saved_rounding_math = flag_rounding_math;\
11739 int saved_trapv = flag_trapv;\
11740 int saved_folding_initializer = folding_initializer;\
11741 flag_signaling_nans = 0;\
11742 flag_trapping_math = 0;\
11743 flag_rounding_math = 0;\
11745 folding_initializer = 1;
11747 #define END_FOLD_INIT \
11748 flag_signaling_nans = saved_signaling_nans;\
11749 flag_trapping_math = saved_trapping_math;\
11750 flag_rounding_math = saved_rounding_math;\
11751 flag_trapv = saved_trapv;\
11752 folding_initializer = saved_folding_initializer;
11755 fold_build1_initializer (enum tree_code code, tree type, tree op)
11760 result = fold_build1 (code, type, op);
11767 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11772 result = fold_build2 (code, type, op0, op1);
11779 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11785 result = fold_build3 (code, type, op0, op1, op2);
11791 #undef START_FOLD_INIT
11792 #undef END_FOLD_INIT
11794 /* Determine if first argument is a multiple of second argument. Return 0 if
11795 it is not, or we cannot easily determine it to be.
11797 An example of the sort of thing we care about (at this point; this routine
11798 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11799 fold cases do now) is discovering that
11801 SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)
11807 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11809 This code also handles discovering that
11811 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11813 is a multiple of 8 so we don't have to worry about dealing with a
11814 possible remainder.
11816 Note that we *look* inside a SAVE_EXPR only to determine how it was
11817 calculated; it is not safe for fold to do much of anything else with the
11818 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11819 at run time. For example, the latter example above *cannot* be implemented
11820 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11821 evaluation time of the original SAVE_EXPR is not necessarily the same at
11822 the time the new expression is evaluated. The only optimization of this
11823 sort that would be valid is changing
11825 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8) divided by 8 to
11829 SAVE_EXPR (I) * SAVE_EXPR (J)
11831 (where the same SAVE_EXPR (J) is used in the original and the
11832 transformed version). */
11835 multiple_of_p (tree type, tree top, tree bottom)
11837 if (operand_equal_p (top, bottom, 0))
11840 if (TREE_CODE (type) != INTEGER_TYPE)
11843 switch (TREE_CODE (top))
11846 /* Bitwise and provides a power of two multiple. If the mask is
11847 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11848 if (!integer_pow2p (bottom))
11853 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11854 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11858 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11859 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11862 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11866 op1 = TREE_OPERAND (top, 1);
11867 /* const_binop may not detect overflow correctly,
11868 so check for it explicitly here. */
11869 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11870 > TREE_INT_CST_LOW (op1)
11871 && TREE_INT_CST_HIGH (op1) == 0
11872 && 0 != (t1 = fold_convert (type,
11873 const_binop (LSHIFT_EXPR,
11876 && ! TREE_OVERFLOW (t1))
11877 return multiple_of_p (type, t1, bottom);
11882 /* Can't handle conversions from non-integral or wider integral type. */
11883 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11884 || (TYPE_PRECISION (type)
11885 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11888 /* .. fall through ... */
11891 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11894 if (TREE_CODE (bottom) != INTEGER_CST
11895 || (TYPE_UNSIGNED (type)
11896 && (tree_int_cst_sgn (top) < 0
11897 || tree_int_cst_sgn (bottom) < 0)))
11899 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
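/* Standalone illustration (not part of fold-const.c) of the arithmetic
   fact behind the MULT_EXPR case above: if either factor is a multiple
   of BOTTOM, the product is a multiple of BOTTOM regardless of what the
   other factor turns out to be at run time.  */
#include <assert.h>

static void
demo_product_multiple_of_8 (unsigned long i, unsigned long j)
{
  unsigned long top = i * (j * 8);  /* models SAVE_EXPR (I) * SAVE_EXPR (J * 8) */
  assert (top % 8 == 0);            /* holds for every i and j, even with wraparound */
}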
11907 /* Return true if `t' is known to be non-negative. */
11910 tree_expr_nonnegative_p (tree t)
11912 if (t == error_mark_node)
11915 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11918 switch (TREE_CODE (t))
11921 /* Query VRP to see if it has recorded any information about
11922 the range of this object. */
11923 return ssa_name_nonnegative_p (t);
11926 /* We can't return 1 if flag_wrapv is set because
11927 ABS_EXPR<INT_MIN> = INT_MIN. */
11928 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11933 return tree_int_cst_sgn (t) >= 0;
11936 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11939 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11940 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11941 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11943 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11944 both unsigned and at least 2 bits shorter than the result. */
11945 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11946 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11947 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11949 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11950 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11951 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11952 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11954 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11955 TYPE_PRECISION (inner2)) + 1;
11956 return prec < TYPE_PRECISION (TREE_TYPE (t));
11962 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11964 /* x * x for floating point x is always non-negative. */
11965 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11967 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11968 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11971 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11972 both unsigned and the sum of their precisions is less than the precision of the result. */
11973 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11974 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11975 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11977 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11978 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11979 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11980 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11981 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11982 < TYPE_PRECISION (TREE_TYPE (t));
11988 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11989 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11995 case TRUNC_DIV_EXPR:
11996 case CEIL_DIV_EXPR:
11997 case FLOOR_DIV_EXPR:
11998 case ROUND_DIV_EXPR:
11999 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12000 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12002 case TRUNC_MOD_EXPR:
12003 case CEIL_MOD_EXPR:
12004 case FLOOR_MOD_EXPR:
12005 case ROUND_MOD_EXPR:
12007 case NON_LVALUE_EXPR:
12009 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12011 case COMPOUND_EXPR:
12013 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12016 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12019 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12020 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12024 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12025 tree outer_type = TREE_TYPE (t);
12027 if (TREE_CODE (outer_type) == REAL_TYPE)
12029 if (TREE_CODE (inner_type) == REAL_TYPE)
12030 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12031 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12033 if (TYPE_UNSIGNED (inner_type))
12035 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12038 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12040 if (TREE_CODE (inner_type) == REAL_TYPE)
12041 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12042 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12043 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12044 && TYPE_UNSIGNED (inner_type);
12051 tree temp = TARGET_EXPR_SLOT (t);
12052 t = TARGET_EXPR_INITIAL (t);
12054 /* If the initializer is non-void, then it's a normal expression
12055 that will be assigned to the slot. */
12056 if (!VOID_TYPE_P (t))
12057 return tree_expr_nonnegative_p (t);
12059 /* Otherwise, the initializer sets the slot in some way. One common
12060 way is an assignment statement at the end of the initializer. */
12063 if (TREE_CODE (t) == BIND_EXPR)
12064 t = expr_last (BIND_EXPR_BODY (t));
12065 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12066 || TREE_CODE (t) == TRY_CATCH_EXPR)
12067 t = expr_last (TREE_OPERAND (t, 0));
12068 else if (TREE_CODE (t) == STATEMENT_LIST)
12073 if (TREE_CODE (t) == MODIFY_EXPR
12074 && TREE_OPERAND (t, 0) == temp)
12075 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12082 tree fndecl = get_callee_fndecl (t);
12083 tree arglist = TREE_OPERAND (t, 1);
12084 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12085 switch (DECL_FUNCTION_CODE (fndecl))
12087 CASE_FLT_FN (BUILT_IN_ACOS):
12088 CASE_FLT_FN (BUILT_IN_ACOSH):
12089 CASE_FLT_FN (BUILT_IN_CABS):
12090 CASE_FLT_FN (BUILT_IN_COSH):
12091 CASE_FLT_FN (BUILT_IN_ERFC):
12092 CASE_FLT_FN (BUILT_IN_EXP):
12093 CASE_FLT_FN (BUILT_IN_EXP10):
12094 CASE_FLT_FN (BUILT_IN_EXP2):
12095 CASE_FLT_FN (BUILT_IN_FABS):
12096 CASE_FLT_FN (BUILT_IN_FDIM):
12097 CASE_FLT_FN (BUILT_IN_HYPOT):
12098 CASE_FLT_FN (BUILT_IN_POW10):
12099 CASE_INT_FN (BUILT_IN_FFS):
12100 CASE_INT_FN (BUILT_IN_PARITY):
12101 CASE_INT_FN (BUILT_IN_POPCOUNT):
12105 CASE_FLT_FN (BUILT_IN_SQRT):
12106 /* sqrt(-0.0) is -0.0. */
12107 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12109 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12111 CASE_FLT_FN (BUILT_IN_ASINH):
12112 CASE_FLT_FN (BUILT_IN_ATAN):
12113 CASE_FLT_FN (BUILT_IN_ATANH):
12114 CASE_FLT_FN (BUILT_IN_CBRT):
12115 CASE_FLT_FN (BUILT_IN_CEIL):
12116 CASE_FLT_FN (BUILT_IN_ERF):
12117 CASE_FLT_FN (BUILT_IN_EXPM1):
12118 CASE_FLT_FN (BUILT_IN_FLOOR):
12119 CASE_FLT_FN (BUILT_IN_FMOD):
12120 CASE_FLT_FN (BUILT_IN_FREXP):
12121 CASE_FLT_FN (BUILT_IN_LCEIL):
12122 CASE_FLT_FN (BUILT_IN_LDEXP):
12123 CASE_FLT_FN (BUILT_IN_LFLOOR):
12124 CASE_FLT_FN (BUILT_IN_LLCEIL):
12125 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12126 CASE_FLT_FN (BUILT_IN_LLRINT):
12127 CASE_FLT_FN (BUILT_IN_LLROUND):
12128 CASE_FLT_FN (BUILT_IN_LRINT):
12129 CASE_FLT_FN (BUILT_IN_LROUND):
12130 CASE_FLT_FN (BUILT_IN_MODF):
12131 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12132 CASE_FLT_FN (BUILT_IN_POW):
12133 CASE_FLT_FN (BUILT_IN_RINT):
12134 CASE_FLT_FN (BUILT_IN_ROUND):
12135 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12136 CASE_FLT_FN (BUILT_IN_SINH):
12137 CASE_FLT_FN (BUILT_IN_TANH):
12138 CASE_FLT_FN (BUILT_IN_TRUNC):
12139 /* True if the 1st argument is nonnegative. */
12140 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12142 CASE_FLT_FN (BUILT_IN_FMAX):
12143 /* True if the 1st OR 2nd arguments are nonnegative. */
12144 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12145 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12147 CASE_FLT_FN (BUILT_IN_FMIN):
12148 /* True if the 1st AND 2nd arguments are nonnegative. */
12149 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12150 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12152 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12153 /* True if the 2nd argument is nonnegative. */
12154 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12161 /* ... fall through ... */
12164 if (truth_value_p (TREE_CODE (t)))
12165 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12169 /* We don't know the sign of `t', so be conservative and return false. */
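/* Standalone illustration (not part of fold-const.c) of the PLUS_EXPR
   rule above: the sum of two zero-extended values that are at least two
   bits narrower than the result type is bounded by 2 * (2^prec - 1), so
   it can never reach the sign bit of the wider type.  */
#include <assert.h>
#include <stdint.h>

static void
demo_zero_extend_sum_nonnegative (uint16_t x, uint16_t y)
{
  int32_t sum = (int32_t) x + (int32_t) y;  /* at most 131070 */
  assert (sum >= 0);                        /* the int32_t sign bit is unreachable */
}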
12173 /* Return true when T is an address and is known to be nonzero.
12174 For floating point we further ensure that T is not denormal.
12175 Similar logic is present in nonzero_address in rtlanal.h. */
12178 tree_expr_nonzero_p (tree t)
12180 tree type = TREE_TYPE (t);
12182 /* Doing something useful for floating point would need more work. */
12183 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12186 switch (TREE_CODE (t))
12189 /* Query VRP to see if it has recorded any information about
12190 the range of this object. */
12191 return ssa_name_nonzero_p (t);
12194 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12197 /* We used to test for !integer_zerop here. This does not work correctly
12198 if TREE_CONSTANT_OVERFLOW (t). */
12199 return (TREE_INT_CST_LOW (t) != 0
12200 || TREE_INT_CST_HIGH (t) != 0);
12203 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12205 /* In the presence of negative values it is hard
12206 to say anything definite. */
12207 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12208 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12210 /* One of the operands must be positive and the other non-negative. */
12211 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12212 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12217 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12219 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12220 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12226 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12227 tree outer_type = TREE_TYPE (t);
12229 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12230 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12236 tree base = get_base_address (TREE_OPERAND (t, 0));
12241 /* Weak declarations may link to NULL. */
12242 if (VAR_OR_FUNCTION_DECL_P (base))
12243 return !DECL_WEAK (base);
12245 /* Constants are never weak. */
12246 if (CONSTANT_CLASS_P (base))
12253 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12254 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12257 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12258 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12261 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12263 /* When both operands are nonzero, MAX must be too. */
12264 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12267 /* MAX where operand 0 is positive is positive. */
12268 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12270 /* MAX where operand 1 is positive is positive. */
12271 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12272 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12276 case COMPOUND_EXPR:
12279 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12282 case NON_LVALUE_EXPR:
12283 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12286 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12287 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12290 return alloca_call_p (t);
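/* Standalone illustration (not part of fold-const.c) of the signed
   PLUS_EXPR rule above: without -fwrapv, overflow of signed addition is
   undefined, so if both operands are non-negative and at least one of
   them is known nonzero, the sum cannot be zero.  */
static int
demo_sum_is_nonzero (int a, int b)
{
  /* Preconditions assumed by the rule: a >= 0, b >= 0, and a != 0.  */
  return (a + b) != 0;  /* always 1 under these preconditions */
}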
12298 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12299 attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1.
12302 If the expression could be simplified to a constant, then return
12303 the constant. If the expression could not be simplified to a
12304 constant, then return NULL_TREE. */
12307 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12309 tree tem = fold_binary (code, type, op0, op1);
12310 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12313 /* Given the components of a unary expression CODE, TYPE and OP0,
12314 attempt to fold the expression to a constant without modifying TYPE or OP0.
12317 If the expression could be simplified to a constant, then return
12318 the constant. If the expression could not be simplified to a
12319 constant, then return NULL_TREE. */
12322 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12324 tree tem = fold_unary (code, type, op0);
12325 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12328 /* If EXP represents referencing an element in a constant string
12329 (either via pointer arithmetic or array indexing), return the
12330 tree representing the value accessed, otherwise return NULL. */
12333 fold_read_from_constant_string (tree exp)
12335 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12337 tree exp1 = TREE_OPERAND (exp, 0);
12341 if (TREE_CODE (exp) == INDIRECT_REF)
12342 string = string_constant (exp1, &index);
12345 tree low_bound = array_ref_low_bound (exp);
12346 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12348 /* Optimize the special-case of a zero lower bound.
12350 We convert the low_bound to sizetype to avoid some problems
12351 with constant folding. (E.g. suppose the lower bound is 1,
12352 and its mode is QI. Without the conversion, (ARRAY
12353 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12354 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12355 if (! integer_zerop (low_bound))
12356 index = size_diffop (index, fold_convert (sizetype, low_bound));
12362 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12363 && TREE_CODE (string) == STRING_CST
12364 && TREE_CODE (index) == INTEGER_CST
12365 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12366 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12368 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12369 return fold_convert (TREE_TYPE (exp),
12370 build_int_cst (NULL_TREE,
12371 (TREE_STRING_POINTER (string)
12372 [TREE_INT_CST_LOW (index)])));
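/* Standalone illustration (not part of fold-const.c) of the access the
   routine above folds: indexing a string literal with an in-range
   constant reads the character straight out of the STRING_CST, so the
   whole expression becomes a character constant.  */
static char
demo_read_from_constant_string (void)
{
  return "hello"[1];  /* folds to the character constant 'e' */
}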
12377 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12378 an integer constant or real constant.
12380 TYPE is the type of the result. */
12383 fold_negate_const (tree arg0, tree type)
12385 tree t = NULL_TREE;
12387 switch (TREE_CODE (arg0))
12391 unsigned HOST_WIDE_INT low;
12392 HOST_WIDE_INT high;
12393 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12394 TREE_INT_CST_HIGH (arg0),
12396 t = build_int_cst_wide (type, low, high);
12397 t = force_fit_type (t, 1,
12398 (overflow | TREE_OVERFLOW (arg0))
12399 && !TYPE_UNSIGNED (type),
12400 TREE_CONSTANT_OVERFLOW (arg0));
12405 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12409 gcc_unreachable ();
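/* Standalone sketch (not part of fold-const.c) of the double-word
   negation used by the INTEGER_CST case above.  The two-HOST_WIDE_INT
   constant is modelled here as a pair of 64-bit words; negation is
   ordinary two's-complement negation across the pair, and the only
   overflowing operand is the most negative value, which negates to
   itself.  */
#include <stdint.h>

static int
demo_neg_double (uint64_t low, uint64_t high, uint64_t *lv, uint64_t *hv)
{
  *lv = 0u - low;
  *hv = ~high + (low == 0);  /* carry into the high word only if low == 0 */
  /* Overflow iff the (nonzero) operand is its own negation.  */
  return (*lv == low && *hv == high) && (low != 0 || high != 0);
}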
12415 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12416 an integer constant or real constant.
12418 TYPE is the type of the result. */
12421 fold_abs_const (tree arg0, tree type)
12423 tree t = NULL_TREE;
12425 switch (TREE_CODE (arg0))
12428 /* If the value is unsigned, then the absolute value is
12429 the same as the ordinary value. */
12430 if (TYPE_UNSIGNED (type))
12432 /* Similarly, if the value is non-negative. */
12433 else if (INT_CST_LT (integer_minus_one_node, arg0))
12435 /* If the value is negative, then the absolute value is its negation. */
12439 unsigned HOST_WIDE_INT low;
12440 HOST_WIDE_INT high;
12441 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12442 TREE_INT_CST_HIGH (arg0),
12444 t = build_int_cst_wide (type, low, high);
12445 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12446 TREE_CONSTANT_OVERFLOW (arg0));
12451 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12452 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12458 gcc_unreachable ();
12464 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12465 constant. TYPE is the type of the result. */
12468 fold_not_const (tree arg0, tree type)
12470 tree t = NULL_TREE;
12472 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12474 t = build_int_cst_wide (type,
12475 ~ TREE_INT_CST_LOW (arg0),
12476 ~ TREE_INT_CST_HIGH (arg0));
12477 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12478 TREE_CONSTANT_OVERFLOW (arg0));
12483 /* Given CODE, a relational operator, the target type, TYPE and two
12484 constant operands OP0 and OP1, return the result of the
12485 relational operation. If the result is not a compile time
12486 constant, then return NULL_TREE. */
12489 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12491 int result, invert;
12493 /* From here on, the only cases we handle are when the result is
12494 known to be a constant. */
12496 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12498 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12499 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12501 /* Handle the cases where either operand is a NaN. */
12502 if (real_isnan (c0) || real_isnan (c1))
12512 case UNORDERED_EXPR:
12526 if (flag_trapping_math)
12532 gcc_unreachable ();
12535 return constant_boolean_node (result, type);
12538 return constant_boolean_node (real_compare (code, c0, c1), type);
12541 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12543 To compute GT, swap the arguments and do LT.
12544 To compute GE, do LT and invert the result.
12545 To compute LE, swap the arguments, do LT and invert the result.
12546 To compute NE, do EQ and invert the result.
12548 Therefore, the code below must handle only EQ and LT. */
12550 if (code == LE_EXPR || code == GT_EXPR)
12555 code = swap_tree_comparison (code);
12558 /* Note that it is safe to invert for real values here because we
12559 have already handled the one case in which it matters. */
12562 if (code == NE_EXPR || code == GE_EXPR)
12565 code = invert_tree_comparison (code, false);
12568 /* Compute a result for LT or EQ if the arguments permit;
12569 otherwise return NULL_TREE. */
12570 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12572 if (code == EQ_EXPR)
12573 result = tree_int_cst_equal (op0, op1);
12574 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12575 result = INT_CST_LT_UNSIGNED (op0, op1);
12577 result = INT_CST_LT (op0, op1);
12584 return constant_boolean_node (result, type);
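/* Standalone sketch (not part of fold-const.c) of the reduction described
   above: every ordered comparison can be expressed with only "less than"
   and "equal" by swapping the operands (GT, LE) and/or inverting the
   result (GE, NE, LE).  */
enum demo_cmp { DEMO_LT, DEMO_LE, DEMO_GT, DEMO_GE, DEMO_EQ, DEMO_NE };

static int
demo_compare (enum demo_cmp code, long a, long b)
{
  int invert = 0, result;

  if (code == DEMO_LE || code == DEMO_GT)
    {
      long tmp = a; a = b; b = tmp;                 /* swap the arguments */
      code = (code == DEMO_LE ? DEMO_GE : DEMO_LT);
    }
  if (code == DEMO_NE || code == DEMO_GE)
    {
      invert = 1;                                   /* invert the result */
      code = (code == DEMO_NE ? DEMO_EQ : DEMO_LT);
    }

  result = (code == DEMO_EQ) ? (a == b) : (a < b);  /* only EQ and LT remain */
  return invert ? !result : result;
}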
12587 /* Build an expression for a cleanup point containing EXPR with type TYPE.
12588 Don't build a cleanup point expression for EXPR if it doesn't have side
12592 fold_build_cleanup_point_expr (tree type, tree expr)
12594 /* If the expression does not have side effects then we don't have to wrap
12595 it with a cleanup point expression. */
12596 if (!TREE_SIDE_EFFECTS (expr))
12599 /* If the expression is a return, check whether the expression inside the
12600 return, or the right-hand side of the MODIFY_EXPR inside the return, has
12601 no side effects. If either has none, we don't need to wrap the expression
12602 in a cleanup point expression. Note that we don't check the left-hand
12603 side of the MODIFY_EXPR because it should always be the return decl. */
12604 if (TREE_CODE (expr) == RETURN_EXPR)
12606 tree op = TREE_OPERAND (expr, 0);
12607 if (!op || !TREE_SIDE_EFFECTS (op))
12609 op = TREE_OPERAND (op, 1);
12610 if (!TREE_SIDE_EFFECTS (op))
12614 return build1 (CLEANUP_POINT_EXPR, type, expr);
12617 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12618 avoid confusing the gimplify process. */
12621 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12623 /* The size of the object is not relevant when talking about its address. */
12624 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12625 t = TREE_OPERAND (t, 0);
12627 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12628 if (TREE_CODE (t) == INDIRECT_REF
12629 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12631 t = TREE_OPERAND (t, 0);
12632 if (TREE_TYPE (t) != ptrtype)
12633 t = build1 (NOP_EXPR, ptrtype, t);
12639 while (handled_component_p (base))
12640 base = TREE_OPERAND (base, 0);
12642 TREE_ADDRESSABLE (base) = 1;
12644 t = build1 (ADDR_EXPR, ptrtype, t);
12651 build_fold_addr_expr (tree t)
12653 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12656 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12657 of an indirection through OP0, or NULL_TREE if no simplification is possible. */
12661 fold_indirect_ref_1 (tree type, tree op0)
12667 subtype = TREE_TYPE (sub);
12668 if (!POINTER_TYPE_P (subtype))
12671 if (TREE_CODE (sub) == ADDR_EXPR)
12673 tree op = TREE_OPERAND (sub, 0);
12674 tree optype = TREE_TYPE (op);
12675 /* *&p => p; make sure to handle *&"str"[cst] here. */
12676 if (type == optype)
12678 tree fop = fold_read_from_constant_string (op);
12684 /* *(foo *)&fooarray => fooarray[0] */
12685 else if (TREE_CODE (optype) == ARRAY_TYPE
12686 && type == TREE_TYPE (optype))
12688 tree type_domain = TYPE_DOMAIN (optype);
12689 tree min_val = size_zero_node;
12690 if (type_domain && TYPE_MIN_VALUE (type_domain))
12691 min_val = TYPE_MIN_VALUE (type_domain);
12692 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12694 /* *(foo *)&complexfoo => __real__ complexfoo */
12695 else if (TREE_CODE (optype) == COMPLEX_TYPE
12696 && type == TREE_TYPE (optype))
12697 return fold_build1 (REALPART_EXPR, type, op);
12700 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12701 if (TREE_CODE (sub) == PLUS_EXPR
12702 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12704 tree op00 = TREE_OPERAND (sub, 0);
12705 tree op01 = TREE_OPERAND (sub, 1);
12709 op00type = TREE_TYPE (op00);
12710 if (TREE_CODE (op00) == ADDR_EXPR
12711 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
12712 && type == TREE_TYPE (TREE_TYPE (op00type)))
12714 tree size = TYPE_SIZE_UNIT (type);
12715 if (tree_int_cst_equal (size, op01))
12716 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
12720 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
12721 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
12722 && type == TREE_TYPE (TREE_TYPE (subtype)))
12725 tree min_val = size_zero_node;
12726 sub = build_fold_indirect_ref (sub);
12727 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
12728 if (type_domain && TYPE_MIN_VALUE (type_domain))
12729 min_val = TYPE_MIN_VALUE (type_domain);
12730 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
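/* Standalone illustration (not part of fold-const.c) of the source-level
   equivalences folded above.  Each initializer denotes the same object as
   the commented form, so the indirection can be rewritten without an
   actual dereference.  */
static double fooarray[4];
static double _Complex complexfoo;

static void
demo_indirect_ref_folds (void)
{
  double a = *&fooarray[2];                /* *&p => p */
  double b = *(double *) &fooarray;        /* *(foo *)&fooarray => fooarray[0] */
  double c = *(double *) &complexfoo;      /* *(foo *)&complexfoo => __real__ complexfoo */
  double d = ((double *) &complexfoo)[1];  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  (void) a; (void) b; (void) c; (void) d;
}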
12736 /* Builds an expression for an indirection through T, simplifying some cases. */
12740 build_fold_indirect_ref (tree t)
12742 tree type = TREE_TYPE (TREE_TYPE (t));
12743 tree sub = fold_indirect_ref_1 (type, t);
12748 return build1 (INDIRECT_REF, type, t);
12751 /* Given an INDIRECT_REF T, return either T or a simplified version. */
12754 fold_indirect_ref (tree t)
12756 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
12764 /* Strip non-trapping, non-side-effecting tree nodes from an expression
12765 whose result is ignored. The type of the returned tree need not be
12766 the same as that of the original expression. */
12769 fold_ignored_result (tree t)
12771 if (!TREE_SIDE_EFFECTS (t))
12772 return integer_zero_node;
12775 switch (TREE_CODE_CLASS (TREE_CODE (t)))
12778 t = TREE_OPERAND (t, 0);
12782 case tcc_comparison:
12783 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12784 t = TREE_OPERAND (t, 0);
12785 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
12786 t = TREE_OPERAND (t, 1);
12791 case tcc_expression:
12792 switch (TREE_CODE (t))
12794 case COMPOUND_EXPR:
12795 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12797 t = TREE_OPERAND (t, 0);
12801 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
12802 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
12804 t = TREE_OPERAND (t, 0);
12817 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
12818 This can only be applied to objects of a sizetype. */
12821 round_up (tree value, int divisor)
12823 tree div = NULL_TREE;
12825 gcc_assert (divisor > 0);
12829 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12830 have to do anything. Only do this when we are not given a const,
12831 because in that case, this check is more expensive than just doing it. */
12833 if (TREE_CODE (value) != INTEGER_CST)
12835 div = build_int_cst (TREE_TYPE (value), divisor);
12837 if (multiple_of_p (TREE_TYPE (value), value, div))
12841 /* If divisor is a power of two, simplify this to bit manipulation. */
12842 if (divisor == (divisor & -divisor))
12846 t = build_int_cst (TREE_TYPE (value), divisor - 1);
12847 value = size_binop (PLUS_EXPR, value, t);
12848 t = build_int_cst (TREE_TYPE (value), -divisor);
12849 value = size_binop (BIT_AND_EXPR, value, t);
12854 div = build_int_cst (TREE_TYPE (value), divisor);
12855 value = size_binop (CEIL_DIV_EXPR, value, div);
12856 value = size_binop (MULT_EXPR, value, div);
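/* Standalone sketch (not part of fold-const.c) of the power-of-two
   shortcut used above: when DIVISOR has a single bit set, rounding up is
   an add followed by a mask, with no division at all.  */
static unsigned long
demo_round_up (unsigned long value, unsigned long divisor)
{
  if (divisor == (divisor & -divisor))          /* power of two? */
    return (value + divisor - 1) & -divisor;    /* add then mask */
  return (value + divisor - 1) / divisor * divisor;  /* ceiling divide, then multiply */
}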
12862 /* Likewise, but round down. */
12865 round_down (tree value, int divisor)
12867 tree div = NULL_TREE;
12869 gcc_assert (divisor > 0);
12873 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12874 have to do anything. Only do this when we are not given a const,
12875 because in that case, this check is more expensive than just doing it. */
12877 if (TREE_CODE (value) != INTEGER_CST)
12879 div = build_int_cst (TREE_TYPE (value), divisor);
12881 if (multiple_of_p (TREE_TYPE (value), value, div))
12885 /* If divisor is a power of two, simplify this to bit manipulation. */
12886 if (divisor == (divisor & -divisor))
12890 t = build_int_cst (TREE_TYPE (value), -divisor);
12891 value = size_binop (BIT_AND_EXPR, value, t);
12896 div = build_int_cst (TREE_TYPE (value), divisor);
12897 value = size_binop (FLOOR_DIV_EXPR, value, div);
12898 value = size_binop (MULT_EXPR, value, div);
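/* Likewise for round_down: with a power-of-two DIVISOR the low bits are
   simply masked off (standalone sketch, not part of fold-const.c).  */
static unsigned long
demo_round_down (unsigned long value, unsigned long divisor)
{
  if (divisor == (divisor & -divisor))  /* power of two? */
    return value & -divisor;            /* clear the low log2(divisor) bits */
  return value / divisor * divisor;     /* floor divide, then multiply */
}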
12904 /* Returns the pointer to the base of the object addressed by EXP and
12905 extracts the information about the offset of the access, storing it
12906 to PBITPOS and POFFSET. */
12909 split_address_to_core_and_offset (tree exp,
12910 HOST_WIDE_INT *pbitpos, tree *poffset)
12913 enum machine_mode mode;
12914 int unsignedp, volatilep;
12915 HOST_WIDE_INT bitsize;
12917 if (TREE_CODE (exp) == ADDR_EXPR)
12919 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12920 poffset, &mode, &unsignedp, &volatilep,
12922 core = build_fold_addr_expr (core);
12928 *poffset = NULL_TREE;
12934 /* Returns true if addresses of E1 and E2 differ by a constant, false
12935 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12938 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12941 HOST_WIDE_INT bitpos1, bitpos2;
12942 tree toffset1, toffset2, tdiff, type;
12944 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12945 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12947 if (bitpos1 % BITS_PER_UNIT != 0
12948 || bitpos2 % BITS_PER_UNIT != 0
12949 || !operand_equal_p (core1, core2, 0))
12952 if (toffset1 && toffset2)
12954 type = TREE_TYPE (toffset1);
12955 if (type != TREE_TYPE (toffset2))
12956 toffset2 = fold_convert (type, toffset2);
12958 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12959 if (!cst_and_fits_in_hwi (tdiff))
12962 *diff = int_cst_value (tdiff);
12964 else if (toffset1 || toffset2)
12966 /* If only one of the offsets is non-constant, the difference cannot be a constant. */
12973 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
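/* Standalone illustration (not part of fold-const.c) of the kind of
   address difference the routine above proves constant: both addresses
   share the core &demo_buf and their offsets are constants, so the
   pointer subtraction folds to the constant 3 (elements).  */
#include <stddef.h>

static int demo_buf[16];

static ptrdiff_t
demo_ptr_difference (void)
{
  return &demo_buf[7] - &demo_buf[4];  /* folds to 3 */
}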
12977 /* Simplify the floating point expression EXP when the sign of the
12978 result is not significant. Return NULL_TREE if no simplification is possible. */
12982 fold_strip_sign_ops (tree exp)
12986 switch (TREE_CODE (exp))
12990 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12991 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12995 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12997 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12998 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12999 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13000 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13001 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13002 arg1 ? arg1 : TREE_OPERAND (exp, 1));