/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
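
/* Illustration (not part of the original source; a minimal sketch
   assuming a two's complement HOST_WIDE_INT): adding two values of the
   same sign overflows exactly when the sum's sign differs from theirs,
   which is what the macro's bit test detects.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  /* Largest positive HOST_WIDE_INT: all bits set except the sign bit.  */
  HOST_WIDE_INT a = (HOST_WIDE_INT) (~(unsigned HOST_WIDE_INT) 0 >> 1);
  /* Add 1 via unsigned arithmetic so the wraparound is well defined.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + 1);

  /* Same signs in, flipped sign out: overflow detected.  */
  gcc_assert (OVERFLOW_SUM_SIGN (a, 1, sum));
  /* Operands of differing signs can never overflow.  */
  gcc_assert (!OVERFLOW_SUM_SIGN (a, -1, a - 1));
}
#endif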
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
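
/* Illustration (not part of the original source): encode splits each
   HOST_WIDE_INT half of a doubleword into its low and high
   HOST_BITS_PER_WIDE_INT/2 bit quarterwords, and decode reassembles
   them, so the two are exact inverses.  A minimal sketch.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x12345678, low2;
  HOST_WIDE_INT hi = -42, hi2;

  encode (words, low, hi);	/* words[0..1] <- low, words[2..3] <- hi */
  decode (words, &low2, &hi2);	/* reassemble as LOWPART + HIGHPART * BASE */
  gcc_assert (low2 == low && hi2 == hi);
}
#endif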
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
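
/* Illustration (not part of the original source): forcing a value with
   bits above an 8-bit type's precision masks those bits away; the
   overflow flags are only set according to the OVERFLOWABLE and
   OVERFLOWED arguments.  A minimal sketch assuming the standard
   unsigned char type node.  */
#if 0
static tree
force_fit_type_example (void)
{
  tree t = build_int_cst_wide (unsigned_char_type_node, 0x1FF, 0);
  /* Masks the bits beyond the 8-bit precision: 0x1FF becomes 0xFF,
     and a fresh node is returned because the value changed.  */
  return force_fit_type (t, 0, false, false);
}
#endif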
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
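
/* Illustration (not part of the original source): the only doubleword
   whose negation overflows is the most negative one, where both the
   input and the result have the sign bit set.  A minimal sketch.  */
#if 0
static void
neg_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* -(1) = -1: no overflow.  */
  gcc_assert (!neg_double (1, 0, &lv, &hv));

  /* The most negative doubleword (low part 0, only the sign bit of the
     high part set) negates to itself, so overflow is reported.  */
  gcc_assert (neg_double (0, (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
					      << (HOST_BITS_PER_WIDE_INT - 1)),
			  &lv, &hv));
}
#endif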
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
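
/* Illustration (not part of the original source): the schoolbook loop
   above produces an 8-quarterword product; the low 4 quarterwords are
   the doubleword result and the high 4 feed only the signed overflow
   check.  A minimal sketch with a negative operand.  */
#if 0
static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* 3 * (-4) = -12 fits in a doubleword: no overflow, and the result
     is the sign-extended pair (lv, hv) = ((unsigned) -12, -1).  */
  gcc_assert (!mul_double (3, 0, (unsigned HOST_WIDE_INT) -4, -1, &lv, &hv));
  gcc_assert (lv == (unsigned HOST_WIDE_INT) -12 && hv == -1);
}
#endif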
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
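
/* Illustration (not part of the original source): the two-step shift
   "l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1" used above moves
   the carried-out bits of the low word into the high word without ever
   shifting by the full word width, which C leaves undefined when
   COUNT is zero.  A minimal sketch of the same trick.  */
#if 0
static unsigned HOST_WIDE_INT
carried_out_bits (unsigned HOST_WIDE_INT x, int count)
{
  /* For 0 <= count < HOST_BITS_PER_WIDE_INT, return the COUNT bits a
     left shift by COUNT would push out of the top of X; the shift
     amount is always strictly less than the word width.  */
  return x >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1;
}
#endif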
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
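
/* Illustration (not part of the original source): a left rotation by
   COUNT within PREC bits is the OR of a left shift by COUNT and a
   logical right shift by PREC - COUNT, which is exactly how the two
   rotate routines are composed from the shift routines.  A minimal
   single-word sketch of the same identity.  */
#if 0
static unsigned HOST_WIDE_INT
rotl_one_word (unsigned HOST_WIDE_INT x, unsigned int count,
	       unsigned int prec)
{
  /* Assumes 0 < count < prec <= HOST_BITS_PER_WIDE_INT.  */
  unsigned HOST_WIDE_INT mask
    = (prec == HOST_BITS_PER_WIDE_INT
       ? ~(unsigned HOST_WIDE_INT) 0
       : ((unsigned HOST_WIDE_INT) 1 << prec) - 1);
  return ((x << count) | ((x & mask) >> (prec - count))) & mask;
}
#endif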
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */
int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
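
/* Illustration (not part of the original source): for num = -7 and
   den = 2 the rounding modes differ only in how the exact ratio -3.5
   is taken to an integer; in every case num == quo * den + rem holds
   afterward.

     TRUNC_DIV_EXPR   quo = -3, rem = -1   (toward zero)
     FLOOR_DIV_EXPR   quo = -4, rem =  1   (toward negative infinity)
     CEIL_DIV_EXPR    quo = -3, rem = -1   (toward positive infinity)
     ROUND_DIV_EXPR   quo = -4, rem =  1   (closest; this tie moves away
					    from zero)

   A minimal sketch of one such call, assuming signed operands.  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, -1,  /* num = -7 */
			2, 0,				  /* den = 2 */
			&lquo, &hquo, &lrem, &hrem);
  /* lquo/hquo now hold -4 and lrem/hrem hold 1.  */
}
#endif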
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
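
/* Illustration (not part of the original source): in a signed two's
   complement type, every value except the most negative one has a
   representable negation, so the predicate is false exactly for that
   minimum.  A minimal sketch assuming a 32-bit int type node and that
   limits.h has been pulled in by system.h.  */
#if 0
static void
may_negate_example (void)
{
  tree int_min = build_int_cst (integer_type_node, INT_MIN);
  tree one = build_int_cst (integer_type_node, 1);

  gcc_assert (!may_negate_without_overflow_p (int_min));
  gcc_assert (may_negate_without_overflow_p (one));
}
#endif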
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
	     && (TYPE_UNSIGNED (type)
		 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && (TYPE_UNSIGNED (type)
	      || (flag_wrapv && !flag_trapv)))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
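
/* Illustration (not part of the original source): the cases above
   implement simple algebraic identities, for example

     -(~A)      ->  A + 1           (wrapping integer types)
     -(A + B)   ->  (-B) - A   or   (-A) - B
     -(A - B)   ->  B - A
     -(A * B)   ->  A * (-B)   or   (-A) * B
     -((T) F)   ->  (T) (-F)        (float extensions stripped first)

   each applied only under the signedness, trapping and floating-point
   safety checks spelled out in the corresponding case.  */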
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
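
/* Illustration (not part of the original source): splitting IN = x - 5
   with CODE == PLUS_EXPR yields var = x, *litp = 0, *minus_litp = 5
   and *conp = 0: the literal was subtracted, so it is reported through
   *MINUS_LITP rather than being negated.  The caller can then
   recombine the pieces with associate_trees below.  */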
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
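
/* Illustration (not part of the original source): folding 2 + 3 at
   compile time.  A minimal sketch using the standard int type node.  */
#if 0
static tree
int_const_binop_example (void)
{
  tree a = build_int_cst (integer_type_node, 2);
  tree b = build_int_cst (integer_type_node, 3);
  /* Returns an INTEGER_CST of value 5; no overflow, so the result is
     truncated to fit the type and the overflow flags stay clear.  */
  return int_const_binop (PLUS_EXPR, a, b, 0);
}
#endif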
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree t1, t2, real, imag;
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t1 = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	    t2 = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      {
		real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
	      }
	    else
	      {
		real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
	      }

	    if (!real || !imag)
	      return NULL_TREE;

	    t = build_complex (type, real, imag);
	  }
	  break;

	default:
	  return NULL_TREE;
	}
      return t;
    }
  return NULL_TREE;
}
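
/* Illustration (not part of the original source): the RDIV_EXPR case
   above evaluates the textbook complex division formula

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   with T1 and T2 holding the two numerators and MAGSQUARED the shared
   real denominator.  */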
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
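
/* Illustration (not part of the original source): computing the byte
   offset of element 3 of an array of 4-byte elements as 3 * 4.  A
   minimal sketch; size_int builds sizetype constants via
   size_int_kind above.  */
#if 0
static tree
size_binop_example (void)
{
  tree elt_size = size_int (4);
  tree index = size_int (3);
  /* Both operands are sizetype INTEGER_CSTs, so this folds to the
     constant 12 through int_const_binop.  */
  return size_binop (MULT_EXPR, index, elt_size);
}
#endif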
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   REAL_CST to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
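
/* Illustration (not part of the original source): the encoding gives
   each primitive outcome its own bit (LT = 1, EQ = 2, GT = 4,
   UNORD = 8), so combining two comparisons of the same operands is
   plain bit arithmetic, e.g.

     LE & GE  ==  (LT|EQ) & (GT|EQ)  ==  EQ
     LT | GT  ==  LTGT  (folded to NE when NaNs are excluded)

   which is exactly what combine_comparisons below relies on.  */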
2363 /* Return a tree for the comparison which is the combination of
2364 doing the AND or OR (depending on CODE) of the two operations LCODE
2365 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2366 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2367 if this makes the transformation invalid. */
2370 combine_comparisons (enum tree_code code, enum tree_code lcode,
2371 enum tree_code rcode, tree truth_type,
2372 tree ll_arg, tree lr_arg)
2374 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2375 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2376 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2377 enum comparison_code compcode;
2381 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2382 compcode = lcompcode & rcompcode;
2385 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2386 compcode = lcompcode | rcompcode;
2395 /* Eliminate unordered comparisons, as well as LTGT and ORD
2396 which are not used unless the mode has NaNs. */
2397 compcode &= ~COMPCODE_UNORD;
2398 if (compcode == COMPCODE_LTGT)
2399 compcode = COMPCODE_NE;
2400 else if (compcode == COMPCODE_ORD)
2401 compcode = COMPCODE_TRUE;
2403 else if (flag_trapping_math)
2405 /* Check that the original operation and the optimized ones will trap
2406 under the same condition. */
2407 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2408 && (lcompcode != COMPCODE_EQ)
2409 && (lcompcode != COMPCODE_ORD);
2410 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2411 && (rcompcode != COMPCODE_EQ)
2412 && (rcompcode != COMPCODE_ORD);
2413 bool trap = (compcode & COMPCODE_UNORD) == 0
2414 && (compcode != COMPCODE_EQ)
2415 && (compcode != COMPCODE_ORD);
2417 /* In a short-circuited boolean expression the LHS might be
2418 such that the RHS, if evaluated, will never trap. For
2419 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2420 if neither x nor y is NaN. (This is a mixed blessing: for
2421 example, the expression above will never trap, hence
2422 optimizing it to x < y would be invalid). */
2423 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2424 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2427 /* If the comparison was short-circuited, and only the RHS
2428 trapped, we may now generate a spurious trap. */
2430 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2433 /* If we changed the conditions that cause a trap, we lose. */
2434 if ((ltrap || rtrap) != trap)
2438 if (compcode == COMPCODE_TRUE)
2439 return constant_boolean_node (true, truth_type);
2440 else if (compcode == COMPCODE_FALSE)
2441 return constant_boolean_node (false, truth_type);
2443 return fold_build2 (compcode_to_comparison (compcode),
2444 truth_type, ll_arg, lr_arg);
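/* For example, combining (a < b) && (a == b) ANDs COMPCODE_LT with
   COMPCODE_EQ, giving COMPCODE_FALSE and hence a constant false, while
   (a < b) || (a == b) ORs them into COMPCODE_LE, i.e. the single
   comparison a <= b (trapping-math checks permitting).  */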
2447 /* Return nonzero if CODE is a tree code that represents a truth value. */
2450 truth_value_p (enum tree_code code)
2452 return (TREE_CODE_CLASS (code) == tcc_comparison
2453 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2454 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2455 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2458 /* Return nonzero if two operands (typically of the same tree node)
2459 are necessarily equal. If either argument has side-effects this
2460 function returns zero. FLAGS modifies behavior as follows:
2462 If OEP_ONLY_CONST is set, only return nonzero for constants.
2463 This function tests whether the operands are indistinguishable;
2464 it does not test whether they are equal using C's == operation.
2465 The distinction is important for IEEE floating point, because
2466 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2467 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2469 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2470 even though it may hold multiple values during a function.
2471 This is because a GCC tree node guarantees that nothing else is
2472 executed between the evaluation of its "operands" (which may often
2473 be evaluated in arbitrary order). Hence if the operands themselves
2474 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2475 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2476 unset means assuming isochronic (or instantaneous) tree equivalence.
2477 Unless comparing arbitrary expression trees, such as from different
2478 statements, this flag can usually be left unset.
2480 If OEP_PURE_SAME is set, then pure functions with identical arguments
2481 are considered the same. It is used when the caller has other ways
2482 to ensure that global memory is unchanged in between. */
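/* For instance, the REAL_CST nodes for -0.0 and 0.0 are not
   operand_equal_p, since REAL_VALUES_IDENTICAL tells them apart even
   though -0.0 == 0.0 as a C expression; conversely, two bit-identical
   NaN constants compare equal here although NaN != NaN at run time.  */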
2485 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2487 /* If either is ERROR_MARK, they aren't equal. */
2488 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2491 /* If both types don't have the same signedness, then we can't consider
2492 them equal. We must check this before the STRIP_NOPS calls
2493 because they may change the signedness of the arguments. */
2494 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2500 /* In case both args are comparisons but with different comparison
2501 code, try to swap the comparison operands of one arg to produce
2502 a match and compare that variant. */
2503 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2504 && COMPARISON_CLASS_P (arg0)
2505 && COMPARISON_CLASS_P (arg1))
2507 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2509 if (TREE_CODE (arg0) == swap_code)
2510 return operand_equal_p (TREE_OPERAND (arg0, 0),
2511 TREE_OPERAND (arg1, 1), flags)
2512 && operand_equal_p (TREE_OPERAND (arg0, 1),
2513 TREE_OPERAND (arg1, 0), flags);
2516 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2517 /* This is needed for conversions and for COMPONENT_REF.
2518 Might as well play it safe and always test this. */
2519 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2520 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2521 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2524 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2525 We don't care about side effects in that case because the SAVE_EXPR
2526 takes care of that for us. In all other cases, two expressions are
2527 equal if they have no side effects. If we have two identical
2528 expressions with side effects that should be treated the same due
2529 to the only side effects being identical SAVE_EXPR's, that will
2530 be detected in the recursive calls below. */
2531 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2532 && (TREE_CODE (arg0) == SAVE_EXPR
2533 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2536 /* Next handle constant cases, those for which we can return 1 even
2537 if ONLY_CONST is set. */
2538 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2539 switch (TREE_CODE (arg0))
2542 return (! TREE_CONSTANT_OVERFLOW (arg0)
2543 && ! TREE_CONSTANT_OVERFLOW (arg1)
2544 && tree_int_cst_equal (arg0, arg1));
2547 return (! TREE_CONSTANT_OVERFLOW (arg0)
2548 && ! TREE_CONSTANT_OVERFLOW (arg1)
2549 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2550 TREE_REAL_CST (arg1)));
2556 if (TREE_CONSTANT_OVERFLOW (arg0)
2557 || TREE_CONSTANT_OVERFLOW (arg1))
2560 v1 = TREE_VECTOR_CST_ELTS (arg0);
2561 v2 = TREE_VECTOR_CST_ELTS (arg1);
2564 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2567 v1 = TREE_CHAIN (v1);
2568 v2 = TREE_CHAIN (v2);
2575 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2577 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2581 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2582 && ! memcmp (TREE_STRING_POINTER (arg0),
2583 TREE_STRING_POINTER (arg1),
2584 TREE_STRING_LENGTH (arg0)));
2587 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2593 if (flags & OEP_ONLY_CONST)
2596 /* Define macros to test an operand from arg0 and arg1 for equality and a
2597 variant that allows null and views null as being different from any
2598 non-null value. In the latter case, if either is null, then both
2599 must be null; otherwise, do the normal comparison. */
2600 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2601 TREE_OPERAND (arg1, N), flags)
2603 #define OP_SAME_WITH_NULL(N) \
2604 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2605 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2607 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2610 /* Two conversions are equal only if signedness and modes match. */
2611 switch (TREE_CODE (arg0))
2616 case FIX_TRUNC_EXPR:
2617 case FIX_FLOOR_EXPR:
2618 case FIX_ROUND_EXPR:
2619 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2620 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2630 case tcc_comparison:
2632 if (OP_SAME (0) && OP_SAME (1))
2635 /* For commutative ops, allow the other order. */
2636 return (commutative_tree_code (TREE_CODE (arg0))
2637 && operand_equal_p (TREE_OPERAND (arg0, 0),
2638 TREE_OPERAND (arg1, 1), flags)
2639 && operand_equal_p (TREE_OPERAND (arg0, 1),
2640 TREE_OPERAND (arg1, 0), flags));
2643 /* If either of the pointer (or reference) expressions we are
2644 dereferencing contain a side effect, these cannot be equal. */
2645 if (TREE_SIDE_EFFECTS (arg0)
2646 || TREE_SIDE_EFFECTS (arg1))
2649 switch (TREE_CODE (arg0))
2652 case ALIGN_INDIRECT_REF:
2653 case MISALIGNED_INDIRECT_REF:
2659 case ARRAY_RANGE_REF:
2660 /* Operands 2 and 3 may be null. */
2663 && OP_SAME_WITH_NULL (2)
2664 && OP_SAME_WITH_NULL (3));
2667 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2668 may be NULL when we're called to compare MEM_EXPRs. */
2669 return OP_SAME_WITH_NULL (0)
2671 && OP_SAME_WITH_NULL (2);
2674 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2680 case tcc_expression:
2681 switch (TREE_CODE (arg0))
2684 case TRUTH_NOT_EXPR:
2687 case TRUTH_ANDIF_EXPR:
2688 case TRUTH_ORIF_EXPR:
2689 return OP_SAME (0) && OP_SAME (1);
2691 case TRUTH_AND_EXPR:
2693 case TRUTH_XOR_EXPR:
2694 if (OP_SAME (0) && OP_SAME (1))
2697 /* Otherwise take into account this is a commutative operation. */
2698 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2699 TREE_OPERAND (arg1, 1), flags)
2700 && operand_equal_p (TREE_OPERAND (arg0, 1),
2701 TREE_OPERAND (arg1, 0), flags));
2704 /* If the CALL_EXPRs call different functions, then they
2705 clearly can not be equal. */
2710 unsigned int cef = call_expr_flags (arg0);
2711 if (flags & OEP_PURE_SAME)
2712 cef &= ECF_CONST | ECF_PURE;
2719 /* Now see if all the arguments are the same. operand_equal_p
2720 does not handle TREE_LIST, so we walk the operands here
2721 feeding them to operand_equal_p. */
2722 arg0 = TREE_OPERAND (arg0, 1);
2723 arg1 = TREE_OPERAND (arg1, 1);
2724 while (arg0 && arg1)
2726 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2730 arg0 = TREE_CHAIN (arg0);
2731 arg1 = TREE_CHAIN (arg1);
2734 /* If we get here and both argument lists are exhausted
2735 then the CALL_EXPRs are equal. */
2736 return ! (arg0 || arg1);
2742 case tcc_declaration:
2743 /* Consider __builtin_sqrt equal to sqrt. */
2744 return (TREE_CODE (arg0) == FUNCTION_DECL
2745 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2746 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2747 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2754 #undef OP_SAME_WITH_NULL
2757 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2758 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2760 When in doubt, return 0. */
2763 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2765 int unsignedp1, unsignedpo;
2766 tree primarg0, primarg1, primother;
2767 unsigned int correct_width;
2769 if (operand_equal_p (arg0, arg1, 0))
2772 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2773 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2776 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2777 and see if the inner values are the same. This removes any
2778 signedness comparison, which doesn't matter here. */
2779 primarg0 = arg0, primarg1 = arg1;
2780 STRIP_NOPS (primarg0);
2781 STRIP_NOPS (primarg1);
2782 if (operand_equal_p (primarg0, primarg1, 0))
2785 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2786 actual comparison operand, ARG0.
2788 First throw away any conversions to wider types
2789 already present in the operands. */
2791 primarg1 = get_narrower (arg1, &unsignedp1);
2792 primother = get_narrower (other, &unsignedpo);
2794 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2795 if (unsignedp1 == unsignedpo
2796 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2797 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2799 tree type = TREE_TYPE (arg0);
2801 /* Make sure shorter operand is extended the right way
2802 to match the longer operand. */
2803 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2804 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2806 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2813 /* See if ARG is an expression that is either a comparison or is performing
2814 arithmetic on comparisons. The comparisons must only be comparing
2815 two different values, which will be stored in *CVAL1 and *CVAL2; if
2816 they are nonzero it means that some operands have already been found.
2817 No variables may be used anywhere else in the expression except in the
2818 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2819 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2821 If this is true, return 1. Otherwise, return zero. */
2824 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2826 enum tree_code code = TREE_CODE (arg);
2827 enum tree_code_class class = TREE_CODE_CLASS (code);
2829 /* We can handle some of the tcc_expression cases here. */
2830 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2832 else if (class == tcc_expression
2833 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2834 || code == COMPOUND_EXPR))
2837 else if (class == tcc_expression && code == SAVE_EXPR
2838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2840 /* If we've already found a CVAL1 or CVAL2, this expression is
2841 too complex to handle. */
2842 if (*cval1 || *cval2)
2852 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2855 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2856 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2857 cval1, cval2, save_p));
2862 case tcc_expression:
2863 if (code == COND_EXPR)
2864 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2865 cval1, cval2, save_p)
2866 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2867 cval1, cval2, save_p)
2868 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2869 cval1, cval2, save_p));
2872 case tcc_comparison:
2873 /* First see if we can handle the first operand, then the second. For
2874 the second operand, we know *CVAL1 can't be zero. It must be that
2875 one side of the comparison is each of the values; test for the
2876 case where this isn't true by failing if the two operands are the same. */
2879 if (operand_equal_p (TREE_OPERAND (arg, 0),
2880 TREE_OPERAND (arg, 1), 0))
2884 *cval1 = TREE_OPERAND (arg, 0);
2885 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2887 else if (*cval2 == 0)
2888 *cval2 = TREE_OPERAND (arg, 0);
2889 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2894 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2896 else if (*cval2 == 0)
2897 *cval2 = TREE_OPERAND (arg, 1);
2898 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2910 /* ARG is a tree that is known to contain just arithmetic operations and
2911 comparisons. Evaluate the operations in the tree substituting NEW0 for
2912 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2916 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2918 tree type = TREE_TYPE (arg);
2919 enum tree_code code = TREE_CODE (arg);
2920 enum tree_code_class class = TREE_CODE_CLASS (code);
2922 /* We can handle some of the tcc_expression cases here. */
2923 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2925 else if (class == tcc_expression
2926 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2932 return fold_build1 (code, type,
2933 eval_subst (TREE_OPERAND (arg, 0),
2934 old0, new0, old1, new1));
2937 return fold_build2 (code, type,
2938 eval_subst (TREE_OPERAND (arg, 0),
2939 old0, new0, old1, new1),
2940 eval_subst (TREE_OPERAND (arg, 1),
2941 old0, new0, old1, new1));
2943 case tcc_expression:
2947 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2950 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2953 return fold_build3 (code, type,
2954 eval_subst (TREE_OPERAND (arg, 0),
2955 old0, new0, old1, new1),
2956 eval_subst (TREE_OPERAND (arg, 1),
2957 old0, new0, old1, new1),
2958 eval_subst (TREE_OPERAND (arg, 2),
2959 old0, new0, old1, new1));
2963 /* Fall through - ??? */
2965 case tcc_comparison:
2967 tree arg0 = TREE_OPERAND (arg, 0);
2968 tree arg1 = TREE_OPERAND (arg, 1);
2970 /* We need to check both for exact equality and tree equality. The
2971 former will be true if the operand has a side-effect. In that
2972 case, we know the operand occurred exactly once. */
2974 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2976 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2979 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2981 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2984 return fold_build2 (code, type, arg0, arg1);
2992 /* Return a tree for the case when the result of an expression is RESULT
2993 converted to TYPE and OMITTED was previously an operand of the expression
2994 but is now not needed (e.g., we folded OMITTED * 0).
2996 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2997 the conversion of RESULT to TYPE. */
3000 omit_one_operand (tree type, tree result, tree omitted)
3002 tree t = fold_convert (type, result);
3004 if (TREE_SIDE_EFFECTS (omitted))
3005 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3007 return non_lvalue (t);
3010 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3013 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3015 tree t = fold_convert (type, result);
3017 if (TREE_SIDE_EFFECTS (omitted))
3018 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3020 return pedantic_non_lvalue (t);
3023 /* Return a tree for the case when the result of an expression is RESULT
3024 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3025 of the expression but are now not needed.
3027 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3028 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3029 evaluated before OMITTED2. Otherwise, if neither has side effects,
3030 just do the conversion of RESULT to TYPE. */
3033 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3035 tree t = fold_convert (type, result);
3037 if (TREE_SIDE_EFFECTS (omitted2))
3038 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3039 if (TREE_SIDE_EFFECTS (omitted1))
3040 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3042 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3046 /* Return a simplified tree node for the truth-negation of ARG. This
3047 never alters ARG itself. We assume that ARG is an operation that
3048 returns a truth value (0 or 1).
3050 FIXME: one would think we would fold the result, but it causes
3051 problems with the dominator optimizer. */
3053 invert_truthvalue (tree arg)
3055 tree type = TREE_TYPE (arg);
3056 enum tree_code code = TREE_CODE (arg);
3058 if (code == ERROR_MARK)
3061 /* If this is a comparison, we can simply invert it, except for
3062 floating-point non-equality comparisons, in which case we just
3063 enclose a TRUTH_NOT_EXPR around what we have. */
3065 if (TREE_CODE_CLASS (code) == tcc_comparison)
3067 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3068 if (FLOAT_TYPE_P (op_type)
3069 && flag_trapping_math
3070 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3071 && code != NE_EXPR && code != EQ_EXPR)
3072 return build1 (TRUTH_NOT_EXPR, type, arg);
3075 code = invert_tree_comparison (code,
3076 HONOR_NANS (TYPE_MODE (op_type)));
3077 if (code == ERROR_MARK)
3078 return build1 (TRUTH_NOT_EXPR, type, arg);
3080 return build2 (code, type,
3081 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3088 return constant_boolean_node (integer_zerop (arg), type);
3090 case TRUTH_AND_EXPR:
3091 return build2 (TRUTH_OR_EXPR, type,
3092 invert_truthvalue (TREE_OPERAND (arg, 0)),
3093 invert_truthvalue (TREE_OPERAND (arg, 1)));
3096 return build2 (TRUTH_AND_EXPR, type,
3097 invert_truthvalue (TREE_OPERAND (arg, 0)),
3098 invert_truthvalue (TREE_OPERAND (arg, 1)));
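/* The two cases above are just De Morgan's laws:
   !(a && b) == !a || !b  and  !(a || b) == !a && !b.  */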
3100 case TRUTH_XOR_EXPR:
3101 /* Here we can invert either operand. We invert the first operand
3102 unless the second operand is a TRUTH_NOT_EXPR in which case our
3103 result is the XOR of the first operand with the inside of the
3104 negation of the second operand. */
3106 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3107 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3108 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3110 return build2 (TRUTH_XOR_EXPR, type,
3111 invert_truthvalue (TREE_OPERAND (arg, 0)),
3112 TREE_OPERAND (arg, 1));
3114 case TRUTH_ANDIF_EXPR:
3115 return build2 (TRUTH_ORIF_EXPR, type,
3116 invert_truthvalue (TREE_OPERAND (arg, 0)),
3117 invert_truthvalue (TREE_OPERAND (arg, 1)));
3119 case TRUTH_ORIF_EXPR:
3120 return build2 (TRUTH_ANDIF_EXPR, type,
3121 invert_truthvalue (TREE_OPERAND (arg, 0)),
3122 invert_truthvalue (TREE_OPERAND (arg, 1)));
3124 case TRUTH_NOT_EXPR:
3125 return TREE_OPERAND (arg, 0);
3129 tree arg1 = TREE_OPERAND (arg, 1);
3130 tree arg2 = TREE_OPERAND (arg, 2);
3131 /* A COND_EXPR may have a throw as one operand, which
3132 then has void type. Just leave such void operands as they are. */
3134 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3135 VOID_TYPE_P (TREE_TYPE (arg1))
3136 ? arg1 : invert_truthvalue (arg1),
3137 VOID_TYPE_P (TREE_TYPE (arg2))
3138 ? arg2 : invert_truthvalue (arg2));
3142 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3143 invert_truthvalue (TREE_OPERAND (arg, 1)));
3145 case NON_LVALUE_EXPR:
3146 return invert_truthvalue (TREE_OPERAND (arg, 0));
3149 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3154 return build1 (TREE_CODE (arg), type,
3155 invert_truthvalue (TREE_OPERAND (arg, 0)));
3158 if (!integer_onep (TREE_OPERAND (arg, 1)))
3160 return build2 (EQ_EXPR, type, arg,
3161 build_int_cst (type, 0));
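/* I.e. the boolean value X & 1 is inverted as (X & 1) == 0; any other
   mask is rejected above because the result need not be 0 or 1.  */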
3164 return build1 (TRUTH_NOT_EXPR, type, arg);
3166 case CLEANUP_POINT_EXPR:
3167 return build1 (CLEANUP_POINT_EXPR, type,
3168 invert_truthvalue (TREE_OPERAND (arg, 0)));
3173 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3174 return build1 (TRUTH_NOT_EXPR, type, arg);
3177 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3178 operands are another bit-wise operation with a common input. If so,
3179 distribute the bit operations to save an operation and possibly two if
3180 constants are involved. For example, convert
3181 (A | B) & (A | C) into A | (B & C)
3182 Further simplification will occur if B and C are constants.
3184 If this optimization cannot be done, 0 will be returned. */
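/* E.g. with constants, (X | 1) & (X | 2) distributes to X | (1 & 2),
   which folds further to X | 0 and then simply X.  */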
3187 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3192 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3193 || TREE_CODE (arg0) == code
3194 || (TREE_CODE (arg0) != BIT_AND_EXPR
3195 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3198 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3200 common = TREE_OPERAND (arg0, 0);
3201 left = TREE_OPERAND (arg0, 1);
3202 right = TREE_OPERAND (arg1, 1);
3204 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3206 common = TREE_OPERAND (arg0, 0);
3207 left = TREE_OPERAND (arg0, 1);
3208 right = TREE_OPERAND (arg1, 0);
3210 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3212 common = TREE_OPERAND (arg0, 1);
3213 left = TREE_OPERAND (arg0, 0);
3214 right = TREE_OPERAND (arg1, 1);
3216 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3218 common = TREE_OPERAND (arg0, 1);
3219 left = TREE_OPERAND (arg0, 0);
3220 right = TREE_OPERAND (arg1, 0);
3225 return fold_build2 (TREE_CODE (arg0), type, common,
3226 fold_build2 (code, type, left, right));
3229 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3230 with code CODE. This optimization is unsafe. */
3232 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2 (code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2 (MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
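/* For instance, x/2.0 + x/4.0 becomes x * (0.5 + 0.25), i.e. x * 0.75.
   This is why the optimization is flagged unsafe: 1/C need not be
   exactly representable, so rounding may differ from the original.  */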
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3273 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert (type, inner);
3288 result = build3 (BIT_FIELD_REF, type, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3296 /* Optimize a bit-field compare.
3298 There are two cases: First is a compare against a constant and the
3299 second is a comparison of two items where the fields are at the same
3300 bit position relative to the start of a chunk (byte, halfword, word)
3301 large enough to contain it. In these cases we can avoid the shift
3302 implicit in bitfield extractions.
3304 For constants, we emit a compare of the shifted constant with the
3305 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3306 compared. For two fields at the same position, we do the ANDs with the
3307 similar mask and compare the result of the ANDs.
3309 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3310 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3311 are the left and right operands of the comparison, respectively.
3313 If the optimization described above can be done, we return the resulting
3314 tree. Otherwise we return zero. */
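/* A hypothetical illustration (field names and layout invented):

     struct { unsigned lo : 8; unsigned f : 4; } s;

   Comparing s.f == 3 normally extracts and shifts the field; instead we
   can test (word & mask) == (3 << 8), where word is a whole chunk
   containing the field and mask covers bits 8..11.  */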
3317 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3320 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3321 tree type = TREE_TYPE (lhs);
3322 tree signed_type, unsigned_type;
3323 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3324 enum machine_mode lmode, rmode, nmode;
3325 int lunsignedp, runsignedp;
3326 int lvolatilep = 0, rvolatilep = 0;
3327 tree linner, rinner = NULL_TREE;
3331 /* Get all the information about the extractions being done. If the bit size
3332 is the same as the size of the underlying object, we aren't doing an
3333 extraction at all and so can do nothing. We also don't want to
3334 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3335 then will no longer be able to replace it. */
3336 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3337 &lunsignedp, &lvolatilep, false);
3338 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3339 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3344 /* If this is not a constant, we can only do something if bit positions,
3345 sizes, and signedness are the same. */
3346 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3347 &runsignedp, &rvolatilep, false);
3349 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3350 || lunsignedp != runsignedp || offset != 0
3351 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3355 /* See if we can find a mode to refer to this field. We should be able to,
3356 but fail if we can't. */
3357 nmode = get_best_mode (lbitsize, lbitpos,
3358 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3359 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3360 TYPE_ALIGN (TREE_TYPE (rinner))),
3361 word_mode, lvolatilep || rvolatilep);
3362 if (nmode == VOIDmode)
3365 /* Set signed and unsigned types of the precision of this mode for the operations below. */
3367 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3368 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3370 /* Compute the bit position and size for the new reference and our offset
3371 within it. If the new reference is the same size as the original, we
3372 won't optimize anything, so return zero. */
3373 nbitsize = GET_MODE_BITSIZE (nmode);
3374 nbitpos = lbitpos & ~ (nbitsize - 1);
3376 if (nbitsize == lbitsize)
3379 if (BYTES_BIG_ENDIAN)
3380 lbitpos = nbitsize - lbitsize - lbitpos;
3382 /* Make the mask to be used against the extracted field. */
3383 mask = build_int_cst (unsigned_type, -1);
3384 mask = force_fit_type (mask, 0, false, false);
3385 mask = fold_convert (unsigned_type, mask);
3386 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3387 mask = const_binop (RSHIFT_EXPR, mask,
3388 size_int (nbitsize - lbitsize - lbitpos), 0);
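/* E.g. with nbitsize == 32, lbitsize == 4 and lbitpos == 8 (after any
   BYTES_BIG_ENDIAN adjustment above), the all-ones value is shifted left
   by 28 and then, in the unsigned type, logically right by 20, leaving
   the mask 0xf00.  */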
3391 /* If not comparing with constant, just rework the comparison and return. */
3393 return build2 (code, compare_type,
3394 build2 (BIT_AND_EXPR, unsigned_type,
3395 make_bit_field_ref (linner, unsigned_type,
3396 nbitsize, nbitpos, 1),
3398 build2 (BIT_AND_EXPR, unsigned_type,
3399 make_bit_field_ref (rinner, unsigned_type,
3400 nbitsize, nbitpos, 1),
3403 /* Otherwise, we are handling the constant case. See if the constant is too
3404 big for the field. Warn and return a tree for 0 (false) if so. We do
3405 this not only for its own sake, but to avoid having to test for this
3406 error case below. If we didn't, we might generate wrong code.
3408 For unsigned fields, the constant shifted right by the field length should
3409 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3414 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3415 fold_convert (unsigned_type, rhs),
3416 size_int (lbitsize), 0)))
3418 warning (0, "comparison is always %d due to width of bit-field",
3420 return constant_boolean_node (code == NE_EXPR, compare_type);
3425 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3426 size_int (lbitsize - 1), 0);
3427 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3429 warning (0, "comparison is always %d due to width of bit-field",
3431 return constant_boolean_node (code == NE_EXPR, compare_type);
3435 /* Single-bit compares should always be against zero. */
3436 if (lbitsize == 1 && ! integer_zerop (rhs))
3438 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3439 rhs = build_int_cst (type, 0);
3442 /* Make a new bitfield reference, shift the constant over the
3443 appropriate number of bits and mask it with the computed mask
3444 (in case this was a signed field). If we changed it, make a new one. */
3445 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3448 TREE_SIDE_EFFECTS (lhs) = 1;
3449 TREE_THIS_VOLATILE (lhs) = 1;
3452 rhs = const_binop (BIT_AND_EXPR,
3453 const_binop (LSHIFT_EXPR,
3454 fold_convert (unsigned_type, rhs),
3455 size_int (lbitpos), 0),
3458 return build2 (code, compare_type,
3459 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3463 /* Subroutine for fold_truthop: decode a field reference.
3465 If EXP is a comparison reference, we return the innermost reference.
3467 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3468 set to the starting bit number.
3470 If the innermost field can be completely contained in a mode-sized
3471 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3473 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3474 otherwise it is not changed.
3476 *PUNSIGNEDP is set to the signedness of the field.
3478 *PMASK is set to the mask used. This is either contained in a
3479 BIT_AND_EXPR or derived from the width of the field.
3481 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3483 Return 0 if this is not a component reference or is one that we can't
3484 do anything with. */
3487 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3488 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3489 int *punsignedp, int *pvolatilep,
3490 tree *pmask, tree *pand_mask)
3492 tree outer_type = 0;
3494 tree mask, inner, offset;
3496 unsigned int precision;
3498 /* All the optimizations using this function assume integer fields.
3499 There are problems with FP fields since the type_for_size call
3500 below can fail for, e.g., XFmode. */
3501 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3504 /* We are interested in the bare arrangement of bits, so strip everything
3505 that doesn't affect the machine mode. However, record the type of the
3506 outermost expression if it may matter below. */
3507 if (TREE_CODE (exp) == NOP_EXPR
3508 || TREE_CODE (exp) == CONVERT_EXPR
3509 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3510 outer_type = TREE_TYPE (exp);
3513 if (TREE_CODE (exp) == BIT_AND_EXPR)
3515 and_mask = TREE_OPERAND (exp, 1);
3516 exp = TREE_OPERAND (exp, 0);
3517 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3518 if (TREE_CODE (and_mask) != INTEGER_CST)
3522 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3523 punsignedp, pvolatilep, false);
3524 if ((inner == exp && and_mask == 0)
3525 || *pbitsize < 0 || offset != 0
3526 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3529 /* If the number of bits in the reference is the same as the bitsize of
3530 the outer type, then the outer type gives the signedness. Otherwise
3531 (in case of a small bitfield) the signedness is unchanged. */
3532 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3533 *punsignedp = TYPE_UNSIGNED (outer_type);
3535 /* Compute the mask to access the bitfield. */
3536 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3537 precision = TYPE_PRECISION (unsigned_type);
3539 mask = build_int_cst (unsigned_type, -1);
3540 mask = force_fit_type (mask, 0, false, false);
3542 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3543 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3545 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3547 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3548 fold_convert (unsigned_type, and_mask), mask);
3551 *pand_mask = and_mask;
3555 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits of its type. */
3559 all_ones_mask_p (tree mask, int size)
3561 tree type = TREE_TYPE (mask);
3562 unsigned int precision = TYPE_PRECISION (type);
3565 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3566 tmask = force_fit_type (tmask, 0, false, false);
3569 tree_int_cst_equal (mask,
3570 const_binop (RSHIFT_EXPR,
3571 const_binop (LSHIFT_EXPR, tmask,
3572 size_int (precision - size),
3574 size_int (precision - size), 0));
3577 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3578 represents the sign bit of EXP's type. If EXP represents a sign
3579 or zero extension, also test VAL against the unextended type.
3580 The return value is the (sub)expression whose sign bit is VAL,
3581 or NULL_TREE otherwise. */
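/* For example, for a 16-bit signed EXP only VAL == 0x8000 (bit 15 set,
   all lower bits clear) is recognized as the sign bit.  */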
3584 sign_bit_p (tree exp, tree val)
3586 unsigned HOST_WIDE_INT mask_lo, lo;
3587 HOST_WIDE_INT mask_hi, hi;
3591 /* Tree EXP must have an integral type. */
3592 t = TREE_TYPE (exp);
3593 if (! INTEGRAL_TYPE_P (t))
3596 /* Tree VAL must be an integer constant. */
3597 if (TREE_CODE (val) != INTEGER_CST
3598 || TREE_CONSTANT_OVERFLOW (val))
3601 width = TYPE_PRECISION (t);
3602 if (width > HOST_BITS_PER_WIDE_INT)
3604 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3607 mask_hi = ((unsigned HOST_WIDE_INT) -1
3608 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3614 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3617 mask_lo = ((unsigned HOST_WIDE_INT) -1
3618 >> (HOST_BITS_PER_WIDE_INT - width));
3621 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3622 treat VAL as if it were unsigned. */
3623 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3624 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3627 /* Handle extension from a narrower type. */
3628 if (TREE_CODE (exp) == NOP_EXPR
3629 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3630 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3635 /* Subroutine for fold_truthop: determine if an operand is simple enough
3636 to be evaluated unconditionally. */
3639 simple_operand_p (tree exp)
3641 /* Strip any conversions that don't change the machine mode. */
3644 return (CONSTANT_CLASS_P (exp)
3645 || TREE_CODE (exp) == SSA_NAME
3647 && ! TREE_ADDRESSABLE (exp)
3648 && ! TREE_THIS_VOLATILE (exp)
3649 && ! DECL_NONLOCAL (exp)
3650 /* Don't regard global variables as simple. They may be
3651 allocated in ways unknown to the compiler (shared memory,
3652 #pragma weak, etc). */
3653 && ! TREE_PUBLIC (exp)
3654 && ! DECL_EXTERNAL (exp)
3655 /* Loading a static variable is unduly expensive, but global
3656 registers aren't expensive. */
3657 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3660 /* The following functions are subroutines to fold_range_test and allow it to
3661 try to change a logical combination of comparisons into a range test.
3664 X == 2 || X == 3 || X == 4 || X == 5
3668 (unsigned) (X - 2) <= 3
3670 We describe each set of comparisons as being either inside or outside
3671 a range, using a variable named like IN_P, and then describe the
3672 range with a lower and upper bound. If one of the bounds is omitted,
3673 it represents either the highest or lowest value of the type.
3675 In the comments below, we represent a range by two numbers in brackets
3676 preceded by a "+" to designate being inside that range, or a "-" to
3677 designate being outside that range, so the condition can be inverted by
3678 flipping the prefix. An omitted bound is represented by a "-". For
3679 example, "- [-, 10]" means being outside the range starting at the lowest
3680 possible value and ending at 10, in other words, being greater than 10.
3681 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3684 We set up things so that the missing bounds are handled in a consistent
3685 manner so neither a missing bound nor "true" and "false" need to be
3686 handled using a special case. */
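/* In this notation the example above reads: each test X == N is the
   range + [N, N], the four together combine into + [2, 5], and
   build_range_check below renders + [2, 5] as (unsigned) (X - 2) <= 3.  */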
3688 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3689 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3690 and UPPER1_P are nonzero if the respective argument is an upper bound
3691 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3692 must be specified for a comparison. ARG1 will be converted to ARG0's
3693 type if both are specified. */
3696 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3697 tree arg1, int upper1_p)
3703 /* If neither arg represents infinity, do the normal operation.
3704 Else, if not a comparison, return infinity. Else handle the special
3705 comparison rules. Note that most of the cases below won't occur, but
3706 are handled for consistency. */
3708 if (arg0 != 0 && arg1 != 0)
3710 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3711 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3713 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3716 if (TREE_CODE_CLASS (code) != tcc_comparison)
3719 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3720 for neither. In real maths, we cannot assume open ended ranges are
3721 the same. But, this is computer arithmetic, where numbers are finite.
3722 We can therefore make the transformation of any unbounded range with
3723 the value Z, Z being greater than any representable number. This permits
3724 us to treat unbounded ranges as equal. */
3725 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3726 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3730 result = sgn0 == sgn1;
3733 result = sgn0 != sgn1;
3736 result = sgn0 < sgn1;
3739 result = sgn0 <= sgn1;
3742 result = sgn0 > sgn1;
3745 result = sgn0 >= sgn1;
3751 return constant_boolean_node (result, type);
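/* So, e.g., a missing lower bound (SGN0 == -1) compares LT_EXPR-true
   against any finite value (SGN1 == 0), matching the convention that an
   omitted bound stands for the extreme value of the type.  */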
3754 /* Given EXP, a logical expression, set the range it is testing into
3755 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3756 actually being tested. *PLOW and *PHIGH will be made of the same type
3757 as the returned expression. If EXP is not a comparison, we will most
3758 likely not be returning a useful value and range. */
3761 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3763 enum tree_code code;
3764 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3765 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3767 tree low, high, n_low, n_high;
3769 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3770 and see if we can refine the range. Some of the cases below may not
3771 happen, but it doesn't seem worth worrying about this. We "continue"
3772 the outer loop when we've changed something; otherwise we "break"
3773 the switch, which will "break" the while. */
3776 low = high = build_int_cst (TREE_TYPE (exp), 0);
3780 code = TREE_CODE (exp);
3781 exp_type = TREE_TYPE (exp);
3783 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3785 if (TREE_CODE_LENGTH (code) > 0)
3786 arg0 = TREE_OPERAND (exp, 0);
3787 if (TREE_CODE_CLASS (code) == tcc_comparison
3788 || TREE_CODE_CLASS (code) == tcc_unary
3789 || TREE_CODE_CLASS (code) == tcc_binary)
3790 arg0_type = TREE_TYPE (arg0);
3791 if (TREE_CODE_CLASS (code) == tcc_binary
3792 || TREE_CODE_CLASS (code) == tcc_comparison
3793 || (TREE_CODE_CLASS (code) == tcc_expression
3794 && TREE_CODE_LENGTH (code) > 1))
3795 arg1 = TREE_OPERAND (exp, 1);
3800 case TRUTH_NOT_EXPR:
3801 in_p = ! in_p, exp = arg0;
3804 case EQ_EXPR: case NE_EXPR:
3805 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3806 /* We can only do something if the range is testing for zero
3807 and if the second operand is an integer constant. Note that
3808 saying something is "in" the range we make is done by
3809 complementing IN_P since it will set in the initial case of
3810 being not equal to zero; "out" is leaving it alone. */
3811 if (low == 0 || high == 0
3812 || ! integer_zerop (low) || ! integer_zerop (high)
3813 || TREE_CODE (arg1) != INTEGER_CST)
3818 case NE_EXPR: /* - [c, c] */
3821 case EQ_EXPR: /* + [c, c] */
3822 in_p = ! in_p, low = high = arg1;
3824 case GT_EXPR: /* - [-, c] */
3825 low = 0, high = arg1;
3827 case GE_EXPR: /* + [c, -] */
3828 in_p = ! in_p, low = arg1, high = 0;
3830 case LT_EXPR: /* - [c, -] */
3831 low = arg1, high = 0;
3833 case LE_EXPR: /* + [-, c] */
3834 in_p = ! in_p, low = 0, high = arg1;
3840 /* If this is an unsigned comparison, we also know that EXP is
3841 greater than or equal to zero. We base the range tests we make
3842 on that fact, so we record it here so we can parse existing
3843 range tests. We test arg0_type since often the return type
3844 of, e.g. EQ_EXPR, is boolean. */
3845 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3847 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3849 build_int_cst (arg0_type, 0),
3853 in_p = n_in_p, low = n_low, high = n_high;
3855 /* If the high bound is missing, but we have a nonzero low
3856 bound, reverse the range so it goes from zero to the low bound minus 1. */
3858 if (high == 0 && low && ! integer_zerop (low))
3861 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3862 integer_one_node, 0);
3863 low = build_int_cst (arg0_type, 0);
3871 /* (-x) IN [a,b] -> x in [-b, -a] */
3872 n_low = range_binop (MINUS_EXPR, exp_type,
3873 build_int_cst (exp_type, 0),
3875 n_high = range_binop (MINUS_EXPR, exp_type,
3876 build_int_cst (exp_type, 0),
3878 low = n_low, high = n_high;
3884 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3885 build_int_cst (exp_type, 1));
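/* Since ~X == -X - 1 in two's complement, the range test on a
   BIT_NOT_EXPR is rewritten this way and picked up again by the
   NEGATE_EXPR and MINUS_EXPR cases on the next trip around the loop.  */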
3888 case PLUS_EXPR: case MINUS_EXPR:
3889 if (TREE_CODE (arg1) != INTEGER_CST)
3892 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3893 move a constant to the other side. */
3894 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3897 /* If EXP is signed, any overflow in the computation is undefined,
3898 so we don't worry about it so long as our computations on
3899 the bounds don't overflow. For unsigned, overflow is defined
3900 and this is exactly the right thing. */
3901 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3902 arg0_type, low, 0, arg1, 0);
3903 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3904 arg0_type, high, 1, arg1, 0);
3905 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3906 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3909 /* Check for an unsigned range which has wrapped around the maximum
3910 value thus making n_high < n_low, and normalize it. */
3911 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3913 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3914 integer_one_node, 0);
3915 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3916 integer_one_node, 0);
3918 /* If the range is of the form +/- [ x+1, x ], we won't
3919 be able to normalize it. But then, it represents the
3920 whole range or the empty set, so make it +/- [ -, - ]. */
3922 if (tree_int_cst_equal (n_low, low)
3923 && tree_int_cst_equal (n_high, high))
3929 low = n_low, high = n_high;
3934 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3935 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3938 if (! INTEGRAL_TYPE_P (arg0_type)
3939 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3940 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3943 n_low = low, n_high = high;
3946 n_low = fold_convert (arg0_type, n_low);
3949 n_high = fold_convert (arg0_type, n_high);
3952 /* If we're converting arg0 from an unsigned type, to exp,
3953 a signed type, we will be doing the comparison as unsigned.
3954 The tests above have already verified that LOW and HIGH are both positive.
3957 So we have to ensure that we will handle large unsigned
3958 values the same way that the current signed bounds treat negative values. */
3961 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3964 tree equiv_type = lang_hooks.types.type_for_mode
3965 (TYPE_MODE (arg0_type), 1);
3967 /* A range without an upper bound is, naturally, unbounded.
3968 Since convert would have cropped a very large value, use
3969 the max value for the destination type. */
3971 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3972 : TYPE_MAX_VALUE (arg0_type);
3974 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3975 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3976 fold_convert (arg0_type,
3978 fold_convert (arg0_type,
3981 /* If the low bound is specified, "and" the range with the
3982 range for which the original unsigned value will be positive. */
3986 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3987 1, n_low, n_high, 1,
3988 fold_convert (arg0_type,
3993 in_p = (n_in_p == in_p);
3997 /* Otherwise, "or" the range with the range of the input
3998 that will be interpreted as negative. */
3999 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4000 0, n_low, n_high, 1,
4001 fold_convert (arg0_type,
4006 in_p = (in_p != n_in_p);
4011 low = n_low, high = n_high;
4021 /* If EXP is a constant, we can evaluate whether this is true or false. */
4022 if (TREE_CODE (exp) == INTEGER_CST)
4024 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4026 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4032 *pin_p = in_p, *plow = low, *phigh = high;
4036 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4037 type, TYPE, return an expression to test if EXP is in (or out of, depending
4038 on IN_P) the range. Return 0 if the test couldn't be created. */
4041 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4043 tree etype = TREE_TYPE (exp);
4046 #ifdef HAVE_canonicalize_funcptr_for_compare
4047 /* Disable this optimization for function pointer expressions
4048 on targets that require function pointer canonicalization. */
4049 if (HAVE_canonicalize_funcptr_for_compare
4050 && TREE_CODE (etype) == POINTER_TYPE
4051 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4057 value = build_range_check (type, exp, 1, low, high);
4059 return invert_truthvalue (value);
4064 if (low == 0 && high == 0)
4065 return build_int_cst (type, 1);
4068 return fold_build2 (LE_EXPR, type, exp,
4069 fold_convert (etype, high));
4072 return fold_build2 (GE_EXPR, type, exp,
4073 fold_convert (etype, low));
4075 if (operand_equal_p (low, high, 0))
4076 return fold_build2 (EQ_EXPR, type, exp,
4077 fold_convert (etype, low));
4079 if (integer_zerop (low))
4081 if (! TYPE_UNSIGNED (etype))
4083 etype = lang_hooks.types.unsigned_type (etype);
4084 high = fold_convert (etype, high);
4085 exp = fold_convert (etype, exp);
4087 return build_range_check (type, exp, 1, 0, high);
4090 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4091 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4093 unsigned HOST_WIDE_INT lo;
4097 prec = TYPE_PRECISION (etype);
4098 if (prec <= HOST_BITS_PER_WIDE_INT)
4101 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4105 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4106 lo = (unsigned HOST_WIDE_INT) -1;
4109 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4111 if (TYPE_UNSIGNED (etype))
4113 etype = lang_hooks.types.signed_type (etype);
4114 exp = fold_convert (etype, exp);
4116 return fold_build2 (GT_EXPR, type, exp,
4117 build_int_cst (etype, 0));
4121 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4122 This requires wrap-around arithmetic for the type of the expression. */
4123 switch (TREE_CODE (etype))
4126 /* There is no requirement that LOW be within the range of ETYPE
4127 if the latter is a subtype. It must, however, be within the base
4128 type of ETYPE. So be sure we do the subtraction in that type. */
4129 if (TREE_TYPE (etype))
4130 etype = TREE_TYPE (etype);
4135 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4136 TYPE_UNSIGNED (etype));
4143 /* If we don't have wrap-around arithmetic up front, try to force it. */
4144 if (TREE_CODE (etype) == INTEGER_TYPE
4145 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4147 tree utype, minv, maxv;
4149 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4150 for the type in question, as we rely on this here. */
4151 utype = lang_hooks.types.unsigned_type (etype);
4152 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4153 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4154 integer_one_node, 1);
4155 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4157 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4164 high = fold_convert (etype, high);
4165 low = fold_convert (etype, low);
4166 exp = fold_convert (etype, exp);
4168 value = const_binop (MINUS_EXPR, high, low, 0);
4170 if (value != 0 && !TREE_OVERFLOW (value))
4171 return build_range_check (type,
4172 fold_build2 (MINUS_EXPR, etype, exp, low),
4173 1, build_int_cst (etype, 0), value);
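/* E.g. for low == 2 and high == 5 this recurses with EXP - 2 against
   [0, 3]; the integer_zerop (low) case above then emits the final
   unsigned test (unsigned) (EXP - 2) <= 3.  */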
4178 /* Return the predecessor of VAL in its type, handling the infinite case. */
4181 range_predecessor (tree val)
4183 tree type = TREE_TYPE (val);
4185 if (INTEGRAL_TYPE_P (type)
4186 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4189 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4192 /* Return the successor of VAL in its type, handling the infinite case. */
4195 range_successor (tree val)
4197 tree type = TREE_TYPE (val);
4199 if (INTEGRAL_TYPE_P (type)
4200 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4203 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4206 /* Given two ranges, see if we can merge them into one. Return 1 if we
4207 can, 0 if we can't. Set the output range into the specified parameters. */
4210 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4211 tree high0, int in1_p, tree low1, tree high1)
4219 int lowequal = ((low0 == 0 && low1 == 0)
4220 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4221 low0, 0, low1, 0)));
4222 int highequal = ((high0 == 0 && high1 == 0)
4223 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4224 high0, 1, high1, 1)));
4226 /* Make range 0 be the range that starts first, or ends last if they
4227 start at the same value. Swap them if it isn't. */
4228 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4231 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4232 high1, 1, high0, 1))))
4234 temp = in0_p, in0_p = in1_p, in1_p = temp;
4235 tem = low0, low0 = low1, low1 = tem;
4236 tem = high0, high0 = high1, high1 = tem;
4239 /* Now flag two cases, whether the ranges are disjoint or whether the
4240 second range is totally subsumed in the first. Note that the tests
4241 below are simplified by the ones above. */
4242 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4243 high0, 1, low1, 0));
4244 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4245 high1, 1, high0, 1));
4247 /* We now have four cases, depending on whether we are including or
4248 excluding the two ranges. */
4251 /* If they don't overlap, the result is false. If the second range
4252 is a subset it is the result. Otherwise, the range is from the start
4253 of the second to the end of the first. */
4255 in_p = 0, low = high = 0;
4257 in_p = 1, low = low1, high = high1;
4259 in_p = 1, low = low1, high = high0;
4262 else if (in0_p && ! in1_p)
4264 /* If they don't overlap, the result is the first range. If they are
4265 equal, the result is false. If the second range is a subset of the
4266 first, and the ranges begin at the same place, we go from just after
4267 the end of the second range to the end of the first. If the second
4268 range is not a subset of the first, or if it is a subset and both
4269 ranges end at the same place, the range starts at the start of the
4270 first range and ends just before the second range.
4271 Otherwise, we can't describe this as a single range. */
4273 in_p = 1, low = low0, high = high0;
4274 else if (lowequal && highequal)
4275 in_p = 0, low = high = 0;
4276 else if (subset && lowequal)
4278 low = range_successor (high1);
4282 else if (! subset || highequal)
4285 high = range_predecessor (low1);
4292 else if (! in0_p && in1_p)
4294 /* If they don't overlap, the result is the second range. If the second
4295 is a subset of the first, the result is false. Otherwise,
4296 the range starts just after the first range and ends at the
4297 end of the second. */
4299 in_p = 1, low = low1, high = high1;
4300 else if (subset || highequal)
4301 in_p = 0, low = high = 0;
4304 low = range_successor (high0);
4312 /* The case where we are excluding both ranges. Here the complex case
4313 is if they don't overlap. In that case, the only time we have a
4314 range is if they are adjacent. If the second is a subset of the
4315 first, the result is the first. Otherwise, the range to exclude
4316 starts at the beginning of the first range and ends at the end of the second. */
4320 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 range_successor (high0),
4323 in_p = 0, low = low0, high = high1;
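/* E.g. - [2, 2] and - [3, 3] (from X != 2 && X != 3) are adjacent and
   combine into the single excluded range - [2, 3].  */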
4326 /* Canonicalize - [min, x] into - [-, x]. */
4327 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4328 switch (TREE_CODE (TREE_TYPE (low0)))
4331 if (TYPE_PRECISION (TREE_TYPE (low0))
4332 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4336 if (tree_int_cst_equal (low0,
4337 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4341 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4342 && integer_zerop (low0))
4349 /* Canonicalize - [x, max] into - [x, -]. */
4350 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4351 switch (TREE_CODE (TREE_TYPE (high1)))
4354 if (TYPE_PRECISION (TREE_TYPE (high1))
4355 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4359 if (tree_int_cst_equal (high1,
4360 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4364 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4365 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4367 integer_one_node, 1)))
4374 /* The ranges might be also adjacent between the maximum and
4375 minimum values of the given type. For
4376 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4377 return + [x + 1, y - 1]. */
4378 if (low0 == 0 && high1 == 0)
4380 low = range_successor (high0);
4381 high = range_predecessor (low1);
4382 if (low == 0 || high == 0)
4392 in_p = 0, low = low0, high = high0;
4394 in_p = 0, low = low0, high = high1;
4397 *pin_p = in_p, *plow = low, *phigh = high;
4402 /* Subroutine of fold, looking inside expressions of the form
4403 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4404 of the COND_EXPR. This function is being used also to optimize
4405 A op B ? C : A, by reversing the comparison first.
4407 Return a folded expression whose code is not a COND_EXPR
4408 anymore, or NULL_TREE if no folding opportunity is found. */
4411 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4413 enum tree_code comp_code = TREE_CODE (arg0);
4414 tree arg00 = TREE_OPERAND (arg0, 0);
4415 tree arg01 = TREE_OPERAND (arg0, 1);
4416 tree arg1_type = TREE_TYPE (arg1);
4422 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4425 A == 0? A : -A same as -A
4426 A != 0? A : -A same as A
4427 A >= 0? A : -A same as abs (A)
4428 A > 0? A : -A same as abs (A)
4429 A <= 0? A : -A same as -abs (A)
4430 A < 0? A : -A same as -abs (A)
4432 None of these transformations work for modes with signed
4433 zeros. If A is +/-0, the first two transformations will
4434 change the sign of the result (from +0 to -0, or vice
4435 versa). The last four will fix the sign of the result,
4436 even though the original expressions could be positive or
4437 negative, depending on the sign of A.
4439 Note that all these transformations are correct if A is
4440 NaN, since the two alternatives (A and -A) are also NaNs. */
4441 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4442 ? real_zerop (arg01)
4443 : integer_zerop (arg01))
4444 && ((TREE_CODE (arg2) == NEGATE_EXPR
4445 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4446 /* In the case that A is of the form X-Y, '-A' (arg2) may
4447 have already been folded to Y-X, check for that. */
4448 || (TREE_CODE (arg1) == MINUS_EXPR
4449 && TREE_CODE (arg2) == MINUS_EXPR
4450 && operand_equal_p (TREE_OPERAND (arg1, 0),
4451 TREE_OPERAND (arg2, 1), 0)
4452 && operand_equal_p (TREE_OPERAND (arg1, 1),
4453 TREE_OPERAND (arg2, 0), 0))))
4458 tem = fold_convert (arg1_type, arg1);
4459 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4462 return pedantic_non_lvalue (fold_convert (type, arg1));
4465 if (flag_trapping_math)
4470 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4471 arg1 = fold_convert (lang_hooks.types.signed_type
4472 (TREE_TYPE (arg1)), arg1);
4473 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4474 return pedantic_non_lvalue (fold_convert (type, tem));
4477 if (flag_trapping_math)
4481 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4482 arg1 = fold_convert (lang_hooks.types.signed_type
4483 (TREE_TYPE (arg1)), arg1);
4484 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4485 return negate_expr (fold_convert (type, tem));
4487 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4491 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4492 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4493 both transformations are correct when A is NaN: A != 0
4494 is then true, and A == 0 is false. */
4496 if (integer_zerop (arg01) && integer_zerop (arg2))
4498 if (comp_code == NE_EXPR)
4499 return pedantic_non_lvalue (fold_convert (type, arg1));
4500 else if (comp_code == EQ_EXPR)
4501 return build_int_cst (type, 0);
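/* Illustrative examples (added commentary, not in the original): at
   the source level the cases handled so far fold, e.g.,

	x >= 0 ? x : -x    into  ABS_EXPR <x>
	x != 0 ? x : 0     into  x

   provided the type of x has no signed zeros to honor.  */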
4504 /* Try some transformations of A op B ? A : B.
4506 A == B? A : B same as B
4507 A != B? A : B same as A
4508 A >= B? A : B same as max (A, B)
4509 A > B? A : B same as max (B, A)
4510 A <= B? A : B same as min (A, B)
4511 A < B? A : B same as min (B, A)
4513 As above, these transformations don't work in the presence
4514 of signed zeros. For example, if A and B are zeros of
4515 opposite sign, the first two transformations will change
4516 the sign of the result. In the last four, the original
4517 expressions give different results for (A=+0, B=-0) and
4518 (A=-0, B=+0), but the transformed expressions do not.
4520 The first two transformations are correct if either A or B
4521 is a NaN. In the first transformation, the condition will
4522 be false, and B will indeed be chosen. In the case of the
4523 second transformation, the condition A != B will be true,
4524 and A will be chosen.
4526 The conversions to max() and min() are not correct if B is
4527 a number and A is not. The conditions in the original
4528 expressions will be false, so all four give B. The min()
4529 and max() versions would give a NaN instead. */
4530 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4531 /* Avoid these transformations if the COND_EXPR may be used
4532 as an lvalue in the C++ front-end. PR c++/19199. */
4534 || (strcmp (lang_hooks.name, "GNU C++") != 0
4535 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4536 || ! maybe_lvalue_p (arg1)
4537 || ! maybe_lvalue_p (arg2)))
4539 tree comp_op0 = arg00;
4540 tree comp_op1 = arg01;
4541 tree comp_type = TREE_TYPE (comp_op0);
4543 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4544 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4554 return pedantic_non_lvalue (fold_convert (type, arg2));
4556 return pedantic_non_lvalue (fold_convert (type, arg1));
4561 /* In C++ a ?: expression can be an lvalue, so put the
4562 operand which will be used if they are equal first
4563 so that we can convert this back to the
4564 corresponding COND_EXPR. */
4565 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4567 comp_op0 = fold_convert (comp_type, comp_op0);
4568 comp_op1 = fold_convert (comp_type, comp_op1);
4569 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4570 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4571 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4572 return pedantic_non_lvalue (fold_convert (type, tem));
4579 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4581 comp_op0 = fold_convert (comp_type, comp_op0);
4582 comp_op1 = fold_convert (comp_type, comp_op1);
4583 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4584 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4585 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4586 return pedantic_non_lvalue (fold_convert (type, tem));
4590 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4591 return pedantic_non_lvalue (fold_convert (type, arg2));
4594 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4595 return pedantic_non_lvalue (fold_convert (type, arg1));
4598 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4603 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4604 we might still be able to simplify this. For example,
4605 if C1 is one less or one more than C2, this might have started
4606 out as a MIN or MAX and been transformed by this function.
4607 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4609 if (INTEGRAL_TYPE_P (type)
4610 && TREE_CODE (arg01) == INTEGER_CST
4611 && TREE_CODE (arg2) == INTEGER_CST)
4615 /* We can replace A with C1 in this case. */
4616 arg1 = fold_convert (type, arg01);
4617 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4620 /* If C1 is C2 + 1, this is min(A, C2). */
4621 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4623 && operand_equal_p (arg01,
4624 const_binop (PLUS_EXPR, arg2,
4625 integer_one_node, 0),
4627 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4632 /* If C1 is C2 - 1, this is min(A, C2). */
4633 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4635 && operand_equal_p (arg01,
4636 const_binop (MINUS_EXPR, arg2,
4637 integer_one_node, 0),
4639 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4644 /* If C1 is C2 - 1, this is max(A, C2). */
4645 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4647 && operand_equal_p (arg01,
4648 const_binop (MINUS_EXPR, arg2,
4649 integer_one_node, 0),
4651 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4656 /* If C1 is C2 + 1, this is max(A, C2). */
4657 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4659 && operand_equal_p (arg01,
4660 const_binop (PLUS_EXPR, arg2,
4661 integer_one_node, 0),
4663 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4677 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4678 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4681 /* EXP is some logical combination of boolean tests. See if we can
4682 merge it into some range test. Return the new tree if so. */
4685 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4687 int or_op = (code == TRUTH_ORIF_EXPR
4688 || code == TRUTH_OR_EXPR);
4689 int in0_p, in1_p, in_p;
4690 tree low0, low1, low, high0, high1, high;
4691 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4692 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4695 /* If this is an OR operation, invert both sides; we will invert
4696 again at the end. */
4698 in0_p = ! in0_p, in1_p = ! in1_p;
4700 /* If both expressions are the same, if we can merge the ranges, and we
4701 can build the range test, return it or its inversion. If one of the
4702 ranges is always true or always false, consider it to be the same
4703 expression as the other. */
4704 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4705 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4707 && 0 != (tem = (build_range_check (type,
4709 : rhs != 0 ? rhs : integer_zero_node,
4711 return or_op ? invert_truthvalue (tem) : tem;
4713 /* On machines where branches are expensive, if this is a
4714 short-circuited branch and the underlying object on both sides
4715 is the same, make a non-short-circuit operation. */
4716 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4717 && lhs != 0 && rhs != 0
4718 && (code == TRUTH_ANDIF_EXPR
4719 || code == TRUTH_ORIF_EXPR)
4720 && operand_equal_p (lhs, rhs, 0))
4722 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4723 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4724 which cases we can't do this. */
4725 if (simple_operand_p (lhs))
4726 return build2 (code == TRUTH_ANDIF_EXPR
4727 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4730 else if (lang_hooks.decls.global_bindings_p () == 0
4731 && ! CONTAINS_PLACEHOLDER_P (lhs))
4733 tree common = save_expr (lhs);
4735 if (0 != (lhs = build_range_check (type, common,
4736 or_op ? ! in0_p : in0_p,
4738 && (0 != (rhs = build_range_check (type, common,
4739 or_op ? ! in1_p : in1_p,
4741 return build2 (code == TRUTH_ANDIF_EXPR
4742 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4750 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4751 P-bit value. Arrange things so the extra bits will be set to zero if and
4752 only if C is sign-extended to its full width. If MASK is nonzero,
4753 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4756 unextend (tree c, int p, int unsignedp, tree mask)
4758 tree type = TREE_TYPE (c);
4759 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4762 if (p == modesize || unsignedp)
4765 /* We work by getting just the sign bit into the low-order bit, then
4766 into the high-order bit, then sign-extend. We then XOR that value with C.
4768 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4769 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4771 /* We must use a signed type in order to get an arithmetic right shift.
4772 However, we must also avoid introducing accidental overflows, so that
4773 a subsequent call to integer_zerop will work. Hence we must
4774 do the type conversion here. At this point, the constant is either
4775 zero or one, and the conversion to a signed type can never overflow.
4776 We could get an overflow if this conversion is done anywhere else. */
4777 if (TYPE_UNSIGNED (type))
4778 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4780 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4781 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4783 temp = const_binop (BIT_AND_EXPR, temp,
4784 fold_convert (TREE_TYPE (c), mask), 0);
4785 /* If necessary, convert the type back to match the type of C. */
4786 if (TYPE_UNSIGNED (type))
4787 temp = fold_convert (type, temp);
4789 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
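/* Illustrative sketch (hypothetical helper, not in the original
   sources): the shift-and-XOR trick unextend performs, written for a
   P-bit value carried in a 32-bit int.  Assumes two's complement and
   an arithmetic right shift of negative signed values, which the code
   above arranges by converting to a signed type; the folder itself
   does all of this on constants via const_binop, not at run time.  */
static int
unextend_sketch (int c, int p)
{
  int temp = (c >> (p - 1)) & 1;   /* isolate the sign bit of the
				      P-bit value */
  temp <<= 32 - 1;		   /* move it to the high-order bit */
  temp >>= 32 - p - 1;		   /* arithmetic shift replicates it
				      across the extra bits */
  return c ^ temp;		   /* extra bits are now zero iff C
				      was sign-extended */
}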
4792 /* Find ways of folding logical expressions of LHS and RHS:
4793 Try to merge two comparisons to the same innermost item.
4794 Look for range tests like "ch >= '0' && ch <= '9'".
4795 Look for combinations of simple terms on machines with expensive branches
4796 and evaluate the RHS unconditionally.
4798 For example, if we have p->a == 2 && p->b == 4 and we can make an
4799 object large enough to span both A and B, we can do this with a comparison
4800 against the object ANDed with a mask.
4802 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4803 operations to do this with one comparison.
4805 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4806 function and the one above.
4808 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4809 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4811 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4814 We return the simplified tree or 0 if no optimization is possible. */
4817 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4819 /* If this is the "or" of two comparisons, we can do something if
4820 the comparisons are NE_EXPR. If this is the "and", we can do something
4821 if the comparisons are EQ_EXPR. I.e.,
4822 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4824 WANTED_CODE is this operation code. For single bit fields, we can
4825 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4826 comparison for one-bit fields. */
4828 enum tree_code wanted_code;
4829 enum tree_code lcode, rcode;
4830 tree ll_arg, lr_arg, rl_arg, rr_arg;
4831 tree ll_inner, lr_inner, rl_inner, rr_inner;
4832 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4833 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4834 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4835 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4836 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4837 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4838 enum machine_mode lnmode, rnmode;
4839 tree ll_mask, lr_mask, rl_mask, rr_mask;
4840 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4841 tree l_const, r_const;
4842 tree lntype, rntype, result;
4843 int first_bit, end_bit;
4846 /* Start by getting the comparison codes. Fail if anything is volatile.
4847 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4848 it were surrounded with a NE_EXPR. */
4850 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4853 lcode = TREE_CODE (lhs);
4854 rcode = TREE_CODE (rhs);
4856 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4858 lhs = build2 (NE_EXPR, truth_type, lhs,
4859 build_int_cst (TREE_TYPE (lhs), 0));
4863 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4865 rhs = build2 (NE_EXPR, truth_type, rhs,
4866 build_int_cst (TREE_TYPE (rhs), 0));
4870 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4871 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4874 ll_arg = TREE_OPERAND (lhs, 0);
4875 lr_arg = TREE_OPERAND (lhs, 1);
4876 rl_arg = TREE_OPERAND (rhs, 0);
4877 rr_arg = TREE_OPERAND (rhs, 1);
4879 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4880 if (simple_operand_p (ll_arg)
4881 && simple_operand_p (lr_arg))
4884 if (operand_equal_p (ll_arg, rl_arg, 0)
4885 && operand_equal_p (lr_arg, rr_arg, 0))
4887 result = combine_comparisons (code, lcode, rcode,
4888 truth_type, ll_arg, lr_arg);
4892 else if (operand_equal_p (ll_arg, rr_arg, 0)
4893 && operand_equal_p (lr_arg, rl_arg, 0))
4895 result = combine_comparisons (code, lcode,
4896 swap_tree_comparison (rcode),
4897 truth_type, ll_arg, lr_arg);
4903 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4904 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4906 /* If the RHS can be evaluated unconditionally and its operands are
4907 simple, it wins to evaluate the RHS unconditionally on machines
4908 with expensive branches. In this case, this isn't a comparison
4909 that can be merged. Avoid doing this if the RHS is a floating-point
4910 comparison since those can trap. */
4912 if (BRANCH_COST >= 2
4913 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4914 && simple_operand_p (rl_arg)
4915 && simple_operand_p (rr_arg))
4917 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4918 if (code == TRUTH_OR_EXPR
4919 && lcode == NE_EXPR && integer_zerop (lr_arg)
4920 && rcode == NE_EXPR && integer_zerop (rr_arg)
4921 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4922 return build2 (NE_EXPR, truth_type,
4923 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4925 build_int_cst (TREE_TYPE (ll_arg), 0));
4927 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4928 if (code == TRUTH_AND_EXPR
4929 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4930 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4931 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4932 return build2 (EQ_EXPR, truth_type,
4933 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4935 build_int_cst (TREE_TYPE (ll_arg), 0));
4937 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4938 return build2 (code, truth_type, lhs, rhs);
4941 /* See if the comparisons can be merged. Then get all the parameters for each side.
4944 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4945 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4949 ll_inner = decode_field_reference (ll_arg,
4950 &ll_bitsize, &ll_bitpos, &ll_mode,
4951 &ll_unsignedp, &volatilep, &ll_mask,
4953 lr_inner = decode_field_reference (lr_arg,
4954 &lr_bitsize, &lr_bitpos, &lr_mode,
4955 &lr_unsignedp, &volatilep, &lr_mask,
4957 rl_inner = decode_field_reference (rl_arg,
4958 &rl_bitsize, &rl_bitpos, &rl_mode,
4959 &rl_unsignedp, &volatilep, &rl_mask,
4961 rr_inner = decode_field_reference (rr_arg,
4962 &rr_bitsize, &rr_bitpos, &rr_mode,
4963 &rr_unsignedp, &volatilep, &rr_mask,
4966 /* The inner operation on the lhs of each
4967 comparison must be the same if we are to be able to do anything.
4968 Then see if we have constants. If not, the same must be true for the rhs.
4970 if (volatilep || ll_inner == 0 || rl_inner == 0
4971 || ! operand_equal_p (ll_inner, rl_inner, 0))
4974 if (TREE_CODE (lr_arg) == INTEGER_CST
4975 && TREE_CODE (rr_arg) == INTEGER_CST)
4976 l_const = lr_arg, r_const = rr_arg;
4977 else if (lr_inner == 0 || rr_inner == 0
4978 || ! operand_equal_p (lr_inner, rr_inner, 0))
4981 l_const = r_const = 0;
4983 /* If either comparison code is not correct for our logical operation,
4984 fail. However, we can convert a one-bit comparison against zero into
4985 the opposite comparison against that bit being set in the field. */
4987 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4988 if (lcode != wanted_code)
4990 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4992 /* Make the left operand unsigned, since we are only interested
4993 in the value of one bit. Otherwise we are doing the wrong thing below.
5002 /* This is analogous to the code for l_const above. */
5003 if (rcode != wanted_code)
5005 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5014 /* After this point all optimizations will generate bit-field
5015 references, which we might not want. */
5016 if (! lang_hooks.can_use_bit_fields_p ())
5019 /* See if we can find a mode that contains both fields being compared on
5020 the left. If we can't, fail. Otherwise, update all constants and masks
5021 to be relative to a field of that size. */
5022 first_bit = MIN (ll_bitpos, rl_bitpos);
5023 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5024 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5025 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5027 if (lnmode == VOIDmode)
5030 lnbitsize = GET_MODE_BITSIZE (lnmode);
5031 lnbitpos = first_bit & ~ (lnbitsize - 1);
5032 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5033 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5035 if (BYTES_BIG_ENDIAN)
5037 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5038 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5041 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5042 size_int (xll_bitpos), 0);
5043 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5044 size_int (xrl_bitpos), 0);
5048 l_const = fold_convert (lntype, l_const);
5049 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5050 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5051 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5052 fold_build1 (BIT_NOT_EXPR,
5056 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5058 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5063 r_const = fold_convert (lntype, r_const);
5064 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5065 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5066 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5067 fold_build1 (BIT_NOT_EXPR,
5071 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5073 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5077 /* If the right sides are not constant, do the same for them. Also,
5078 disallow this optimization if a size or signedness mismatch occurs
5079 between the left and right sides. */
5082 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5083 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5084 /* Make sure the two fields on the right
5085 correspond to the left without being swapped. */
5086 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5089 first_bit = MIN (lr_bitpos, rr_bitpos);
5090 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5091 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5092 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5094 if (rnmode == VOIDmode)
5097 rnbitsize = GET_MODE_BITSIZE (rnmode);
5098 rnbitpos = first_bit & ~ (rnbitsize - 1);
5099 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5100 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5102 if (BYTES_BIG_ENDIAN)
5104 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5105 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5108 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5109 size_int (xlr_bitpos), 0);
5110 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5111 size_int (xrr_bitpos), 0);
5113 /* Make a mask that corresponds to both fields being compared.
5114 Do this for both items being compared. If the operands are the
5115 same size and the bits being compared are in the same position
5116 then we can do this by masking both and comparing the masked results.
5118 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5119 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5120 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5122 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5123 ll_unsignedp || rl_unsignedp);
5124 if (! all_ones_mask_p (ll_mask, lnbitsize))
5125 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5127 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5128 lr_unsignedp || rr_unsignedp);
5129 if (! all_ones_mask_p (lr_mask, rnbitsize))
5130 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5132 return build2 (wanted_code, truth_type, lhs, rhs);
5135 /* There is still another way we can do something: If both pairs of
5136 fields being compared are adjacent, we may be able to make a wider
5137 field containing them both.
5139 Note that we still must mask the lhs/rhs expressions. Furthermore,
5140 the mask must be shifted to account for the shift done by
5141 make_bit_field_ref. */
5142 if ((ll_bitsize + ll_bitpos == rl_bitpos
5143 && lr_bitsize + lr_bitpos == rr_bitpos)
5144 || (ll_bitpos == rl_bitpos + rl_bitsize
5145 && lr_bitpos == rr_bitpos + rr_bitsize))
5149 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5150 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5151 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5152 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5154 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5155 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5156 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5157 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5159 /* Convert to the smaller type before masking out unwanted bits. */
5161 if (lntype != rntype)
5163 if (lnbitsize > rnbitsize)
5165 lhs = fold_convert (rntype, lhs);
5166 ll_mask = fold_convert (rntype, ll_mask);
5169 else if (lnbitsize < rnbitsize)
5171 rhs = fold_convert (lntype, rhs);
5172 lr_mask = fold_convert (lntype, lr_mask);
5177 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5178 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5180 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5181 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5183 return build2 (wanted_code, truth_type, lhs, rhs);
5189 /* Handle the case of comparisons with constants. If there is something in
5190 common between the masks, those bits of the constants must be the same.
5191 If not, the condition is always false. Test for this to avoid generating
5192 incorrect code below. */
5193 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5194 if (! integer_zerop (result)
5195 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5196 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5198 if (wanted_code == NE_EXPR)
5200 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5201 return constant_boolean_node (true, truth_type);
5205 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5206 return constant_boolean_node (false, truth_type);
5210 /* Construct the expression we will return. First get the component
5211 reference we will make. Unless the mask is all ones the width of
5212 that field, perform the mask operation. Then compare with the merged constant.
5214 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5215 ll_unsignedp || rl_unsignedp);
5217 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5218 if (! all_ones_mask_p (ll_mask, lnbitsize))
5219 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5221 return build2 (wanted_code, truth_type, result,
5222 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
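/* Illustrative sketch (hypothetical type and values, not original
   code): the shape of source fold_truthop merges.  With both fields
   packed into one byte, the two equality tests can become a single
   load, mask and compare; the exact constant depends on the ABI's
   bit-field layout and byte order.  */
struct truthop_sketch { unsigned a : 4; unsigned b : 4; };

static int
truthop_before (const struct truthop_sketch *p)
{
  /* May fold to one comparison of the containing byte against the
     constant encoding a == 2 and b == 4 together.  */
  return p->a == 2 && p->b == 4;
}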
5225 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant.
5229 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5232 enum tree_code op_code;
5233 tree comp_const = op1;
5235 int consts_equal, consts_lt;
5238 STRIP_SIGN_NOPS (arg0);
5240 op_code = TREE_CODE (arg0);
5241 minmax_const = TREE_OPERAND (arg0, 1);
5242 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5243 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5244 inner = TREE_OPERAND (arg0, 0);
5246 /* If something does not permit us to optimize, return the original tree. */
5247 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5248 || TREE_CODE (comp_const) != INTEGER_CST
5249 || TREE_CONSTANT_OVERFLOW (comp_const)
5250 || TREE_CODE (minmax_const) != INTEGER_CST
5251 || TREE_CONSTANT_OVERFLOW (minmax_const))
5254 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5255 and GT_EXPR, doing the rest with recursive calls using logical simplifications.
5259 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5261 /* FIXME: We should be able to invert code without building a
5262 scratch tree node, but doing so would require us to
5263 duplicate a part of invert_truthvalue here. */
5264 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5265 tem = optimize_minmax_comparison (TREE_CODE (tem),
5267 TREE_OPERAND (tem, 0),
5268 TREE_OPERAND (tem, 1));
5269 return invert_truthvalue (tem);
5274 fold_build2 (TRUTH_ORIF_EXPR, type,
5275 optimize_minmax_comparison
5276 (EQ_EXPR, type, arg0, comp_const),
5277 optimize_minmax_comparison
5278 (GT_EXPR, type, arg0, comp_const));
5281 if (op_code == MAX_EXPR && consts_equal)
5282 /* MAX (X, 0) == 0 -> X <= 0 */
5283 return fold_build2 (LE_EXPR, type, inner, comp_const);
5285 else if (op_code == MAX_EXPR && consts_lt)
5286 /* MAX (X, 0) == 5 -> X == 5 */
5287 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5289 else if (op_code == MAX_EXPR)
5290 /* MAX (X, 0) == -1 -> false */
5291 return omit_one_operand (type, integer_zero_node, inner);
5293 else if (consts_equal)
5294 /* MIN (X, 0) == 0 -> X >= 0 */
5295 return fold_build2 (GE_EXPR, type, inner, comp_const);
5298 /* MIN (X, 0) == 5 -> false */
5299 return omit_one_operand (type, integer_zero_node, inner);
5302 /* MIN (X, 0) == -1 -> X == -1 */
5303 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5306 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5307 /* MAX (X, 0) > 0 -> X > 0
5308 MAX (X, 0) > 5 -> X > 5 */
5309 return fold_build2 (GT_EXPR, type, inner, comp_const);
5311 else if (op_code == MAX_EXPR)
5312 /* MAX (X, 0) > -1 -> true */
5313 return omit_one_operand (type, integer_one_node, inner);
5315 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5316 /* MIN (X, 0) > 0 -> false
5317 MIN (X, 0) > 5 -> false */
5318 return omit_one_operand (type, integer_zero_node, inner);
5321 /* MIN (X, 0) > -1 -> X > -1 */
5322 return fold_build2 (GT_EXPR, type, inner, comp_const);
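/* Illustrative sketch (hypothetical helper): the rewrites performed
   above, seen at the source level.  */
static int
minmax_cmp_sketch (int x)
{
  int m = x > 0 ? x : 0;	/* folds to MAX_EXPR <x, 0> */
  /* MAX (x, 0) > 5 simplifies to x > 5 (the GT_EXPR case with
     consts_lt); MAX (x, 0) > -1 would simplify to constant 1.  */
  return m > 5;
}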
5329 /* T is an integer expression that is being multiplied, divided, or taken a
5330 modulus (CODE says which and what kind of divide or modulus) by a
5331 constant C. See if we can eliminate that operation by folding it with
5332 other operations already in T. WIDE_TYPE, if non-null, is a type that
5333 should be used for the computation if wider than our type.
5335 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5336 (X * 2) + (Y * 4). We must, however, be assured that either the original
5337 expression would not overflow or that overflow is undefined for the type
5338 in the language in question.
5340 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5341 the machine has a multiply-accumulate insn or that this is part of an
5342 addressing calculation.
5344 If we return a non-null expression, it is an equivalent form of the
5345 original computation, but need not be in the original type. */
5348 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5350 /* To avoid exponential search depth, refuse to allow recursion past
5351 three levels. Beyond that (1) it's highly unlikely that we'll find
5352 something interesting and (2) we've probably processed it before
5353 when we built the inner expression. */
5362 ret = extract_muldiv_1 (t, c, code, wide_type);
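/* Illustrative sketch (hypothetical helper): the kind of rewrite
   extract_muldiv enables for its callers.  Valid only because signed
   overflow is undefined here, as the comment above requires.  */
static int
extract_muldiv_sketch (int x, int y)
{
  /* May be rewritten as x * 2 + y * 4, eliminating the division.  */
  return (x * 8 + y * 16) / 4;
}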
5369 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5371 tree type = TREE_TYPE (t);
5372 enum tree_code tcode = TREE_CODE (t);
5373 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5374 > GET_MODE_SIZE (TYPE_MODE (type)))
5375 ? wide_type : type);
5377 int same_p = tcode == code;
5378 tree op0 = NULL_TREE, op1 = NULL_TREE;
5380 /* Don't deal with constants of zero here; they confuse the code below. */
5381 if (integer_zerop (c))
5384 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5385 op0 = TREE_OPERAND (t, 0);
5387 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5388 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5390 /* Note that we need not handle conditional operations here since fold
5391 already handles those cases. So just do arithmetic here. */
5395 /* For a constant, we can always simplify if we are a multiply
5396 or (for divide and modulus) if it is a multiple of our constant. */
5397 if (code == MULT_EXPR
5398 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5399 return const_binop (code, fold_convert (ctype, t),
5400 fold_convert (ctype, c), 0);
5403 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5404 /* If op0 is an expression ... */
5405 if ((COMPARISON_CLASS_P (op0)
5406 || UNARY_CLASS_P (op0)
5407 || BINARY_CLASS_P (op0)
5408 || EXPRESSION_CLASS_P (op0))
5409 /* ... and is unsigned, and its type is smaller than ctype,
5410 then we cannot pass through as widening. */
5411 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5412 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5413 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5414 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5415 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5416 /* ... or this is a truncation (t is narrower than op0),
5417 then we cannot pass through this narrowing. */
5418 || (GET_MODE_SIZE (TYPE_MODE (type))
5419 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5420 /* ... or signedness changes for division or modulus,
5421 then we cannot pass through this conversion. */
5422 || (code != MULT_EXPR
5423 && (TYPE_UNSIGNED (ctype)
5424 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5427 /* Pass the constant down and see if we can make a simplification. If
5428 we can, replace this expression with the inner simplification for
5429 possible later conversion to our or some other type. */
5430 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5431 && TREE_CODE (t2) == INTEGER_CST
5432 && ! TREE_CONSTANT_OVERFLOW (t2)
5433 && (0 != (t1 = extract_muldiv (op0, t2, code,
5435 ? ctype : NULL_TREE))))
5440 /* If widening the type changes it from signed to unsigned, then we
5441 must avoid building ABS_EXPR itself as unsigned. */
5442 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5444 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5445 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5447 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5448 return fold_convert (ctype, t1);
5454 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5455 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5458 case MIN_EXPR: case MAX_EXPR:
5459 /* If widening the type changes the signedness, then we can't perform
5460 this optimization as that changes the result. */
5461 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5464 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5465 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5466 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5468 if (tree_int_cst_sgn (c) < 0)
5469 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5471 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5472 fold_convert (ctype, t2));
5476 case LSHIFT_EXPR: case RSHIFT_EXPR:
5477 /* If the second operand is constant, this is a multiplication
5478 or floor division by a power of two, so we can treat it that
5479 way unless the multiplier or divisor overflows. Signed
5480 left-shift overflow is implementation-defined rather than
5481 undefined in C90, so do not convert signed left shift into multiplication.
5483 if (TREE_CODE (op1) == INTEGER_CST
5484 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5485 /* const_binop may not detect overflow correctly,
5486 so check for it explicitly here. */
5487 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5488 && TREE_INT_CST_HIGH (op1) == 0
5489 && 0 != (t1 = fold_convert (ctype,
5490 const_binop (LSHIFT_EXPR,
5493 && ! TREE_OVERFLOW (t1))
5494 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5495 ? MULT_EXPR : FLOOR_DIV_EXPR,
5496 ctype, fold_convert (ctype, op0), t1),
5497 c, code, wide_type);
5500 case PLUS_EXPR: case MINUS_EXPR:
5501 /* See if we can eliminate the operation on both sides. If we can, we
5502 can return a new PLUS or MINUS. If we can't, the only remaining
5503 cases where we can do anything are if the second operand is a constant.
5505 t1 = extract_muldiv (op0, c, code, wide_type);
5506 t2 = extract_muldiv (op1, c, code, wide_type);
5507 if (t1 != 0 && t2 != 0
5508 && (code == MULT_EXPR
5509 /* If not multiplication, we can only do this if both operands
5510 are divisible by c. */
5511 || (multiple_of_p (ctype, op0, c)
5512 && multiple_of_p (ctype, op1, c))))
5513 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5514 fold_convert (ctype, t2));
5516 /* If this was a subtraction, negate OP1 and set it to be an addition.
5517 This simplifies the logic below. */
5518 if (tcode == MINUS_EXPR)
5519 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5521 if (TREE_CODE (op1) != INTEGER_CST)
5524 /* If either OP1 or C is negative, this optimization is not safe for
5525 some of the division and remainder types while for others we need
5526 to change the code. */
5527 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5529 if (code == CEIL_DIV_EXPR)
5530 code = FLOOR_DIV_EXPR;
5531 else if (code == FLOOR_DIV_EXPR)
5532 code = CEIL_DIV_EXPR;
5533 else if (code != MULT_EXPR
5534 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5538 /* If it's a multiply or a division/modulus operation of a multiple
5539 of our constant, do the operation and verify it doesn't overflow. */
5540 if (code == MULT_EXPR
5541 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5543 op1 = const_binop (code, fold_convert (ctype, op1),
5544 fold_convert (ctype, c), 0);
5545 /* We allow the constant to overflow with wrapping semantics. */
5547 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5553 /* If we have an unsigned type that is not a sizetype, we cannot widen
5554 the operation since it will change the result if the original
5555 computation overflowed. */
5556 if (TYPE_UNSIGNED (ctype)
5557 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5561 /* If we were able to eliminate our operation from the first side,
5562 apply our operation to the second side and reform the PLUS. */
5563 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5564 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5566 /* The last case is if we are a multiply. In that case, we can
5567 apply the distributive law to commute the multiply and addition
5568 if the multiplication of the constants doesn't overflow. */
5569 if (code == MULT_EXPR)
5570 return fold_build2 (tcode, ctype,
5571 fold_build2 (code, ctype,
5572 fold_convert (ctype, op0),
5573 fold_convert (ctype, c)),
5579 /* We have a special case here if we are doing something like
5580 (C * 8) % 4 since we know that's zero. */
5581 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5582 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5583 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5584 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5585 return omit_one_operand (type, integer_zero_node, op0);
5587 /* ... fall through ... */
5589 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5590 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5591 /* If we can extract our operation from the LHS, do so and return a
5592 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5593 do something only if the second operand is a constant. */
5595 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5596 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5597 fold_convert (ctype, op1));
5598 else if (tcode == MULT_EXPR && code == MULT_EXPR
5599 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5600 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5601 fold_convert (ctype, t1));
5602 else if (TREE_CODE (op1) != INTEGER_CST)
5605 /* If these are the same operation types, we can associate them
5606 assuming no overflow. */
5608 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5609 fold_convert (ctype, c), 0))
5610 && ! TREE_OVERFLOW (t1))
5611 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5613 /* If these operations "cancel" each other, we have the main
5614 optimizations of this pass, which occur when either constant is a
5615 multiple of the other, in which case we replace this with an
5616 operation of either CODE or TCODE.
5618 If we have an unsigned type that is not a sizetype, we cannot do
5619 this since it will change the result if the original computation overflowed.
5621 if ((! TYPE_UNSIGNED (ctype)
5622 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5624 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5625 || (tcode == MULT_EXPR
5626 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5627 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5629 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5630 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5631 fold_convert (ctype,
5632 const_binop (TRUNC_DIV_EXPR,
5634 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5635 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5636 fold_convert (ctype,
5637 const_binop (TRUNC_DIV_EXPR,
5649 /* Return a node which has the indicated constant VALUE (either 0 or
5650 1), and is of the indicated TYPE. */
5653 constant_boolean_node (int value, tree type)
5655 if (type == integer_type_node)
5656 return value ? integer_one_node : integer_zero_node;
5657 else if (type == boolean_type_node)
5658 return value ? boolean_true_node : boolean_false_node;
5660 return build_int_cst (type, value);
5664 /* Return true if expr looks like an ARRAY_REF and set base and
5665 offset to the appropriate trees. If there is no offset,
5666 offset is set to NULL_TREE. Base will be canonicalized to
5667 something you can get the element type from using
5668 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5669 in bytes relative to the base. */
5672 extract_array_ref (tree expr, tree *base, tree *offset)
5674 /* One canonical form is a PLUS_EXPR with the first
5675 argument being an ADDR_EXPR with a possible NOP_EXPR
5677 if (TREE_CODE (expr) == PLUS_EXPR)
5679 tree op0 = TREE_OPERAND (expr, 0);
5680 tree inner_base, dummy1;
5681 /* Strip NOP_EXPRs here because the C frontends and/or
5682 folders may present us with (int *)&x.a + 4B.
5684 if (extract_array_ref (op0, &inner_base, &dummy1))
5687 if (dummy1 == NULL_TREE)
5688 *offset = TREE_OPERAND (expr, 1);
5690 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5691 dummy1, TREE_OPERAND (expr, 1));
5695 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5696 which we transform into an ADDR_EXPR with appropriate
5697 offset. For other arguments to the ADDR_EXPR we assume
5698 zero offset and as such do not care about the ADDR_EXPR
5699 type and strip possible nops from it. */
5700 else if (TREE_CODE (expr) == ADDR_EXPR)
5702 tree op0 = TREE_OPERAND (expr, 0);
5703 if (TREE_CODE (op0) == ARRAY_REF)
5705 tree idx = TREE_OPERAND (op0, 1);
5706 *base = TREE_OPERAND (op0, 0);
5707 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5708 array_ref_element_size (op0));
5712 /* Handle array-to-pointer decay as &a. */
5713 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5714 *base = TREE_OPERAND (expr, 0);
5717 *offset = NULL_TREE;
5721 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5722 else if (SSA_VAR_P (expr)
5723 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5726 *offset = NULL_TREE;
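/* Illustrative sketch (hypothetical names): the decompositions the
   function above performs.  */
static int array_ref_sketch_a[16];

static int *
array_ref_sketch (int i)
{
  /* ADDR_EXPR-of-ARRAY_REF form: base is array_ref_sketch_a and the
     offset is i * sizeof (int) in bytes.  A PLUS_EXPR such as p + 4
     instead yields the base inside p with offset 4.  */
  return &array_ref_sketch_a[i];
}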
5734 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5735 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5736 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5737 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5738 COND is the first argument to CODE; otherwise (as in the example
5739 given here), it is the second argument. TYPE is the type of the
5740 original expression. Return NULL_TREE if no simplification is
5744 fold_binary_op_with_conditional_arg (enum tree_code code,
5745 tree type, tree op0, tree op1,
5746 tree cond, tree arg, int cond_first_p)
5748 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5749 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5750 tree test, true_value, false_value;
5751 tree lhs = NULL_TREE;
5752 tree rhs = NULL_TREE;
5754 /* This transformation is only worthwhile if we don't have to wrap
5755 arg in a SAVE_EXPR, and the operation can be simplified on at least
5756 one of the branches once it is pushed inside the COND_EXPR. */
5757 if (!TREE_CONSTANT (arg))
5760 if (TREE_CODE (cond) == COND_EXPR)
5762 test = TREE_OPERAND (cond, 0);
5763 true_value = TREE_OPERAND (cond, 1);
5764 false_value = TREE_OPERAND (cond, 2);
5765 /* If this operand is an expression that throws, then it does not make
5766 sense to try to perform a logical or arithmetic operation involving it.
5768 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5770 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5775 tree testtype = TREE_TYPE (cond);
5777 true_value = constant_boolean_node (true, testtype);
5778 false_value = constant_boolean_node (false, testtype);
5781 arg = fold_convert (arg_type, arg);
5784 true_value = fold_convert (cond_type, true_value);
5786 lhs = fold_build2 (code, type, true_value, arg);
5788 lhs = fold_build2 (code, type, arg, true_value);
5792 false_value = fold_convert (cond_type, false_value);
5794 rhs = fold_build2 (code, type, false_value, arg);
5796 rhs = fold_build2 (code, type, arg, false_value);
5799 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5800 return fold_convert (type, test);
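/* Illustrative sketch (hypothetical helper): the distribution done
   above.  The constant operand guarantees no SAVE_EXPR is needed.  */
static int
cond_arg_sketch (int b, int x, int y)
{
  /* 1 + (b ? x : y) becomes b ? 1 + x : 1 + y, which can then fold
     further in each arm when x or y is constant.  */
  return 1 + (b ? x : y);
}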
5804 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5806 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5807 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5808 ADDEND is the same as X.
5810 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5811 and finite. The problematic cases are when X is zero, and its mode
5812 has signed zeros. In the case of rounding towards -infinity,
5813 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5814 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5817 fold_real_zero_addition_p (tree type, tree addend, int negate)
5819 if (!real_zerop (addend))
5822 /* Don't allow the fold with -fsignaling-nans. */
5823 if (HONOR_SNANS (TYPE_MODE (type)))
5826 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5827 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5830 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5831 if (TREE_CODE (addend) == REAL_CST
5832 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5835 /* The mode has signed zeros, and we have to honor their sign.
5836 In this situation, there is only one case we can return true for.
5837 X - 0 is the same as X unless rounding towards -infinity is in effect.
5839 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
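/* Illustrative sketch (hypothetical helper): the asymmetry the
   predicate above captures.  */
static double
real_zero_add_sketch (double x)
{
  /* x - 0.0 may fold to x when sign-dependent rounding need not be
     honored, even with signed zeros: (-0.0) - 0.0 is -0.0 in the
     default rounding mode.  x + 0.0 may not, since (-0.0) + 0.0 is
     +0.0 there.  */
  return x - 0.0;
}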
5842 /* Subroutine of fold() that checks comparisons of built-in math
5843 functions against real constants.
5845 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5846 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5847 is the type of the result and ARG0 and ARG1 are the operands of the
5848 comparison. ARG1 must be a TREE_REAL_CST.
5850 The function returns the constant folded tree if a simplification
5851 can be made, and NULL_TREE otherwise. */
5854 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5855 tree type, tree arg0, tree arg1)
5859 if (BUILTIN_SQRT_P (fcode))
5861 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5862 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5864 c = TREE_REAL_CST (arg1);
5865 if (REAL_VALUE_NEGATIVE (c))
5867 /* sqrt(x) < y is always false, if y is negative. */
5868 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5869 return omit_one_operand (type, integer_zero_node, arg);
5871 /* sqrt(x) > y is always true, if y is negative and we
5872 don't care about NaNs, i.e. negative values of x. */
5873 if (code == NE_EXPR || !HONOR_NANS (mode))
5874 return omit_one_operand (type, integer_one_node, arg);
5876 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5877 return fold_build2 (GE_EXPR, type, arg,
5878 build_real (TREE_TYPE (arg), dconst0));
5880 else if (code == GT_EXPR || code == GE_EXPR)
5884 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5885 real_convert (&c2, mode, &c2);
5887 if (REAL_VALUE_ISINF (c2))
5889 /* sqrt(x) > y is x == +Inf, when y is very large. */
5890 if (HONOR_INFINITIES (mode))
5891 return fold_build2 (EQ_EXPR, type, arg,
5892 build_real (TREE_TYPE (arg), c2));
5894 /* sqrt(x) > y is always false, when y is very large
5895 and we don't care about infinities. */
5896 return omit_one_operand (type, integer_zero_node, arg);
5899 /* sqrt(x) > c is the same as x > c*c. */
5900 return fold_build2 (code, type, arg,
5901 build_real (TREE_TYPE (arg), c2));
5903 else if (code == LT_EXPR || code == LE_EXPR)
5907 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5908 real_convert (&c2, mode, &c2);
5910 if (REAL_VALUE_ISINF (c2))
5912 /* sqrt(x) < y is always true, when y is a very large
5913 value and we don't care about NaNs or Infinities. */
5914 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5915 return omit_one_operand (type, integer_one_node, arg);
5917 /* sqrt(x) < y is x != +Inf when y is very large and we
5918 don't care about NaNs. */
5919 if (! HONOR_NANS (mode))
5920 return fold_build2 (NE_EXPR, type, arg,
5921 build_real (TREE_TYPE (arg), c2));
5923 /* sqrt(x) < y is x >= 0 when y is very large and we
5924 don't care about Infinities. */
5925 if (! HONOR_INFINITIES (mode))
5926 return fold_build2 (GE_EXPR, type, arg,
5927 build_real (TREE_TYPE (arg), dconst0));
5929 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5930 if (lang_hooks.decls.global_bindings_p () != 0
5931 || CONTAINS_PLACEHOLDER_P (arg))
5934 arg = save_expr (arg);
5935 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5936 fold_build2 (GE_EXPR, type, arg,
5937 build_real (TREE_TYPE (arg),
5939 fold_build2 (NE_EXPR, type, arg,
5940 build_real (TREE_TYPE (arg),
5944 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5945 if (! HONOR_NANS (mode))
5946 return fold_build2 (code, type, arg,
5947 build_real (TREE_TYPE (arg), c2));
5949 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5950 if (lang_hooks.decls.global_bindings_p () == 0
5951 && ! CONTAINS_PLACEHOLDER_P (arg))
5953 arg = save_expr (arg);
5954 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5955 fold_build2 (GE_EXPR, type, arg,
5956 build_real (TREE_TYPE (arg),
5958 fold_build2 (code, type, arg,
5959 build_real (TREE_TYPE (arg),
5968 /* Subroutine of fold() that optimizes comparisons against Infinities,
5969 either +Inf or -Inf.
5971 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5972 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5973 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5975 The function returns the constant folded tree if a simplification
5976 can be made, and NULL_TREE otherwise. */
5979 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5981 enum machine_mode mode;
5982 REAL_VALUE_TYPE max;
5986 mode = TYPE_MODE (TREE_TYPE (arg0));
5988 /* For negative infinity swap the sense of the comparison. */
5989 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5991 code = swap_tree_comparison (code);
5996 /* x > +Inf is always false, if we ignore sNaNs. */
5997 if (HONOR_SNANS (mode))
5999 return omit_one_operand (type, integer_zero_node, arg0);
6002 /* x <= +Inf is always true, if we don't care about NaNs. */
6003 if (! HONOR_NANS (mode))
6004 return omit_one_operand (type, integer_one_node, arg0);
6006 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6007 if (lang_hooks.decls.global_bindings_p () == 0
6008 && ! CONTAINS_PLACEHOLDER_P (arg0))
6010 arg0 = save_expr (arg0);
6011 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6017 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6018 real_maxval (&max, neg, mode);
6019 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6020 arg0, build_real (TREE_TYPE (arg0), max));
6023 /* x < +Inf is always equal to x <= DBL_MAX. */
6024 real_maxval (&max, neg, mode);
6025 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6026 arg0, build_real (TREE_TYPE (arg0), max));
6029 /* x != +Inf is always equal to !(x > DBL_MAX). */
6030 real_maxval (&max, neg, mode);
6031 if (! HONOR_NANS (mode))
6032 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6033 arg0, build_real (TREE_TYPE (arg0), max));
6035 /* The transformation below creates non-gimple code and thus is
6036 not appropriate if we are in gimple form. */
6040 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6041 arg0, build_real (TREE_TYPE (arg0), max));
6042 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
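/* Illustrative sketch (hypothetical helper; uses the GCC built-in
   __builtin_inf): one of the rewrites above.  */
static int
inf_cmp_sketch (double x)
{
  /* x < +Inf folds to x <= DBL_MAX (the LT_EXPR case): every finite
     value and -Inf satisfy both forms, while +Inf and NaN satisfy
     neither.  */
  return x < __builtin_inf ();
}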
6051 /* Subroutine of fold() that optimizes comparisons of a division by
6052 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6055 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6056 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6057 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6059 The function returns the constant folded tree if a simplification
6060 can be made, and NULL_TREE otherwise. */
6063 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6065 tree prod, tmp, hi, lo;
6066 tree arg00 = TREE_OPERAND (arg0, 0);
6067 tree arg01 = TREE_OPERAND (arg0, 1);
6068 unsigned HOST_WIDE_INT lpart;
6069 HOST_WIDE_INT hpart;
6073 /* We have to do this the hard way to detect unsigned overflow.
6074 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6075 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6076 TREE_INT_CST_HIGH (arg01),
6077 TREE_INT_CST_LOW (arg1),
6078 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6079 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6080 prod = force_fit_type (prod, -1, overflow, false);
6081 neg_overflow = false;
6083 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6085 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6088 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6089 overflow = add_double (TREE_INT_CST_LOW (prod),
6090 TREE_INT_CST_HIGH (prod),
6091 TREE_INT_CST_LOW (tmp),
6092 TREE_INT_CST_HIGH (tmp),
6094 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6095 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6096 TREE_CONSTANT_OVERFLOW (prod));
6098 else if (tree_int_cst_sgn (arg01) >= 0)
6100 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6101 switch (tree_int_cst_sgn (arg1))
6104 neg_overflow = true;
6105 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6110 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6115 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6125 /* A negative divisor reverses the relational operators. */
6126 code = swap_tree_comparison (code);
6128 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6129 switch (tree_int_cst_sgn (arg1))
6132 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6137 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6142 neg_overflow = true;
6143 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6155 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6156 return omit_one_operand (type, integer_zero_node, arg00);
6157 if (TREE_OVERFLOW (hi))
6158 return fold_build2 (GE_EXPR, type, arg00, lo);
6159 if (TREE_OVERFLOW (lo))
6160 return fold_build2 (LE_EXPR, type, arg00, hi);
6161 return build_range_check (type, arg00, 1, lo, hi);
6164 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6165 return omit_one_operand (type, integer_one_node, arg00);
6166 if (TREE_OVERFLOW (hi))
6167 return fold_build2 (LT_EXPR, type, arg00, lo);
6168 if (TREE_OVERFLOW (lo))
6169 return fold_build2 (GT_EXPR, type, arg00, hi);
6170 return build_range_check (type, arg00, 0, lo, hi);
6173 if (TREE_OVERFLOW (lo))
6175 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6176 return omit_one_operand (type, tmp, arg00);
6178 return fold_build2 (LT_EXPR, type, arg00, lo);
6181 if (TREE_OVERFLOW (hi))
6183 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6184 return omit_one_operand (type, tmp, arg00);
6186 return fold_build2 (LE_EXPR, type, arg00, hi);
6189 if (TREE_OVERFLOW (hi))
6191 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6192 return omit_one_operand (type, tmp, arg00);
6194 return fold_build2 (GT_EXPR, type, arg00, hi);
6197 if (TREE_OVERFLOW (lo))
6199 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6200 return omit_one_operand (type, tmp, arg00);
6202 return fold_build2 (GE_EXPR, type, arg00, lo);
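/* Illustrative sketch (hypothetical helper): a fold_div_compare
   rewrite.  */
static int
div_cmp_sketch (unsigned x)
{
  /* x / 4 == 3 holds exactly for x in [12, 15], so the comparison
     folds to a range check, roughly x - 12 <= 3 after
     build_range_check.  */
  return x / 4 == 3;
}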
6212 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6213 equality/inequality test, then return a simplified form of the test
6214 using a sign test. Otherwise return NULL. TYPE is the desired result type.
6218 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6221 /* If this is testing a single bit, we can optimize the test. */
6222 if ((code == NE_EXPR || code == EQ_EXPR)
6223 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6224 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6226 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6227 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6228 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6230 if (arg00 != NULL_TREE
6231 /* This is only a win if casting to a signed type is cheap,
6232 i.e. when arg00's type is not a partial mode. */
6233 && TYPE_PRECISION (TREE_TYPE (arg00))
6234 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6236 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6237 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6238 result_type, fold_convert (stype, arg00),
6239 build_int_cst (stype, 0));
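/* Illustrative sketch (hypothetical helper; assumes a 32-bit int):
   the sign-test rewrite above.  */
static int
sign_test_sketch (int x)
{
  /* (x & C) != 0 with C the sign bit of x's type folds to x < 0;
     the EQ_EXPR form folds to x >= 0.  */
  return (x & (int) (1u << 31)) != 0;
}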
6246 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6247 equality/inequality test, then return a simplified form of
6248 the test using shifts and logical operations. Otherwise return
6249 NULL. TYPE is the desired result type. */
6252 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6255 /* If this is testing a single bit, we can optimize the test. */
6256 if ((code == NE_EXPR || code == EQ_EXPR)
6257 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6258 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6260 tree inner = TREE_OPERAND (arg0, 0);
6261 tree type = TREE_TYPE (arg0);
6262 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6263 enum machine_mode operand_mode = TYPE_MODE (type);
6264 int ops_unsigned;
6265 tree signed_type, unsigned_type, intermediate_type;
6266 tree tem;
6268 /* First, see if we can fold the single bit test into a sign-bit
6269 test. */
6270 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6275 /* Otherwise we have (A & C) != 0 where C is a single bit,
6276 convert that into ((A >> C2) & 1), where C2 = log2(C).
6277 Similarly for (A & C) == 0. */
6279 /* If INNER is a right shift of a constant and it plus BITNUM does
6280 not overflow, adjust BITNUM and INNER. */
6281 if (TREE_CODE (inner) == RSHIFT_EXPR
6282 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6283 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6284 && bitnum < TYPE_PRECISION (type)
6285 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6286 bitnum - TYPE_PRECISION (type)))
6288 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6289 inner = TREE_OPERAND (inner, 0);
6292 /* If we are going to be able to omit the AND below, we must do our
6293 operations as unsigned. If we must use the AND, we have a choice.
6294 Normally unsigned is faster, but for some machines signed is. */
6295 #ifdef LOAD_EXTEND_OP
6296 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6297 && !flag_syntax_only) ? 0 : 1;
6298 #else
6299 ops_unsigned = 1;
6300 #endif
6302 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6303 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6304 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6305 inner = fold_convert (intermediate_type, inner);
6308 inner = build2 (RSHIFT_EXPR, intermediate_type,
6309 inner, size_int (bitnum));
6311 if (code == EQ_EXPR)
6312 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6313 inner, integer_one_node);
6315 /* Put the AND last so it can combine with more things. */
6316 inner = build2 (BIT_AND_EXPR, intermediate_type,
6317 inner, integer_one_node);
6319 /* Make sure to return the proper type. */
6320 inner = fold_convert (result_type, inner);
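/* For example, (A & 8) != 0 becomes (A' >> 3) & 1 and (A & 8) == 0
   becomes ((A' >> 3) ^ 1) & 1, where A' is A converted to the signed
   or unsigned intermediate type chosen above; the XOR with one flips
   the tested bit before the final AND for the == case. */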
6327 /* Check whether we are allowed to reorder operands arg0 and arg1,
6328 such that the evaluation of arg1 occurs before arg0. */
6331 reorder_operands_p (tree arg0, tree arg1)
6333 if (! flag_evaluation_order)
6335 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6337 return ! TREE_SIDE_EFFECTS (arg0)
6338 && ! TREE_SIDE_EFFECTS (arg1);
6341 /* Test whether it is preferable to swap two operands, ARG0 and
6342 ARG1, for example because ARG0 is an integer constant and ARG1
6343 isn't. If REORDER is true, only recommend swapping if we can
6344 evaluate the operands in reverse order. */
6347 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6349 STRIP_SIGN_NOPS (arg0);
6350 STRIP_SIGN_NOPS (arg1);
6352 if (TREE_CODE (arg1) == INTEGER_CST)
6354 if (TREE_CODE (arg0) == INTEGER_CST)
6357 if (TREE_CODE (arg1) == REAL_CST)
6359 if (TREE_CODE (arg0) == REAL_CST)
6362 if (TREE_CODE (arg1) == COMPLEX_CST)
6364 if (TREE_CODE (arg0) == COMPLEX_CST)
6367 if (TREE_CONSTANT (arg1))
6369 if (TREE_CONSTANT (arg0))
6375 if (reorder && flag_evaluation_order
6376 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6384 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6385 for commutative and comparison operators. Ensuring a canonical
6386 form allows the optimizers to find additional redundancies without
6387 having to explicitly check for both orderings. */
6388 if (TREE_CODE (arg0) == SSA_NAME
6389 && TREE_CODE (arg1) == SSA_NAME
6390 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
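/* The effect is that constants sort to the second operand and
   SSA names get a fixed relative order, so the folders below only
   need to match patterns such as X + CST and X < CST, never the
   mirrored forms. */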
6396 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6397 ARG0 is extended to a wider type. */
6400 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6402 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6403 tree arg1_unw;
6404 tree shorter_type, outer_type;
6405 tree min, max;
6406 bool above, below;
6408 if (arg0_unw == arg0)
6409 return NULL_TREE;
6410 shorter_type = TREE_TYPE (arg0_unw);
6412 #ifdef HAVE_canonicalize_funcptr_for_compare
6413 /* Disable this optimization if we're casting a function pointer
6414 type on targets that require function pointer canonicalization. */
6415 if (HAVE_canonicalize_funcptr_for_compare
6416 && TREE_CODE (shorter_type) == POINTER_TYPE
6417 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6421 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6424 arg1_unw = get_unwidened (arg1, shorter_type);
6426 /* If possible, express the comparison in the shorter mode. */
6427 if ((code == EQ_EXPR || code == NE_EXPR
6428 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6429 && (TREE_TYPE (arg1_unw) == shorter_type
6430 || (TREE_CODE (arg1_unw) == INTEGER_CST
6431 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6432 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6433 && int_fits_type_p (arg1_unw, shorter_type))))
6434 return fold_build2 (code, type, arg0_unw,
6435 fold_convert (shorter_type, arg1_unw));
6437 if (TREE_CODE (arg1_unw) != INTEGER_CST
6438 || TREE_CODE (shorter_type) != INTEGER_TYPE
6439 || !int_fits_type_p (arg1_unw, shorter_type))
6442 /* If we are comparing with an integer that does not fit into the range
6443 of the shorter type, the result is known. */
6444 outer_type = TREE_TYPE (arg1_unw);
6445 min = lower_bound_in_type (outer_type, shorter_type);
6446 max = upper_bound_in_type (outer_type, shorter_type);
6448 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6450 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6457 return omit_one_operand (type, integer_zero_node, arg0);
6462 return omit_one_operand (type, integer_one_node, arg0);
6468 return omit_one_operand (type, integer_one_node, arg0);
6470 return omit_one_operand (type, integer_zero_node, arg0);
6475 return omit_one_operand (type, integer_zero_node, arg0);
6477 return omit_one_operand (type, integer_one_node, arg0);
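/* For example, assuming a 16-bit short widened to a 32-bit int:
   (int) s == 100000 folds to 0 and (int) s < 100000 folds to 1,
   since 100000 is outside short's range, while (int) s < 100 is
   done in the narrower type as s < (short) 100. */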
6486 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6487 ARG0 just the signedness is changed. */
6490 fold_sign_changed_comparison (enum tree_code code, tree type,
6491 tree arg0, tree arg1)
6493 tree arg0_inner, tmp;
6494 tree inner_type, outer_type;
6496 if (TREE_CODE (arg0) != NOP_EXPR
6497 && TREE_CODE (arg0) != CONVERT_EXPR)
6500 outer_type = TREE_TYPE (arg0);
6501 arg0_inner = TREE_OPERAND (arg0, 0);
6502 inner_type = TREE_TYPE (arg0_inner);
6504 #ifdef HAVE_canonicalize_funcptr_for_compare
6505 /* Disable this optimization if we're casting a function pointer
6506 type on targets that require function pointer canonicalization. */
6507 if (HAVE_canonicalize_funcptr_for_compare
6508 && TREE_CODE (inner_type) == POINTER_TYPE
6509 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6513 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6516 if (TREE_CODE (arg1) != INTEGER_CST
6517 && !((TREE_CODE (arg1) == NOP_EXPR
6518 || TREE_CODE (arg1) == CONVERT_EXPR)
6519 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6522 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6527 if (TREE_CODE (arg1) == INTEGER_CST)
6529 tmp = build_int_cst_wide (inner_type,
6530 TREE_INT_CST_LOW (arg1),
6531 TREE_INT_CST_HIGH (arg1));
6532 arg1 = force_fit_type (tmp, 0,
6533 TREE_OVERFLOW (arg1),
6534 TREE_CONSTANT_OVERFLOW (arg1));
6537 arg1 = fold_convert (inner_type, arg1);
6539 return fold_build2 (code, type, arg0_inner, arg1);
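/* For example, (int) u == 5 with u of type unsigned int becomes
   u == 5U: the conversion is stripped and the constant re-expressed
   in the inner type. The signedness of the two types may differ
   only for == and !=, where the bit-level comparison is unaffected. */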
6542 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6543 the step of the array. Reconstructs s and delta in the case of s * delta
6544 being an integer constant (and thus already folded).
6545 ADDR is the address. MULT is the multiplicative expression.
6546 If the function succeeds, the new address expression is returned. Otherwise
6547 NULL_TREE is returned. */
6550 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6552 tree s, delta, step;
6553 tree ref = TREE_OPERAND (addr, 0), pref;
6557 /* Canonicalize op1 into a possibly non-constant delta
6558 and an INTEGER_CST s. */
6559 if (TREE_CODE (op1) == MULT_EXPR)
6561 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6566 if (TREE_CODE (arg0) == INTEGER_CST)
6571 else if (TREE_CODE (arg1) == INTEGER_CST)
6579 else if (TREE_CODE (op1) == INTEGER_CST)
6586 /* Treat op1 itself as the delta, i.e. as delta * 1. */
6587 delta = op1;
6588 s = integer_one_node;
6591 for (;; ref = TREE_OPERAND (ref, 0))
6593 if (TREE_CODE (ref) == ARRAY_REF)
6595 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6599 step = array_ref_element_size (ref);
6600 if (TREE_CODE (step) != INTEGER_CST)
6605 if (! tree_int_cst_equal (step, s))
6610 /* Check whether delta is a multiple of step. */
6611 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6620 if (!handled_component_p (ref))
6624 /* We found a suitable array reference. So copy everything up to it,
6625 and replace the index. */
6627 pref = TREE_OPERAND (addr, 0);
6628 ret = copy_node (pref);
6633 pref = TREE_OPERAND (pref, 0);
6634 TREE_OPERAND (pos, 0) = copy_node (pref);
6635 pos = TREE_OPERAND (pos, 0);
6638 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6639 fold_convert (itype,
6640 TREE_OPERAND (pos, 1)),
6641 fold_convert (itype, delta));
6643 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
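/* For example, for int a[] with 4-byte elements, &a[i] + j * 4
   becomes &a[i + j], and the already-folded constant offset in
   &a[i] + 8 is recovered as delta == 2 by the divisibility check
   above, giving &a[i + 2]. */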
6647 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6648 means A >= Y && A != MAX, but in this case we know that
6649 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6652 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6654 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6656 if (TREE_CODE (bound) == LT_EXPR)
6657 a = TREE_OPERAND (bound, 0);
6658 else if (TREE_CODE (bound) == GT_EXPR)
6659 a = TREE_OPERAND (bound, 1);
6663 typea = TREE_TYPE (a);
6664 if (!INTEGRAL_TYPE_P (typea)
6665 && !POINTER_TYPE_P (typea))
6668 if (TREE_CODE (ineq) == LT_EXPR)
6670 a1 = TREE_OPERAND (ineq, 1);
6671 y = TREE_OPERAND (ineq, 0);
6673 else if (TREE_CODE (ineq) == GT_EXPR)
6675 a1 = TREE_OPERAND (ineq, 0);
6676 y = TREE_OPERAND (ineq, 1);
6681 if (TREE_TYPE (a1) != typea)
6684 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6685 if (!integer_onep (diff))
6688 return fold_build2 (GE_EXPR, type, a, y);
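/* For example, given the bound a < x, the inequality a + 1 > y
   (so DIFF == 1) folds to a >= y; the sharper form is safe here
   because a < x <= MAX guarantees a + 1 does not wrap around. */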
6691 /* Fold a sum or difference of at least one multiplication.
6692 Returns the folded tree or NULL if no simplification could be made. */
6695 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6697 tree arg00, arg01, arg10, arg11;
6698 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6700 /* (A * C) +- (B * C) -> (A+-B) * C.
6701 (A * C) +- A -> A * (C+-1).
6702 We are most concerned about the case where C is a constant,
6703 but other combinations show up during loop reduction. Since
6704 it is not difficult, try all four possibilities. */
6706 if (TREE_CODE (arg0) == MULT_EXPR)
6708 arg00 = TREE_OPERAND (arg0, 0);
6709 arg01 = TREE_OPERAND (arg0, 1);
6714 arg01 = fold_convert (type, integer_one_node);
6716 if (TREE_CODE (arg1) == MULT_EXPR)
6718 arg10 = TREE_OPERAND (arg1, 0);
6719 arg11 = TREE_OPERAND (arg1, 1);
6724 arg11 = fold_convert (type, integer_one_node);
6728 if (operand_equal_p (arg01, arg11, 0))
6729 same = arg01, alt0 = arg00, alt1 = arg10;
6730 else if (operand_equal_p (arg00, arg10, 0))
6731 same = arg00, alt0 = arg01, alt1 = arg11;
6732 else if (operand_equal_p (arg00, arg11, 0))
6733 same = arg00, alt0 = arg01, alt1 = arg10;
6734 else if (operand_equal_p (arg01, arg10, 0))
6735 same = arg01, alt0 = arg00, alt1 = arg11;
6737 /* No identical multiplicands; see if we can find a common
6738 power-of-two factor in non-power-of-two multiplies. This
6739 can help in multi-dimensional array access. */
6740 else if (host_integerp (arg01, 0)
6741 && host_integerp (arg11, 0))
6743 HOST_WIDE_INT int01, int11, tmp;
6744 bool swap = false;
6745 tree maybe_same;
6746 int01 = TREE_INT_CST_LOW (arg01);
6747 int11 = TREE_INT_CST_LOW (arg11);
6749 /* Move min of absolute values to int11. */
6750 if ((int01 >= 0 ? int01 : -int01)
6751 < (int11 >= 0 ? int11 : -int11))
6753 tmp = int01, int01 = int11, int11 = tmp;
6754 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6755 maybe_same = arg01;
6756 swap = true;
6757 }
6758 else
6759 maybe_same = arg11;
6761 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6763 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6764 build_int_cst (TREE_TYPE (arg00),
6765 int01 / int11));
6766 alt1 = arg10;
6767 same = maybe_same;
6768 if (swap)
6769 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6774 return fold_build2 (MULT_EXPR, type,
6775 fold_build2 (code, type,
6776 fold_convert (type, alt0),
6777 fold_convert (type, alt1)),
6778 fold_convert (type, same));
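/* For example:
     x*3 + y*3   ->  (x + y) * 3
     x*7 + x     ->  x * 8
     i*28 + j*4  ->  (i*7 + j) * 4, via the common power-of-two factor 4. */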
6783 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6784 specified by EXPR into the buffer PTR of length LEN bytes.
6785 Return the number of bytes placed in the buffer, or zero
6786 upon failure. */
6789 native_encode_int (tree expr, unsigned char *ptr, int len)
6791 tree type = TREE_TYPE (expr);
6792 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6793 int byte, offset, word, words;
6794 unsigned char value;
6796 if (total_bytes > len)
6798 words = total_bytes / UNITS_PER_WORD;
6800 for (byte = 0; byte < total_bytes; byte++)
6802 int bitpos = byte * BITS_PER_UNIT;
6803 if (bitpos < HOST_BITS_PER_WIDE_INT)
6804 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6806 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6807 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6809 if (total_bytes > UNITS_PER_WORD)
6811 word = byte / UNITS_PER_WORD;
6812 if (WORDS_BIG_ENDIAN)
6813 word = (words - 1) - word;
6814 offset = word * UNITS_PER_WORD;
6815 if (BYTES_BIG_ENDIAN)
6816 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6818 offset += byte % UNITS_PER_WORD;
6821 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6822 ptr[offset] = value;
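/* For example, encoding the 32-bit constant 0x01020304 stores the
   bytes 04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one, assuming a word size of at least four bytes. */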
6828 /* Subroutine of native_encode_expr. Encode the REAL_CST
6829 specified by EXPR into the buffer PTR of length LEN bytes.
6830 Return the number of bytes placed in the buffer, or zero
6831 upon failure. */
6834 native_encode_real (tree expr, unsigned char *ptr, int len)
6836 tree type = TREE_TYPE (expr);
6837 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6838 int byte, offset, word, words;
6839 unsigned char value;
6841 /* There are always 32 bits in each long, no matter the size of
6842 the host's long. We handle floating point representations with
6843 up to 192 bits. */
6844 long tmp[6];
6846 if (total_bytes > len)
6848 words = total_bytes / UNITS_PER_WORD;
6850 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6852 for (byte = 0; byte < total_bytes; byte++)
6854 int bitpos = byte * BITS_PER_UNIT;
6855 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6857 if (total_bytes > UNITS_PER_WORD)
6859 word = byte / UNITS_PER_WORD;
6860 if (FLOAT_WORDS_BIG_ENDIAN)
6861 word = (words - 1) - word;
6862 offset = word * UNITS_PER_WORD;
6863 if (BYTES_BIG_ENDIAN)
6864 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6866 offset += byte % UNITS_PER_WORD;
6869 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6870 ptr[offset] = value;
6875 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6876 specified by EXPR into the buffer PTR of length LEN bytes.
6877 Return the number of bytes placed in the buffer, or zero
6878 upon failure. */
6881 native_encode_complex (tree expr, unsigned char *ptr, int len)
6886 part = TREE_REALPART (expr);
6887 rsize = native_encode_expr (part, ptr, len);
6890 part = TREE_IMAGPART (expr);
6891 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6894 return rsize + isize;
6898 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6899 specified by EXPR into the buffer PTR of length LEN bytes.
6900 Return the number of bytes placed in the buffer, or zero
6901 upon failure. */
6904 native_encode_vector (tree expr, unsigned char *ptr, int len)
6906 int i, size, offset, count;
6907 tree elem, elements;
6911 elements = TREE_VECTOR_CST_ELTS (expr);
6912 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6913 for (i = 0; i < count; i++)
6917 elem = TREE_VALUE (elements);
6918 elements = TREE_CHAIN (elements);
6925 size = native_encode_expr (elem, ptr+offset, len-offset);
6931 if (offset + size > len)
6933 memset (ptr+offset, 0, size);
6943 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6944 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6945 buffer PTR of length LEN bytes. Return the number of bytes
6946 placed in the buffer, or zero upon failure. */
6949 native_encode_expr (tree expr, unsigned char *ptr, int len)
6951 switch (TREE_CODE (expr))
6954 return native_encode_int (expr, ptr, len);
6957 return native_encode_real (expr, ptr, len);
6960 return native_encode_complex (expr, ptr, len);
6963 return native_encode_vector (expr, ptr, len);
6971 /* Subroutine of native_interpret_expr. Interpret the contents of
6972 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6973 If the buffer cannot be interpreted, return NULL_TREE. */
6976 native_interpret_int (tree type, unsigned char *ptr, int len)
6978 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6979 int byte, offset, word, words;
6980 unsigned char value;
6981 unsigned HOST_WIDE_INT lo = 0;
6982 HOST_WIDE_INT hi = 0;
6984 if (total_bytes > len)
6986 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
6988 words = total_bytes / UNITS_PER_WORD;
6990 for (byte = 0; byte < total_bytes; byte++)
6992 int bitpos = byte * BITS_PER_UNIT;
6993 if (total_bytes > UNITS_PER_WORD)
6995 word = byte / UNITS_PER_WORD;
6996 if (WORDS_BIG_ENDIAN)
6997 word = (words - 1) - word;
6998 offset = word * UNITS_PER_WORD;
6999 if (BYTES_BIG_ENDIAN)
7000 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7002 offset += byte % UNITS_PER_WORD;
7005 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7006 value = ptr[offset];
7008 if (bitpos < HOST_BITS_PER_WIDE_INT)
7009 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7011 hi |= (unsigned HOST_WIDE_INT) value
7012 << (bitpos - HOST_BITS_PER_WIDE_INT);
7015 return force_fit_type (build_int_cst_wide (type, lo, hi),
7016 0, false, false);
7020 /* Subroutine of native_interpret_expr. Interpret the contents of
7021 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7022 If the buffer cannot be interpreted, return NULL_TREE. */
7025 native_interpret_real (tree type, unsigned char *ptr, int len)
7027 enum machine_mode mode = TYPE_MODE (type);
7028 int total_bytes = GET_MODE_SIZE (mode);
7029 int byte, offset, word, words;
7030 unsigned char value;
7031 /* There are always 32 bits in each long, no matter the size of
7032 the host's long. We handle floating point representations with
7033 up to 192 bits. */
7034 long tmp[6];
7035 REAL_VALUE_TYPE r;
7037 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7038 if (total_bytes > len || total_bytes > 24)
7040 words = total_bytes / UNITS_PER_WORD;
7042 memset (tmp, 0, sizeof (tmp));
7043 for (byte = 0; byte < total_bytes; byte++)
7045 int bitpos = byte * BITS_PER_UNIT;
7046 if (total_bytes > UNITS_PER_WORD)
7048 word = byte / UNITS_PER_WORD;
7049 if (FLOAT_WORDS_BIG_ENDIAN)
7050 word = (words - 1) - word;
7051 offset = word * UNITS_PER_WORD;
7052 if (BYTES_BIG_ENDIAN)
7053 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7055 offset += byte % UNITS_PER_WORD;
7058 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7059 value = ptr[offset];
7061 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7064 real_from_target (&r, tmp, mode);
7065 return build_real (type, r);
7069 /* Subroutine of native_interpret_expr. Interpret the contents of
7070 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7071 If the buffer cannot be interpreted, return NULL_TREE. */
7074 native_interpret_complex (tree type, unsigned char *ptr, int len)
7076 tree etype, rpart, ipart;
7079 etype = TREE_TYPE (type);
7080 size = GET_MODE_SIZE (TYPE_MODE (etype));
7083 rpart = native_interpret_expr (etype, ptr, size);
7086 ipart = native_interpret_expr (etype, ptr+size, size);
7089 return build_complex (type, rpart, ipart);
7093 /* Subroutine of native_interpret_expr. Interpret the contents of
7094 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7095 If the buffer cannot be interpreted, return NULL_TREE. */
7098 native_interpret_vector (tree type, unsigned char *ptr, int len)
7100 tree etype, elem, elements;
7103 etype = TREE_TYPE (type);
7104 size = GET_MODE_SIZE (TYPE_MODE (etype));
7105 count = TYPE_VECTOR_SUBPARTS (type);
7106 if (size * count > len)
7109 elements = NULL_TREE;
7110 for (i = count - 1; i >= 0; i--)
7112 elem = native_interpret_expr (etype, ptr+(i*size), size);
7115 elements = tree_cons (NULL_TREE, elem, elements);
7117 return build_vector (type, elements);
7121 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7122 the buffer PTR of length LEN as a constant of type TYPE. For
7123 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7124 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7125 return NULL_TREE. */
7128 native_interpret_expr (tree type, unsigned char *ptr, int len)
7130 switch (TREE_CODE (type))
7135 return native_interpret_int (type, ptr, len);
7138 return native_interpret_real (type, ptr, len);
7141 return native_interpret_complex (type, ptr, len);
7144 return native_interpret_vector (type, ptr, len);
7152 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7153 TYPE at compile-time. If we're unable to perform the conversion
7154 return NULL_TREE. */
7157 fold_view_convert_expr (tree type, tree expr)
7159 /* We support up to 512-bit values (for V8DFmode). */
7160 unsigned char buffer[64];
7163 /* Check that the host and target are sane. */
7164 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7167 len = native_encode_expr (expr, buffer, sizeof (buffer));
7171 return native_interpret_expr (type, buffer, len);
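/* For example, VIEW_CONVERT_EXPR<int>(1.0f) encodes the float as
   its IEEE bit pattern 0x3f800000 and reinterprets those bytes,
   yielding 1065353216; the result is independent of byte order
   because the same target layout is used in both directions. */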
7175 /* Fold a unary expression of code CODE and type TYPE with operand
7176 OP0. Return the folded expression if folding is successful.
7177 Otherwise, return NULL_TREE. */
7180 fold_unary (enum tree_code code, tree type, tree op0)
7182 tree tem;
7183 tree arg0 = NULL_TREE;
7184 enum tree_code_class kind = TREE_CODE_CLASS (code);
7186 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7187 && TREE_CODE_LENGTH (code) == 1);
7192 if (code == NOP_EXPR || code == CONVERT_EXPR
7193 || code == FLOAT_EXPR || code == ABS_EXPR)
7195 /* Don't use STRIP_NOPS, because signedness of argument type
7196 matters. */
7197 STRIP_SIGN_NOPS (arg0);
7201 /* Strip any conversions that don't change the mode. This
7202 is safe for every expression, except for a comparison
7203 expression because its signedness is derived from its
7204 operand. So, in this case, only strip conversions that
7205 don't change the signedness.
7206 Note that this is done as an internal manipulation within
7207 the constant folder, in order to find the simplest
7208 representation of the arguments so that their form can be
7209 studied. In any case, the appropriate type conversions
7210 should be put back in the tree that will get out of the
7211 constant folder. */
7216 if (TREE_CODE_CLASS (code) == tcc_unary)
7218 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7219 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7220 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7221 else if (TREE_CODE (arg0) == COND_EXPR)
7223 tree arg01 = TREE_OPERAND (arg0, 1);
7224 tree arg02 = TREE_OPERAND (arg0, 2);
7225 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7226 arg01 = fold_build1 (code, type, arg01);
7227 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7228 arg02 = fold_build1 (code, type, arg02);
7229 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7230 arg01, arg02);
7232 /* If this was a conversion, and all we did was to move into
7233 inside the COND_EXPR, bring it back out. But leave it if
7234 it is a conversion from integer to integer and the
7235 result precision is no wider than a word since such a
7236 conversion is cheap and may be optimized away by combine,
7237 while it couldn't if it were outside the COND_EXPR. Then return
7238 so we don't get into an infinite recursion loop taking the
7239 conversion out and then back in. */
7241 if ((code == NOP_EXPR || code == CONVERT_EXPR
7242 || code == NON_LVALUE_EXPR)
7243 && TREE_CODE (tem) == COND_EXPR
7244 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7245 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7246 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7247 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7248 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7249 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7250 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7252 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7253 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7254 || flag_syntax_only))
7255 tem = build1 (code, type,
7257 TREE_TYPE (TREE_OPERAND
7258 (TREE_OPERAND (tem, 1), 0)),
7259 TREE_OPERAND (tem, 0),
7260 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7261 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7264 else if (COMPARISON_CLASS_P (arg0))
7266 if (TREE_CODE (type) == BOOLEAN_TYPE)
7268 arg0 = copy_node (arg0);
7269 TREE_TYPE (arg0) = type;
7270 return arg0;
7272 else if (TREE_CODE (type) != INTEGER_TYPE)
7273 return fold_build3 (COND_EXPR, type, arg0,
7274 fold_build1 (code, type,
7275 integer_one_node),
7276 fold_build1 (code, type,
7277 integer_zero_node));
7286 case FIX_TRUNC_EXPR:
7288 case FIX_FLOOR_EXPR:
7289 case FIX_ROUND_EXPR:
7290 if (TREE_TYPE (op0) == type)
7291 return op0;
7293 /* If we have (type) (a CMP b) and type is an integral type, return
7294 new expression involving the new type. */
7295 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7296 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7297 TREE_OPERAND (op0, 1));
7299 /* Handle cases of two conversions in a row. */
7300 if (TREE_CODE (op0) == NOP_EXPR
7301 || TREE_CODE (op0) == CONVERT_EXPR)
7303 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7304 tree inter_type = TREE_TYPE (op0);
7305 int inside_int = INTEGRAL_TYPE_P (inside_type);
7306 int inside_ptr = POINTER_TYPE_P (inside_type);
7307 int inside_float = FLOAT_TYPE_P (inside_type);
7308 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7309 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7310 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7311 int inter_int = INTEGRAL_TYPE_P (inter_type);
7312 int inter_ptr = POINTER_TYPE_P (inter_type);
7313 int inter_float = FLOAT_TYPE_P (inter_type);
7314 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7315 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7316 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7317 int final_int = INTEGRAL_TYPE_P (type);
7318 int final_ptr = POINTER_TYPE_P (type);
7319 int final_float = FLOAT_TYPE_P (type);
7320 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7321 unsigned int final_prec = TYPE_PRECISION (type);
7322 int final_unsignedp = TYPE_UNSIGNED (type);
7324 /* In addition to the cases of two conversions in a row
7325 handled below, if we are converting something to its own
7326 type via an object of identical or wider precision, neither
7327 conversion is needed. */
7328 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7329 && (((inter_int || inter_ptr) && final_int)
7330 || (inter_float && final_float))
7331 && inter_prec >= final_prec)
7332 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7334 /* Likewise, if the intermediate and final types are either both
7335 float or both integer, we don't need the middle conversion if
7336 it is wider than the final type and doesn't change the signedness
7337 (for integers). Avoid this if the final type is a pointer
7338 since then we sometimes need the inner conversion. Likewise if
7339 the outer has a precision not equal to the size of its mode. */
7340 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7341 || (inter_float && inside_float)
7342 || (inter_vec && inside_vec))
7343 && inter_prec >= inside_prec
7344 && (inter_float || inter_vec
7345 || inter_unsignedp == inside_unsignedp)
7346 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7347 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7349 && (! final_vec || inter_prec == inside_prec))
7350 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7352 /* If we have a sign-extension of a zero-extended value, we can
7353 replace that by a single zero-extension. */
7354 if (inside_int && inter_int && final_int
7355 && inside_prec < inter_prec && inter_prec < final_prec
7356 && inside_unsignedp && !inter_unsignedp)
7357 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7359 /* Two conversions in a row are not needed unless:
7360 - some conversion is floating-point (overstrict for now), or
7361 - some conversion is a vector (overstrict for now), or
7362 - the intermediate type is narrower than both initial and
7364 - the intermediate type and innermost type differ in signedness,
7365 and the outermost type is wider than the intermediate, or
7366 - the initial type is a pointer type and the precisions of the
7367 intermediate and final types differ, or
7368 - the final type is a pointer type and the precisions of the
7369 initial and intermediate types differ.
7370 - the final type is a pointer type and the initial type not
7371 - the initial type is a pointer to an array and the final type
7373 if (! inside_float && ! inter_float && ! final_float
7374 && ! inside_vec && ! inter_vec && ! final_vec
7375 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7376 && ! (inside_int && inter_int
7377 && inter_unsignedp != inside_unsignedp
7378 && inter_prec < final_prec)
7379 && ((inter_unsignedp && inter_prec > inside_prec)
7380 == (final_unsignedp && final_prec > inter_prec))
7381 && ! (inside_ptr && inter_prec != final_prec)
7382 && ! (final_ptr && inside_prec != inter_prec)
7383 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7384 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7385 && final_ptr == inside_ptr
7387 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7388 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7389 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7392 /* Handle (T *)&A.B.C for A being of type T and B and C
7393 living at offset zero. This occurs frequently in
7394 C++ upcasting and then accessing the base. */
7395 if (TREE_CODE (op0) == ADDR_EXPR
7396 && POINTER_TYPE_P (type)
7397 && handled_component_p (TREE_OPERAND (op0, 0)))
7399 HOST_WIDE_INT bitsize, bitpos;
7401 enum machine_mode mode;
7402 int unsignedp, volatilep;
7403 tree base = TREE_OPERAND (op0, 0);
7404 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7405 &mode, &unsignedp, &volatilep, false);
7406 /* If the reference was to a (constant) zero offset, we can use
7407 the address of the base if it has the same base type
7408 as the result type. */
7409 if (! offset && bitpos == 0
7410 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7411 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7412 return fold_convert (type, build_fold_addr_expr (base));
7415 if (TREE_CODE (op0) == MODIFY_EXPR
7416 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7417 /* Detect assigning a bitfield. */
7418 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7419 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7421 /* Don't leave an assignment inside a conversion
7422 unless assigning a bitfield. */
7423 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7424 /* First do the assignment, then return converted constant. */
7425 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7426 TREE_NO_WARNING (tem) = 1;
7427 TREE_USED (tem) = 1;
7428 return tem;
7431 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7432 constants (if x has signed type, the sign bit cannot be set
7433 in c). This folds extension into the BIT_AND_EXPR. */
7434 if (INTEGRAL_TYPE_P (type)
7435 && TREE_CODE (type) != BOOLEAN_TYPE
7436 && TREE_CODE (op0) == BIT_AND_EXPR
7437 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7439 tree and = op0;
7440 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7441 int change = 0;
7443 if (TYPE_UNSIGNED (TREE_TYPE (and))
7444 || (TYPE_PRECISION (type)
7445 <= TYPE_PRECISION (TREE_TYPE (and))))
7447 else if (TYPE_PRECISION (TREE_TYPE (and1))
7448 <= HOST_BITS_PER_WIDE_INT
7449 && host_integerp (and1, 1))
7451 unsigned HOST_WIDE_INT cst;
7453 cst = tree_low_cst (and1, 1);
7454 cst &= (HOST_WIDE_INT) -1
7455 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7456 change = (cst == 0);
7457 #ifdef LOAD_EXTEND_OP
7458 if (change
7459 && !flag_syntax_only
7460 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7461 == ZERO_EXTEND))
7463 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7464 and0 = fold_convert (uns, and0);
7465 and1 = fold_convert (uns, and1);
7471 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7472 TREE_INT_CST_HIGH (and1));
7473 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7474 TREE_CONSTANT_OVERFLOW (and1));
7475 return fold_build2 (BIT_AND_EXPR, type,
7476 fold_convert (type, and0), tem);
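/* For example, with x of type unsigned char, (int) (x & 0x3f)
   becomes (int) x & 0x3f, folding the widening into the mask;
   the sign-bit test above keeps this from changing the result
   for signed operands. */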
7480 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7481 T2 being pointers to types of the same size. */
7482 if (POINTER_TYPE_P (type)
7483 && BINARY_CLASS_P (arg0)
7484 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7485 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7487 tree arg00 = TREE_OPERAND (arg0, 0);
7489 tree t1 = TREE_TYPE (arg00);
7490 tree tt0 = TREE_TYPE (t0);
7491 tree tt1 = TREE_TYPE (t1);
7492 tree s0 = TYPE_SIZE (tt0);
7493 tree s1 = TYPE_SIZE (tt1);
7495 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7496 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7497 TREE_OPERAND (arg0, 1));
7500 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7501 of the same precision, and X is a integer type not narrower than
7502 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7503 if (INTEGRAL_TYPE_P (type)
7504 && TREE_CODE (op0) == BIT_NOT_EXPR
7505 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7506 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7507 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7508 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7510 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7511 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7512 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7513 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7516 tem = fold_convert_const (code, type, arg0);
7517 return tem ? tem : NULL_TREE;
7519 case VIEW_CONVERT_EXPR:
7520 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7521 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7522 return fold_view_convert_expr (type, op0);
7525 if (negate_expr_p (arg0))
7526 return fold_convert (type, negate_expr (arg0));
7530 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7531 return fold_abs_const (arg0, type);
7532 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7533 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7534 /* Convert fabs((double)float) into (double)fabsf(float). */
7535 else if (TREE_CODE (arg0) == NOP_EXPR
7536 && TREE_CODE (type) == REAL_TYPE)
7538 tree targ0 = strip_float_extensions (arg0);
7540 return fold_convert (type, fold_build1 (ABS_EXPR,
7544 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7545 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7548 /* Strip sign ops from argument. */
7549 if (TREE_CODE (type) == REAL_TYPE)
7551 tem = fold_strip_sign_ops (arg0);
7553 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7558 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7559 return fold_convert (type, arg0);
7560 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7562 tree itype = TREE_TYPE (type);
7563 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7564 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7565 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7567 if (TREE_CODE (arg0) == COMPLEX_CST)
7569 tree itype = TREE_TYPE (type);
7570 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7571 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7572 return build_complex (type, rpart, negate_expr (ipart));
7574 if (TREE_CODE (arg0) == CONJ_EXPR)
7575 return fold_convert (type, TREE_OPERAND (arg0, 0));
7579 if (TREE_CODE (arg0) == INTEGER_CST)
7580 return fold_not_const (arg0, type);
7581 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7582 return TREE_OPERAND (arg0, 0);
7583 /* Convert ~ (-A) to A - 1. */
7584 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7585 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7586 build_int_cst (type, 1));
7587 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7588 else if (INTEGRAL_TYPE_P (type)
7589 && ((TREE_CODE (arg0) == MINUS_EXPR
7590 && integer_onep (TREE_OPERAND (arg0, 1)))
7591 || (TREE_CODE (arg0) == PLUS_EXPR
7592 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7593 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7594 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7595 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7596 && (tem = fold_unary (BIT_NOT_EXPR, type,
7598 TREE_OPERAND (arg0, 0)))))
7599 return fold_build2 (BIT_XOR_EXPR, type, tem,
7600 fold_convert (type, TREE_OPERAND (arg0, 1)));
7601 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7602 && (tem = fold_unary (BIT_NOT_EXPR, type,
7604 TREE_OPERAND (arg0, 1)))))
7605 return fold_build2 (BIT_XOR_EXPR, type,
7606 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
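/* These are the usual two's complement identities: -A == ~A + 1
   implies ~(-A) == A - 1 and ~(A - 1) == -A, and ~X == X ^ -1
   lets the NOT be pushed into whichever XOR operand simplifies. */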
7610 case TRUTH_NOT_EXPR:
7611 /* The argument to invert_truthvalue must have Boolean type. */
7612 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7613 arg0 = fold_convert (boolean_type_node, arg0);
7615 /* Note that the operand of this must be an int
7616 and its values must be 0 or 1.
7617 ("true" is a fixed value perhaps depending on the language,
7618 but we don't handle values other than 1 correctly yet.) */
7619 tem = invert_truthvalue (arg0);
7620 /* Avoid infinite recursion. */
7621 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7622 return NULL_TREE;
7623 return fold_convert (type, tem);
7626 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7627 return fold_convert (type, arg0);
7628 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7629 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7630 TREE_OPERAND (arg0, 1));
7631 if (TREE_CODE (arg0) == COMPLEX_CST)
7632 return fold_convert (type, TREE_REALPART (arg0));
7633 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7635 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7636 tem = fold_build2 (TREE_CODE (arg0), itype,
7637 fold_build1 (REALPART_EXPR, itype,
7638 TREE_OPERAND (arg0, 0)),
7639 fold_build1 (REALPART_EXPR, itype,
7640 TREE_OPERAND (arg0, 1)));
7641 return fold_convert (type, tem);
7643 if (TREE_CODE (arg0) == CONJ_EXPR)
7645 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7646 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7647 return fold_convert (type, tem);
7652 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7653 return fold_convert (type, integer_zero_node);
7654 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7655 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7656 TREE_OPERAND (arg0, 0));
7657 if (TREE_CODE (arg0) == COMPLEX_CST)
7658 return fold_convert (type, TREE_IMAGPART (arg0));
7659 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7661 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7662 tem = fold_build2 (TREE_CODE (arg0), itype,
7663 fold_build1 (IMAGPART_EXPR, itype,
7664 TREE_OPERAND (arg0, 0)),
7665 fold_build1 (IMAGPART_EXPR, itype,
7666 TREE_OPERAND (arg0, 1)));
7667 return fold_convert (type, tem);
7669 if (TREE_CODE (arg0) == CONJ_EXPR)
7671 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7672 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7673 return fold_convert (type, negate_expr (tem));
7679 } /* switch (code) */
7682 /* Fold a binary expression of code CODE and type TYPE with operands
7683 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7684 Return the folded expression if folding is successful. Otherwise,
7685 return NULL_TREE. */
7688 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7690 enum tree_code compl_code;
7692 if (code == MIN_EXPR)
7693 compl_code = MAX_EXPR;
7694 else if (code == MAX_EXPR)
7695 compl_code = MIN_EXPR;
7699 /* MIN (MAX (a, b), b) == b. */
7700 if (TREE_CODE (op0) == compl_code
7701 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7702 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7704 /* MIN (MAX (b, a), b) == b. */
7705 if (TREE_CODE (op0) == compl_code
7706 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7707 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7708 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7710 /* MIN (a, MAX (a, b)) == a. */
7711 if (TREE_CODE (op1) == compl_code
7712 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7713 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7714 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7716 /* MIN (a, MAX (b, a)) == a. */
7717 if (TREE_CODE (op1) == compl_code
7718 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7719 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7720 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
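/* For example, MIN (MAX (a, b), b) folds to b and
   MIN (a, MAX (a, b)) folds to a; the MAX-of-MIN cases follow by
   using the complementary code. */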
7725 /* Subroutine of fold_binary. This routine performs all of the
7726 transformations that are common to the equality/inequality
7727 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7728 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7729 fold_binary itself should go through fold_binary. Fold a comparison with
7730 tree code CODE and type TYPE with operands OP0 and OP1. Return
7731 the folded comparison or NULL_TREE. */
7734 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7736 tree arg0, arg1, tem;
7741 STRIP_SIGN_NOPS (arg0);
7742 STRIP_SIGN_NOPS (arg1);
7744 tem = fold_relational_const (code, type, arg0, arg1);
7745 if (tem != NULL_TREE)
7748 /* If one arg is a real or integer constant, put it last. */
7749 if (tree_swap_operands_p (arg0, arg1, true))
7750 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7752 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7753 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7754 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7755 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7756 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7757 && !(flag_wrapv || flag_trapv))
7758 && (TREE_CODE (arg1) == INTEGER_CST
7759 && !TREE_OVERFLOW (arg1)))
7761 tree const1 = TREE_OPERAND (arg0, 1);
7762 tree const2 = arg1;
7763 tree variable = TREE_OPERAND (arg0, 0);
7764 tree lhs;
7765 int lhs_add;
7766 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7768 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7769 TREE_TYPE (arg1), const2, const1);
7770 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7771 && (TREE_CODE (lhs) != INTEGER_CST
7772 || !TREE_OVERFLOW (lhs)))
7773 return fold_build2 (code, type, variable, lhs);
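/* For example, X + 2 < 10 becomes X < 8. This is valid only
   because signed overflow is undefined here, hence the checks
   above for unsigned types, flag_wrapv and flag_trapv, and for
   the recomputed constant itself not overflowing. */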
7776 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7777 same object, then we can fold this to a comparison of the two offsets in
7778 signed size type. This is possible because pointer arithmetic is
7779 restricted to remain within an object and overflow on pointer differences
7780 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7781 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7782 && !flag_wrapv && !flag_trapv)
7784 tree base0, offset0, base1, offset1;
7786 if (extract_array_ref (arg0, &base0, &offset0)
7787 && extract_array_ref (arg1, &base1, &offset1)
7788 && operand_equal_p (base0, base1, 0))
7790 tree signed_size_type_node;
7791 signed_size_type_node = signed_type_for (size_type_node);
7793 /* By converting to signed size type we cover middle-end pointer
7794 arithmetic which operates on unsigned pointer types of size
7795 type size and ARRAY_REF offsets which are properly sign or
7796 zero extended from their type in case it is narrower than
7797 size type. */
7798 if (offset0 == NULL_TREE)
7799 offset0 = build_int_cst (signed_size_type_node, 0);
7801 offset0 = fold_convert (signed_size_type_node, offset0);
7802 if (offset1 == NULL_TREE)
7803 offset1 = build_int_cst (signed_size_type_node, 0);
7805 offset1 = fold_convert (signed_size_type_node, offset1);
7807 return fold_build2 (code, type, offset0, offset1);
7811 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7813 tree targ0 = strip_float_extensions (arg0);
7814 tree targ1 = strip_float_extensions (arg1);
7815 tree newtype = TREE_TYPE (targ0);
7817 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7818 newtype = TREE_TYPE (targ1);
7820 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7821 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7822 return fold_build2 (code, type, fold_convert (newtype, targ0),
7823 fold_convert (newtype, targ1));
7825 /* (-a) CMP (-b) -> b CMP a */
7826 if (TREE_CODE (arg0) == NEGATE_EXPR
7827 && TREE_CODE (arg1) == NEGATE_EXPR)
7828 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7829 TREE_OPERAND (arg0, 0));
7831 if (TREE_CODE (arg1) == REAL_CST)
7833 REAL_VALUE_TYPE cst;
7834 cst = TREE_REAL_CST (arg1);
7836 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7837 if (TREE_CODE (arg0) == NEGATE_EXPR)
7838 return fold_build2 (swap_tree_comparison (code), type,
7839 TREE_OPERAND (arg0, 0),
7840 build_real (TREE_TYPE (arg1),
7841 REAL_VALUE_NEGATE (cst)));
7843 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7844 /* a CMP (-0) -> a CMP 0 */
7845 if (REAL_VALUE_MINUS_ZERO (cst))
7846 return fold_build2 (code, type, arg0,
7847 build_real (TREE_TYPE (arg1), dconst0));
7849 /* x != NaN is always true, other ops are always false. */
7850 if (REAL_VALUE_ISNAN (cst)
7851 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7853 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7854 return omit_one_operand (type, tem, arg0);
7857 /* Fold comparisons against infinity. */
7858 if (REAL_VALUE_ISINF (cst))
7860 tem = fold_inf_compare (code, type, arg0, arg1);
7861 if (tem != NULL_TREE)
7862 return tem;
7866 /* If this is a comparison of a real constant with a PLUS_EXPR
7867 or a MINUS_EXPR of a real constant, we can convert it into a
7868 comparison with a revised real constant as long as no overflow
7869 occurs when unsafe_math_optimizations are enabled. */
7870 if (flag_unsafe_math_optimizations
7871 && TREE_CODE (arg1) == REAL_CST
7872 && (TREE_CODE (arg0) == PLUS_EXPR
7873 || TREE_CODE (arg0) == MINUS_EXPR)
7874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7875 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7876 ? MINUS_EXPR : PLUS_EXPR,
7877 arg1, TREE_OPERAND (arg0, 1), 0))
7878 && ! TREE_CONSTANT_OVERFLOW (tem))
7879 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7881 /* Likewise, we can simplify a comparison of a real constant with
7882 a MINUS_EXPR whose first operand is also a real constant, i.e.
7883 (c1 - x) < c2 becomes x > c1-c2. */
7884 if (flag_unsafe_math_optimizations
7885 && TREE_CODE (arg1) == REAL_CST
7886 && TREE_CODE (arg0) == MINUS_EXPR
7887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7888 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7890 && ! TREE_CONSTANT_OVERFLOW (tem))
7891 return fold_build2 (swap_tree_comparison (code), type,
7892 TREE_OPERAND (arg0, 1), tem);
7894 /* Fold comparisons against built-in math functions. */
7895 if (TREE_CODE (arg1) == REAL_CST
7896 && flag_unsafe_math_optimizations
7897 && ! flag_errno_math)
7899 enum built_in_function fcode = builtin_mathfn_code (arg0);
7901 if (fcode != END_BUILTINS)
7903 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7904 if (tem != NULL_TREE)
7905 return tem;
7910 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7911 if (TREE_CONSTANT (arg1)
7912 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7913 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7914 /* This optimization is invalid for ordered comparisons
7915 if CONST+INCR overflows or if foo+incr might overflow.
7916 This optimization is invalid for floating point due to rounding.
7917 For pointer types we assume overflow doesn't happen. */
7918 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7919 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7920 && (code == EQ_EXPR || code == NE_EXPR))))
7922 tree varop, newconst;
7924 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7926 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7927 arg1, TREE_OPERAND (arg0, 1));
7928 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7929 TREE_OPERAND (arg0, 0),
7930 TREE_OPERAND (arg0, 1));
7934 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7935 arg1, TREE_OPERAND (arg0, 1));
7936 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7937 TREE_OPERAND (arg0, 0),
7938 TREE_OPERAND (arg0, 1));
7942 /* If VAROP is a reference to a bitfield, we must mask
7943 the constant by the width of the field. */
7944 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7945 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7946 && host_integerp (DECL_SIZE (TREE_OPERAND
7947 (TREE_OPERAND (varop, 0), 1)), 1))
7949 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7950 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7951 tree folded_compare, shift;
7953 /* First check whether the comparison would come out
7954 always the same. If we don't do that we would
7955 change the meaning with the masking. */
7956 folded_compare = fold_build2 (code, type,
7957 TREE_OPERAND (varop, 0), arg1);
7958 if (TREE_CODE (folded_compare) == INTEGER_CST)
7959 return omit_one_operand (type, folded_compare, varop);
7961 shift = build_int_cst (NULL_TREE,
7962 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7963 shift = fold_convert (TREE_TYPE (varop), shift);
7964 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7965 newconst, shift);
7966 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7967 newconst, shift);
7970 return fold_build2 (code, type, varop, newconst);
7973 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7974 && (TREE_CODE (arg0) == NOP_EXPR
7975 || TREE_CODE (arg0) == CONVERT_EXPR))
7977 /* If we are widening one operand of an integer comparison,
7978 see if the other operand is similarly being widened. Perhaps we
7979 can do the comparison in the narrower type. */
7980 tem = fold_widened_comparison (code, type, arg0, arg1);
7981 if (tem)
7982 return tem;
7984 /* Or if we are changing signedness. */
7985 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7986 if (tem)
7987 return tem;
7990 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7991 constant, we can simplify it. */
7992 if (TREE_CODE (arg1) == INTEGER_CST
7993 && (TREE_CODE (arg0) == MIN_EXPR
7994 || TREE_CODE (arg0) == MAX_EXPR)
7995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7997 tem = optimize_minmax_comparison (code, type, op0, op1);
7998 if (tem)
7999 return tem;
8002 /* Simplify comparison of something with itself. (For IEEE
8003 floating-point, we can only do some of these simplifications.) */
8004 if (operand_equal_p (arg0, arg1, 0))
8009 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8010 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8011 return constant_boolean_node (1, type);
8016 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8017 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8018 return constant_boolean_node (1, type);
8019 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8022 /* For NE, we can only do this simplification if integer
8023 or we don't honor IEEE floating point NaNs. */
8024 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8025 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8027 /* ... fall through ... */
8030 return constant_boolean_node (0, type);
8036 /* If we are comparing an expression that just has comparisons
8037 of two integer values, arithmetic expressions of those comparisons,
8038 and constants, we can simplify it. There are only three cases
8039 to check: the two values can either be equal, the first can be
8040 greater, or the second can be greater. Fold the expression for
8041 those three values. Since each value must be 0 or 1, we have
8042 eight possibilities, each of which corresponds to the constant 0
8043 or 1 or one of the six possible comparisons.
8045 This handles common cases like (a > b) == 0 but also handles
8046 expressions like ((x > y) - (y > x)) > 0, which supposedly
8047 occur in macroized code. */
8049 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8051 tree cval1 = 0, cval2 = 0;
8052 int save_p = 0;
8054 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8055 /* Don't handle degenerate cases here; they should already
8056 have been handled anyway. */
8057 && cval1 != 0 && cval2 != 0
8058 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8059 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8060 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8061 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8062 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8063 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8064 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8066 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8067 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8069 /* We can't just pass T to eval_subst in case cval1 or cval2
8070 was the same as ARG1. */
8073 = fold_build2 (code, type,
8074 eval_subst (arg0, cval1, maxval,
8078 = fold_build2 (code, type,
8079 eval_subst (arg0, cval1, maxval,
8083 = fold_build2 (code, type,
8084 eval_subst (arg0, cval1, minval,
8088 /* All three of these results should be 0 or 1. Confirm they are.
8089 Then use those values to select the proper code to use. */
8091 if (TREE_CODE (high_result) == INTEGER_CST
8092 && TREE_CODE (equal_result) == INTEGER_CST
8093 && TREE_CODE (low_result) == INTEGER_CST)
8095 /* Make a 3-bit mask with the high-order bit being the
8096 value for `>', the next for '=', and the low for '<'. */
8097 switch ((integer_onep (high_result) * 4)
8098 + (integer_onep (equal_result) * 2)
8099 + integer_onep (low_result))
8103 return omit_one_operand (type, integer_zero_node, arg0);
8124 return omit_one_operand (type, integer_one_node, arg0);
8127 if (save_p)
8128 return save_expr (build2 (code, type, cval1, cval2));
8129 return fold_build2 (code, type, cval1, cval2);
8134 /* Fold a comparison of the address of COMPONENT_REFs with the same
8135 type and component to a comparison of the address of the base
8136 object. In short, &x->a OP &y->a to x OP y and
8137 &x->a OP &y.a to x OP &y */
8138 if (TREE_CODE (arg0) == ADDR_EXPR
8139 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8140 && TREE_CODE (arg1) == ADDR_EXPR
8141 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8143 tree cref0 = TREE_OPERAND (arg0, 0);
8144 tree cref1 = TREE_OPERAND (arg1, 0);
8145 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8147 tree op0 = TREE_OPERAND (cref0, 0);
8148 tree op1 = TREE_OPERAND (cref1, 0);
8149 return fold_build2 (code, type,
8150 build_fold_addr_expr (op0),
8151 build_fold_addr_expr (op1));
8155 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8156 into a single range test. */
8157 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8158 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8159 && TREE_CODE (arg1) == INTEGER_CST
8160 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8161 && !integer_zerop (TREE_OPERAND (arg0, 1))
8162 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8163 && !TREE_OVERFLOW (arg1))
8165 tem = fold_div_compare (code, type, arg0, arg1);
8166 if (tem != NULL_TREE)
8167 return tem;
8174 /* Subroutine of fold_binary. Optimize complex multiplications of the
8175 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8176 argument EXPR represents the expression "z" of type TYPE. */
8179 fold_mult_zconjz (tree type, tree expr)
8181 tree itype = TREE_TYPE (type);
8182 tree rpart, ipart, tem;
8184 if (TREE_CODE (expr) == COMPLEX_EXPR)
8186 rpart = TREE_OPERAND (expr, 0);
8187 ipart = TREE_OPERAND (expr, 1);
8189 else if (TREE_CODE (expr) == COMPLEX_CST)
8191 rpart = TREE_REALPART (expr);
8192 ipart = TREE_IMAGPART (expr);
8196 expr = save_expr (expr);
8197 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8198 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8201 rpart = save_expr (rpart);
8202 ipart = save_expr (ipart);
8203 tem = fold_build2 (PLUS_EXPR, itype,
8204 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8205 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8206 return fold_build2 (COMPLEX_EXPR, type, tem,
8207 fold_convert (itype, integer_zero_node));
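/* For example, with z == 3 + 4i this yields (3*3 + 4*4) + 0i,
   i.e. 25 + 0i; when z is not a COMPLEX_EXPR or COMPLEX_CST its
   real and imaginary parts are extracted from a saved copy so z
   is evaluated only once. */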
8211 /* Fold a binary expression of code CODE and type TYPE with operands
8212 OP0 and OP1. Return the folded expression if folding is
8213 successful. Otherwise, return NULL_TREE. */
8216 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8218 enum tree_code_class kind = TREE_CODE_CLASS (code);
8219 tree arg0, arg1, tem;
8220 tree t1 = NULL_TREE;
8222 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8223 && TREE_CODE_LENGTH (code) == 2
8224 && op0 != NULL_TREE
8225 && op1 != NULL_TREE);
8230 /* Strip any conversions that don't change the mode. This is
8231 safe for every expression, except for a comparison expression
8232 because its signedness is derived from its operands. So, in
8233 the latter case, only strip conversions that don't change the
8234 signedness.
8236 Note that this is done as an internal manipulation within the
8237 constant folder, in order to find the simplest representation
8238 of the arguments so that their form can be studied. In any
8239 case, the appropriate type conversions should be put back in
8240 the tree that will get out of the constant folder. */
8242 if (kind == tcc_comparison)
8244 STRIP_SIGN_NOPS (arg0);
8245 STRIP_SIGN_NOPS (arg1);
8253 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8254 constant but we can't do arithmetic on them. */
8255 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8256 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8257 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8258 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8260 if (kind == tcc_binary)
8261 tem = const_binop (code, arg0, arg1, 0);
8262 else if (kind == tcc_comparison)
8263 tem = fold_relational_const (code, type, arg0, arg1);
8267 if (tem != NULL_TREE)
8269 if (TREE_TYPE (tem) != type)
8270 tem = fold_convert (type, tem);
8275 /* If this is a commutative operation, and ARG0 is a constant, move it
8276 to ARG1 to reduce the number of tests below. */
8277 if (commutative_tree_code (code)
8278 && tree_swap_operands_p (arg0, arg1, true))
8279 return fold_build2 (code, type, op1, op0);
8281 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8283 First check for cases where an arithmetic operation is applied to a
8284 compound, conditional, or comparison operation. Push the arithmetic
8285 operation inside the compound or conditional to see if any folding
8286 can then be done. Convert comparison to conditional for this purpose.
8287 This also optimizes non-constant cases that used to be done in expr.c.
8290 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8291 one of the operands is a comparison and the other is a comparison, a
8292 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8293 code below would make the expression more complex. Change it to a
8294 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8295 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8297 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8298 || code == EQ_EXPR || code == NE_EXPR)
8299 && ((truth_value_p (TREE_CODE (arg0))
8300 && (truth_value_p (TREE_CODE (arg1))
8301 || (TREE_CODE (arg1) == BIT_AND_EXPR
8302 && integer_onep (TREE_OPERAND (arg1, 1)))))
8303 || (truth_value_p (TREE_CODE (arg1))
8304 && (truth_value_p (TREE_CODE (arg0))
8305 || (TREE_CODE (arg0) == BIT_AND_EXPR
8306 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8308 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8309 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8312 fold_convert (boolean_type_node, arg0),
8313 fold_convert (boolean_type_node, arg1));
8315 if (code == EQ_EXPR)
8316 tem = invert_truthvalue (tem);
8318 return fold_convert (type, tem);
8321 if (TREE_CODE_CLASS (code) == tcc_binary
8322 || TREE_CODE_CLASS (code) == tcc_comparison)
8324 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8325 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8326 fold_build2 (code, type,
8327 TREE_OPERAND (arg0, 1), op1));
8328 if (TREE_CODE (arg1) == COMPOUND_EXPR
8329 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8330 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8331 fold_build2 (code, type,
8332 op0, TREE_OPERAND (arg1, 1)));
8334 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8336 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8338 /*cond_first_p=*/1);
8339 if (tem != NULL_TREE)
8343 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8345 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8347 /*cond_first_p=*/0);
8348 if (tem != NULL_TREE)
8356 /* A + (-B) -> A - B */
8357 if (TREE_CODE (arg1) == NEGATE_EXPR)
8358 return fold_build2 (MINUS_EXPR, type,
8359 fold_convert (type, arg0),
8360 fold_convert (type, TREE_OPERAND (arg1, 0)));
8361 /* (-A) + B -> B - A */
8362 if (TREE_CODE (arg0) == NEGATE_EXPR
8363 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8364 return fold_build2 (MINUS_EXPR, type,
8365 fold_convert (type, arg1),
8366 fold_convert (type, TREE_OPERAND (arg0, 0)));
8367 /* Convert ~A + 1 to -A. */
8368 if (INTEGRAL_TYPE_P (type)
8369 && TREE_CODE (arg0) == BIT_NOT_EXPR
8370 && integer_onep (arg1))
8371 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
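/* Added note: ~A + 1 == -A is the usual two's complement negation
   identity; e.g. for int a = 5, ~a is -6 and ~a + 1 is -5.  */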
8373 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
8375 if ((TREE_CODE (arg0) == MULT_EXPR
8376 || TREE_CODE (arg1) == MULT_EXPR)
8377 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8379 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8384 if (! FLOAT_TYPE_P (type))
8386 if (integer_zerop (arg1))
8387 return non_lvalue (fold_convert (type, arg0));
8389 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8390 with a constant, and the two constants have no bits in common,
8391 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
8393 if (TREE_CODE (arg0) == BIT_AND_EXPR
8394 && TREE_CODE (arg1) == BIT_AND_EXPR
8395 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8396 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8397 && integer_zerop (const_binop (BIT_AND_EXPR,
8398 TREE_OPERAND (arg0, 1),
8399 TREE_OPERAND (arg1, 1), 0)))
8401 code = BIT_IOR_EXPR;
8405 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8406 (plus (plus (mult) (mult)) (foo)) so that we can
8407 take advantage of the factoring cases below. */
8408 if (((TREE_CODE (arg0) == PLUS_EXPR
8409 || TREE_CODE (arg0) == MINUS_EXPR)
8410 && TREE_CODE (arg1) == MULT_EXPR)
8411 || ((TREE_CODE (arg1) == PLUS_EXPR
8412 || TREE_CODE (arg1) == MINUS_EXPR)
8413 && TREE_CODE (arg0) == MULT_EXPR))
8415 tree parg0, parg1, parg, marg;
8416 enum tree_code pcode;
8418 if (TREE_CODE (arg1) == MULT_EXPR)
8419 parg = arg0, marg = arg1;
8421 parg = arg1, marg = arg0;
8422 pcode = TREE_CODE (parg);
8423 parg0 = TREE_OPERAND (parg, 0);
8424 parg1 = TREE_OPERAND (parg, 1);
8428 if (TREE_CODE (parg0) == MULT_EXPR
8429 && TREE_CODE (parg1) != MULT_EXPR)
8430 return fold_build2 (pcode, type,
8431 fold_build2 (PLUS_EXPR, type,
8432 fold_convert (type, parg0),
8433 fold_convert (type, marg)),
8434 fold_convert (type, parg1));
8435 if (TREE_CODE (parg0) != MULT_EXPR
8436 && TREE_CODE (parg1) == MULT_EXPR)
8437 return fold_build2 (PLUS_EXPR, type,
8438 fold_convert (type, parg0),
8439 fold_build2 (pcode, type,
8440 fold_convert (type, marg),
8445 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8446 of the array. The loop optimizer sometimes produces this type of expression. */
8448 if (TREE_CODE (arg0) == ADDR_EXPR)
8450 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8452 return fold_convert (type, tem);
8454 else if (TREE_CODE (arg1) == ADDR_EXPR)
8456 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8458 return fold_convert (type, tem);
8463 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8464 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8465 return non_lvalue (fold_convert (type, arg0));
8467 /* Likewise if the operands are reversed. */
8468 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8469 return non_lvalue (fold_convert (type, arg1));
8471 /* Convert X + -C into X - C. */
8472 if (TREE_CODE (arg1) == REAL_CST
8473 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8475 tem = fold_negate_const (arg1, type);
8476 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8477 return fold_build2 (MINUS_EXPR, type,
8478 fold_convert (type, arg0),
8479 fold_convert (type, tem));
8482 if (flag_unsafe_math_optimizations
8483 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8484 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8485 && (tem = distribute_real_division (code, type, arg0, arg1)))
8488 /* Convert x+x into x*2.0. */
8489 if (operand_equal_p (arg0, arg1, 0)
8490 && SCALAR_FLOAT_TYPE_P (type))
8491 return fold_build2 (MULT_EXPR, type, arg0,
8492 build_real (type, dconst2));
8494 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8495 if (flag_unsafe_math_optimizations
8496 && TREE_CODE (arg1) == PLUS_EXPR
8497 && TREE_CODE (arg0) != MULT_EXPR)
8499 tree tree10 = TREE_OPERAND (arg1, 0);
8500 tree tree11 = TREE_OPERAND (arg1, 1);
8501 if (TREE_CODE (tree11) == MULT_EXPR
8502 && TREE_CODE (tree10) == MULT_EXPR)
8505 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8506 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8509 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8510 if (flag_unsafe_math_optimizations
8511 && TREE_CODE (arg0) == PLUS_EXPR
8512 && TREE_CODE (arg1) != MULT_EXPR)
8514 tree tree00 = TREE_OPERAND (arg0, 0);
8515 tree tree01 = TREE_OPERAND (arg0, 1);
8516 if (TREE_CODE (tree01) == MULT_EXPR
8517 && TREE_CODE (tree00) == MULT_EXPR)
8520 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8521 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8527 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8528 is a rotate of A by C1 bits. */
8529 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8530 is a rotate of A by B bits. */
8532 enum tree_code code0, code1;
8533 code0 = TREE_CODE (arg0);
8534 code1 = TREE_CODE (arg1);
8535 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8536 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8537 && operand_equal_p (TREE_OPERAND (arg0, 0),
8538 TREE_OPERAND (arg1, 0), 0)
8539 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8541 tree tree01, tree11;
8542 enum tree_code code01, code11;
8544 tree01 = TREE_OPERAND (arg0, 1);
8545 tree11 = TREE_OPERAND (arg1, 1);
8546 STRIP_NOPS (tree01);
8547 STRIP_NOPS (tree11);
8548 code01 = TREE_CODE (tree01);
8549 code11 = TREE_CODE (tree11);
8550 if (code01 == INTEGER_CST
8551 && code11 == INTEGER_CST
8552 && TREE_INT_CST_HIGH (tree01) == 0
8553 && TREE_INT_CST_HIGH (tree11) == 0
8554 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8555 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8556 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8557 code0 == LSHIFT_EXPR ? tree01 : tree11);
8558 else if (code11 == MINUS_EXPR)
8560 tree tree110, tree111;
8561 tree110 = TREE_OPERAND (tree11, 0);
8562 tree111 = TREE_OPERAND (tree11, 1);
8563 STRIP_NOPS (tree110);
8564 STRIP_NOPS (tree111);
8565 if (TREE_CODE (tree110) == INTEGER_CST
8566 && 0 == compare_tree_int (tree110,
8568 (TREE_TYPE (TREE_OPERAND
8570 && operand_equal_p (tree01, tree111, 0))
8571 return build2 ((code0 == LSHIFT_EXPR
8574 type, TREE_OPERAND (arg0, 0), tree01);
8576 else if (code01 == MINUS_EXPR)
8578 tree tree010, tree011;
8579 tree010 = TREE_OPERAND (tree01, 0);
8580 tree011 = TREE_OPERAND (tree01, 1);
8581 STRIP_NOPS (tree010);
8582 STRIP_NOPS (tree011);
8583 if (TREE_CODE (tree010) == INTEGER_CST
8584 && 0 == compare_tree_int (tree010,
8586 (TREE_TYPE (TREE_OPERAND
8588 && operand_equal_p (tree11, tree011, 0))
8589 return build2 ((code0 != LSHIFT_EXPR
8592 type, TREE_OPERAND (arg0, 0), tree11);
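/* Illustrative note (added, not in the original source): assuming a
   32-bit unsigned int x, the rotate recognition above folds

       (x << 3) + (x >> 29)

   into x rotated left by 3, because the two shift counts sum to the
   precision of the type; the variable-count form
   (x << b) + (x >> (32 - b)) is matched by the MINUS_EXPR branches.  */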
8598 /* In most languages, we can't associate operations on floats through
8599 parentheses. Rather than remember where the parentheses were, we
8600 don't associate floats at all, unless the user has specified
8601 -funsafe-math-optimizations. */
8603 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8605 tree var0, con0, lit0, minus_lit0;
8606 tree var1, con1, lit1, minus_lit1;
8608 /* Split both trees into variables, constants, and literals. Then
8609 associate each group together, the constants with literals,
8610 then the result with variables. This increases the chances of
8611 literals being recombined later and of generating relocatable
8612 expressions for the sum of a constant and literal. */
8613 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8614 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8615 code == MINUS_EXPR);
8617 /* Only do something if we found more than two objects. Otherwise,
8618 nothing has changed and we risk infinite recursion. */
8619 if (2 < ((var0 != 0) + (var1 != 0)
8620 + (con0 != 0) + (con1 != 0)
8621 + (lit0 != 0) + (lit1 != 0)
8622 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8624 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8625 if (code == MINUS_EXPR)
8628 var0 = associate_trees (var0, var1, code, type);
8629 con0 = associate_trees (con0, con1, code, type);
8630 lit0 = associate_trees (lit0, lit1, code, type);
8631 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8633 /* Preserve the MINUS_EXPR if the negative part of the literal is
8634 greater than the positive part. Otherwise, the multiplicative
8635 folding code (i.e. extract_muldiv) may be fooled in case
8636 unsigned constants are subtracted, like in the following
8637 example: ((X*2 + 4) - 8U)/2. */
8638 if (minus_lit0 && lit0)
8640 if (TREE_CODE (lit0) == INTEGER_CST
8641 && TREE_CODE (minus_lit0) == INTEGER_CST
8642 && tree_int_cst_lt (lit0, minus_lit0))
8644 minus_lit0 = associate_trees (minus_lit0, lit0,
8650 lit0 = associate_trees (lit0, minus_lit0,
8658 return fold_convert (type,
8659 associate_trees (var0, minus_lit0,
8663 con0 = associate_trees (con0, minus_lit0,
8665 return fold_convert (type,
8666 associate_trees (var0, con0,
8671 con0 = associate_trees (con0, lit0, code, type);
8672 return fold_convert (type, associate_trees (var0, con0,
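/* Illustrative note (added, not in the original source): for integral
   types, split_tree/associate_trees turn e.g. (x + 1) + (y + 2) into
   (x + y) + 3, grouping the literals so they fold into one constant.  */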
8680 /* A - (-B) -> A + B */
8681 if (TREE_CODE (arg1) == NEGATE_EXPR)
8682 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8683 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8684 if (TREE_CODE (arg0) == NEGATE_EXPR
8685 && (FLOAT_TYPE_P (type)
8686 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8687 && negate_expr_p (arg1)
8688 && reorder_operands_p (arg0, arg1))
8689 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8690 TREE_OPERAND (arg0, 0));
8691 /* Convert -A - 1 to ~A. */
8692 if (INTEGRAL_TYPE_P (type)
8693 && TREE_CODE (arg0) == NEGATE_EXPR
8694 && integer_onep (arg1))
8695 return fold_build1 (BIT_NOT_EXPR, type,
8696 fold_convert (type, TREE_OPERAND (arg0, 0)));
8698 /* Convert -1 - A to ~A. */
8699 if (INTEGRAL_TYPE_P (type)
8700 && integer_all_onesp (arg0))
8701 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8703 if (! FLOAT_TYPE_P (type))
8705 if (integer_zerop (arg0))
8706 return negate_expr (fold_convert (type, arg1));
8707 if (integer_zerop (arg1))
8708 return non_lvalue (fold_convert (type, arg0));
8710 /* Fold A - (A & B) into ~B & A. */
8711 if (!TREE_SIDE_EFFECTS (arg0)
8712 && TREE_CODE (arg1) == BIT_AND_EXPR)
8714 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8715 return fold_build2 (BIT_AND_EXPR, type,
8716 fold_build1 (BIT_NOT_EXPR, type,
8717 TREE_OPERAND (arg1, 0)),
8719 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8720 return fold_build2 (BIT_AND_EXPR, type,
8721 fold_build1 (BIT_NOT_EXPR, type,
8722 TREE_OPERAND (arg1, 1)),
8726 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8727 any power of 2 minus 1. */
8728 if (TREE_CODE (arg0) == BIT_AND_EXPR
8729 && TREE_CODE (arg1) == BIT_AND_EXPR
8730 && operand_equal_p (TREE_OPERAND (arg0, 0),
8731 TREE_OPERAND (arg1, 0), 0))
8733 tree mask0 = TREE_OPERAND (arg0, 1);
8734 tree mask1 = TREE_OPERAND (arg1, 1);
8735 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8737 if (operand_equal_p (tem, mask1, 0))
8739 tem = fold_build2 (BIT_XOR_EXPR, type,
8740 TREE_OPERAND (arg0, 0), mask1);
8741 return fold_build2 (MINUS_EXPR, type, tem, mask1);
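/* Illustrative note (added, not in the original source): for the fold
   above, take A = 0b0110 and B = 0b0011: (A & ~B) - (A & B) is
   0b0100 - 0b0010 == 2, and (A ^ B) - B is 0b0101 - 0b0011 == 2;
   A ^ B collects the same two disjoint bit groups that the ANDs
   select, so subtracting B recovers the difference.  */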
8746 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8747 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8748 return non_lvalue (fold_convert (type, arg0));
8750 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8751 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8752 (-ARG1 + ARG0) reduces to -ARG1. */
8753 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8754 return negate_expr (fold_convert (type, arg1));
8756 /* Fold &x - &x. This can happen from &x.foo - &x.
8757 This is unsafe for certain floats even in non-IEEE formats.
8758 In IEEE, it is unsafe because it gives the wrong result for NaNs.
8759 Also note that operand_equal_p is always false if an operand is volatile. */
8762 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8763 && operand_equal_p (arg0, arg1, 0))
8764 return fold_convert (type, integer_zero_node);
8766 /* A - B -> A + (-B) if B is easily negatable. */
8767 if (negate_expr_p (arg1)
8768 && ((FLOAT_TYPE_P (type)
8769 /* Avoid this transformation if B is a positive REAL_CST. */
8770 && (TREE_CODE (arg1) != REAL_CST
8771 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8772 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8773 return fold_build2 (PLUS_EXPR, type,
8774 fold_convert (type, arg0),
8775 fold_convert (type, negate_expr (arg1)));
8777 /* Try folding difference of addresses. */
8781 if ((TREE_CODE (arg0) == ADDR_EXPR
8782 || TREE_CODE (arg1) == ADDR_EXPR)
8783 && ptr_difference_const (arg0, arg1, &diff))
8784 return build_int_cst_type (type, diff);
8787 /* Fold &a[i] - &a[j] to (i-j), scaled by the element size. */
8788 if (TREE_CODE (arg0) == ADDR_EXPR
8789 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8790 && TREE_CODE (arg1) == ADDR_EXPR
8791 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8793 tree aref0 = TREE_OPERAND (arg0, 0);
8794 tree aref1 = TREE_OPERAND (arg1, 0);
8795 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8796 TREE_OPERAND (aref1, 0), 0))
8798 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8799 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8800 tree esz = array_ref_element_size (aref0);
8801 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8802 return fold_build2 (MULT_EXPR, type, diff,
8803 fold_convert (type, esz));
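/* Illustrative note (added, not in the original source): at this level
   the difference of the two addresses is in bytes, so for "int a[10];"
   with a 4-byte int, &a[i] - &a[j] folds to (i - j) * 4.  */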
8808 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8809 of the array. The loop optimizer sometimes produces this type of expression. */
8811 if (TREE_CODE (arg0) == ADDR_EXPR)
8813 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8815 return fold_convert (type, tem);
8818 if (flag_unsafe_math_optimizations
8819 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8820 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8821 && (tem = distribute_real_division (code, type, arg0, arg1)))
8824 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
8826 if ((TREE_CODE (arg0) == MULT_EXPR
8827 || TREE_CODE (arg1) == MULT_EXPR)
8828 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8830 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8838 /* (-A) * (-B) -> A * B */
8839 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8840 return fold_build2 (MULT_EXPR, type,
8841 TREE_OPERAND (arg0, 0),
8842 negate_expr (arg1));
8843 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8844 return fold_build2 (MULT_EXPR, type,
8846 TREE_OPERAND (arg1, 0));
8848 if (! FLOAT_TYPE_P (type))
8850 if (integer_zerop (arg1))
8851 return omit_one_operand (type, arg1, arg0);
8852 if (integer_onep (arg1))
8853 return non_lvalue (fold_convert (type, arg0));
8854 /* Transform x * -1 into -x. */
8855 if (integer_all_onesp (arg1))
8856 return fold_convert (type, negate_expr (arg0));
8858 /* (a * (1 << b)) is (a << b) */
8859 if (TREE_CODE (arg1) == LSHIFT_EXPR
8860 && integer_onep (TREE_OPERAND (arg1, 0)))
8861 return fold_build2 (LSHIFT_EXPR, type, arg0,
8862 TREE_OPERAND (arg1, 1));
8863 if (TREE_CODE (arg0) == LSHIFT_EXPR
8864 && integer_onep (TREE_OPERAND (arg0, 0)))
8865 return fold_build2 (LSHIFT_EXPR, type, arg1,
8866 TREE_OPERAND (arg0, 1));
8868 if (TREE_CODE (arg1) == INTEGER_CST
8869 && 0 != (tem = extract_muldiv (op0,
8870 fold_convert (type, arg1),
8872 return fold_convert (type, tem);
8874 /* Optimize z * conj(z) for integer complex numbers. */
8875 if (TREE_CODE (arg0) == CONJ_EXPR
8876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8877 return fold_mult_zconjz (type, arg1);
8878 if (TREE_CODE (arg1) == CONJ_EXPR
8879 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8880 return fold_mult_zconjz (type, arg0);
8884 /* Maybe fold x * 0 to 0. The expressions aren't the same
8885 when x is NaN, since x * 0 is also NaN. Nor are they the
8886 same in modes with signed zeros, since multiplying a
8887 negative value by 0 gives -0, not +0. */
8888 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8889 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8890 && real_zerop (arg1))
8891 return omit_one_operand (type, arg1, arg0);
8892 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8893 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8894 && real_onep (arg1))
8895 return non_lvalue (fold_convert (type, arg0));
8897 /* Transform x * -1.0 into -x. */
8898 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8899 && real_minus_onep (arg1))
8900 return fold_convert (type, negate_expr (arg0));
8902 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8903 if (flag_unsafe_math_optimizations
8904 && TREE_CODE (arg0) == RDIV_EXPR
8905 && TREE_CODE (arg1) == REAL_CST
8906 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8908 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8911 return fold_build2 (RDIV_EXPR, type, tem,
8912 TREE_OPERAND (arg0, 1));
8915 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8916 if (operand_equal_p (arg0, arg1, 0))
8918 tree tem = fold_strip_sign_ops (arg0);
8919 if (tem != NULL_TREE)
8921 tem = fold_convert (type, tem);
8922 return fold_build2 (MULT_EXPR, type, tem, tem);
8926 /* Optimize z * conj(z) for floating point complex numbers.
8927 Guarded by flag_unsafe_math_optimizations as non-finite
8928 imaginary components don't produce scalar results. */
8929 if (flag_unsafe_math_optimizations
8930 && TREE_CODE (arg0) == CONJ_EXPR
8931 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8932 return fold_mult_zconjz (type, arg1);
8933 if (flag_unsafe_math_optimizations
8934 && TREE_CODE (arg1) == CONJ_EXPR
8935 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8936 return fold_mult_zconjz (type, arg0);
8938 if (flag_unsafe_math_optimizations)
8940 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8941 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8943 /* Optimizations of root(...)*root(...). */
8944 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8946 tree rootfn, arg, arglist;
8947 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8948 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8950 /* Optimize sqrt(x)*sqrt(x) as x. */
8951 if (BUILTIN_SQRT_P (fcode0)
8952 && operand_equal_p (arg00, arg10, 0)
8953 && ! HONOR_SNANS (TYPE_MODE (type)))
8956 /* Optimize root(x)*root(y) as root(x*y). */
8957 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8958 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8959 arglist = build_tree_list (NULL_TREE, arg);
8960 return build_function_call_expr (rootfn, arglist);
8963 /* Optimize expN(x)*expN(y) as expN(x+y). */
8964 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8966 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8967 tree arg = fold_build2 (PLUS_EXPR, type,
8968 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8969 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8970 tree arglist = build_tree_list (NULL_TREE, arg);
8971 return build_function_call_expr (expfn, arglist);
8974 /* Optimizations of pow(...)*pow(...). */
8975 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8976 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8977 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8979 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8980 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8982 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8983 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8986 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8987 if (operand_equal_p (arg01, arg11, 0))
8989 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8990 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8991 tree arglist = tree_cons (NULL_TREE, arg,
8992 build_tree_list (NULL_TREE,
8994 return build_function_call_expr (powfn, arglist);
8997 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8998 if (operand_equal_p (arg00, arg10, 0))
9000 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9001 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9002 tree arglist = tree_cons (NULL_TREE, arg00,
9003 build_tree_list (NULL_TREE,
9005 return build_function_call_expr (powfn, arglist);
9009 /* Optimize tan(x)*cos(x) as sin(x). */
9010 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9011 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9012 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9013 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9014 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9015 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9016 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9017 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9019 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9021 if (sinfn != NULL_TREE)
9022 return build_function_call_expr (sinfn,
9023 TREE_OPERAND (arg0, 1));
9026 /* Optimize x*pow(x,c) as pow(x,c+1). */
9027 if (fcode1 == BUILT_IN_POW
9028 || fcode1 == BUILT_IN_POWF
9029 || fcode1 == BUILT_IN_POWL)
9031 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9032 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9034 if (TREE_CODE (arg11) == REAL_CST
9035 && ! TREE_CONSTANT_OVERFLOW (arg11)
9036 && operand_equal_p (arg0, arg10, 0))
9038 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9042 c = TREE_REAL_CST (arg11);
9043 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9044 arg = build_real (type, c);
9045 arglist = build_tree_list (NULL_TREE, arg);
9046 arglist = tree_cons (NULL_TREE, arg0, arglist);
9047 return build_function_call_expr (powfn, arglist);
9051 /* Optimize pow(x,c)*x as pow(x,c+1). */
9052 if (fcode0 == BUILT_IN_POW
9053 || fcode0 == BUILT_IN_POWF
9054 || fcode0 == BUILT_IN_POWL)
9056 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9057 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9059 if (TREE_CODE (arg01) == REAL_CST
9060 && ! TREE_CONSTANT_OVERFLOW (arg01)
9061 && operand_equal_p (arg1, arg00, 0))
9063 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9067 c = TREE_REAL_CST (arg01);
9068 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9069 arg = build_real (type, c);
9070 arglist = build_tree_list (NULL_TREE, arg);
9071 arglist = tree_cons (NULL_TREE, arg1, arglist);
9072 return build_function_call_expr (powfn, arglist);
9076 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9078 && operand_equal_p (arg0, arg1, 0))
9080 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9084 tree arg = build_real (type, dconst2);
9085 tree arglist = build_tree_list (NULL_TREE, arg);
9086 arglist = tree_cons (NULL_TREE, arg0, arglist);
9087 return build_function_call_expr (powfn, arglist);
9096 if (integer_all_onesp (arg1))
9097 return omit_one_operand (type, arg1, arg0);
9098 if (integer_zerop (arg1))
9099 return non_lvalue (fold_convert (type, arg0));
9100 if (operand_equal_p (arg0, arg1, 0))
9101 return non_lvalue (fold_convert (type, arg0));
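/* ~X | X is -1 (all bits set); likewise X | ~X just below.  */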
9104 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9105 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9107 t1 = build_int_cst (type, -1);
9108 t1 = force_fit_type (t1, 0, false, false);
9109 return omit_one_operand (type, t1, arg1);
9113 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9114 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9116 t1 = build_int_cst (type, -1);
9117 t1 = force_fit_type (t1, 0, false, false);
9118 return omit_one_operand (type, t1, arg0);
9121 /* Canonicalize (X & C1) | C2. */
9122 if (TREE_CODE (arg0) == BIT_AND_EXPR
9123 && TREE_CODE (arg1) == INTEGER_CST
9124 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9126 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9127 int width = TYPE_PRECISION (type);
9128 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9129 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9130 hi2 = TREE_INT_CST_HIGH (arg1);
9131 lo2 = TREE_INT_CST_LOW (arg1);
9133 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9134 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9135 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9137 if (width > HOST_BITS_PER_WIDE_INT)
9139 mhi = (unsigned HOST_WIDE_INT) -1
9140 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9146 mlo = (unsigned HOST_WIDE_INT) -1
9147 >> (HOST_BITS_PER_WIDE_INT - width);
9150 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9151 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9152 return fold_build2 (BIT_IOR_EXPR, type,
9153 TREE_OPERAND (arg0, 0), arg1);
9155 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9158 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9159 return fold_build2 (BIT_IOR_EXPR, type,
9160 fold_build2 (BIT_AND_EXPR, type,
9161 TREE_OPERAND (arg0, 0),
9162 build_int_cst_wide (type,
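/* Illustrative note (added, not in the original source): in 8-bit
   unsigned arithmetic, (x & 0x0f) | 0x43 is canonicalized by the code
   above to (x & 0x0c) | 0x43: bits of C1 that C2 already forces to 1
   are dropped from the AND mask.  */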
9168 /* (X & Y) | Y is (X, Y). */
9169 if (TREE_CODE (arg0) == BIT_AND_EXPR
9170 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9171 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9172 /* (X & Y) | X is (Y, X). */
9173 if (TREE_CODE (arg0) == BIT_AND_EXPR
9174 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9175 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9176 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9177 /* X | (X & Y) is (Y, X). */
9178 if (TREE_CODE (arg1) == BIT_AND_EXPR
9179 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9180 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9181 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9182 /* X | (Y & X) is (Y, X). */
9183 if (TREE_CODE (arg1) == BIT_AND_EXPR
9184 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9185 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9186 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9188 t1 = distribute_bit_expr (code, type, arg0, arg1);
9189 if (t1 != NULL_TREE)
9192 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9194 This results in more efficient code for machines without a NAND
9195 instruction. Combine will canonicalize to the first form
9196 which will allow use of NAND instructions provided by the
9197 backend if they exist. */
9198 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9199 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9201 return fold_build1 (BIT_NOT_EXPR, type,
9202 build2 (BIT_AND_EXPR, type,
9203 TREE_OPERAND (arg0, 0),
9204 TREE_OPERAND (arg1, 0)));
9207 /* See if this can be simplified into a rotate first. If that
9208 is unsuccessful continue in the association code. */
9212 if (integer_zerop (arg1))
9213 return non_lvalue (fold_convert (type, arg0));
9214 if (integer_all_onesp (arg1))
9215 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9216 if (operand_equal_p (arg0, arg1, 0))
9217 return omit_one_operand (type, integer_zero_node, arg0);
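/* ~X ^ X is -1, since the operands differ in every bit; likewise
   X ^ ~X below.  */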
9220 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9221 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9223 t1 = build_int_cst (type, -1);
9224 t1 = force_fit_type (t1, 0, false, false);
9225 return omit_one_operand (type, t1, arg1);
9229 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9230 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9232 t1 = build_int_cst (type, -1);
9233 t1 = force_fit_type (t1, 0, false, false);
9234 return omit_one_operand (type, t1, arg0);
9237 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9238 with a constant, and the two constants have no bits in common,
9239 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
9241 if (TREE_CODE (arg0) == BIT_AND_EXPR
9242 && TREE_CODE (arg1) == BIT_AND_EXPR
9243 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9244 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9245 && integer_zerop (const_binop (BIT_AND_EXPR,
9246 TREE_OPERAND (arg0, 1),
9247 TREE_OPERAND (arg1, 1), 0)))
9249 code = BIT_IOR_EXPR;
9253 /* (X | Y) ^ X -> Y & ~X. */
9254 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9257 tree t2 = TREE_OPERAND (arg0, 1);
9258 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9260 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9261 fold_convert (type, t1));
9265 /* (Y | X) ^ X -> Y & ~X. */
9266 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9267 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9269 tree t2 = TREE_OPERAND (arg0, 0);
9270 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9272 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9273 fold_convert (type, t1));
9277 /* X ^ (X | Y) -> Y & ~X. */
9278 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9279 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9281 tree t2 = TREE_OPERAND (arg1, 1);
9282 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9284 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9285 fold_convert (type, t1));
9289 /* X ^ (Y | X) -> Y & ~X. */
9290 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9291 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9293 tree t2 = TREE_OPERAND (arg1, 0);
9294 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9296 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9297 fold_convert (type, t1));
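/* Illustrative note (added, not in the original source): with
   X = 0b1100 and Y = 0b1010, (X | Y) ^ X is 0b1110 ^ 0b1100 == 0b0010,
   and Y & ~X is 0b1010 & 0b0011 == 0b0010: XORing with X strips the
   bits of X back out of the union.  */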
9301 /* Convert ~X ^ ~Y to X ^ Y. */
9302 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9303 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9304 return fold_build2 (code, type,
9305 fold_convert (type, TREE_OPERAND (arg0, 0)),
9306 fold_convert (type, TREE_OPERAND (arg1, 0)));
9308 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9309 if (TREE_CODE (arg0) == BIT_AND_EXPR
9310 && integer_onep (TREE_OPERAND (arg0, 1))
9311 && integer_onep (arg1))
9312 return fold_build2 (EQ_EXPR, type, arg0,
9313 build_int_cst (TREE_TYPE (arg0), 0));
9315 /* Fold (X & Y) ^ Y as ~X & Y. */
9316 if (TREE_CODE (arg0) == BIT_AND_EXPR
9317 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9319 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9320 return fold_build2 (BIT_AND_EXPR, type,
9321 fold_build1 (BIT_NOT_EXPR, type, tem),
9322 fold_convert (type, arg1));
9324 /* Fold (X & Y) ^ X as ~Y & X. */
9325 if (TREE_CODE (arg0) == BIT_AND_EXPR
9326 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9327 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9329 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9330 return fold_build2 (BIT_AND_EXPR, type,
9331 fold_build1 (BIT_NOT_EXPR, type, tem),
9332 fold_convert (type, arg1));
9334 /* Fold X ^ (X & Y) as X & ~Y. */
9335 if (TREE_CODE (arg1) == BIT_AND_EXPR
9336 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9338 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9339 return fold_build2 (BIT_AND_EXPR, type,
9340 fold_convert (type, arg0),
9341 fold_build1 (BIT_NOT_EXPR, type, tem));
9343 /* Fold X ^ (Y & X) as ~Y & X. */
9344 if (TREE_CODE (arg1) == BIT_AND_EXPR
9345 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9346 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9348 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9349 return fold_build2 (BIT_AND_EXPR, type,
9350 fold_build1 (BIT_NOT_EXPR, type, tem),
9351 fold_convert (type, arg0));
9354 /* See if this can be simplified into a rotate first. If that
9355 is unsuccessful continue in the association code. */
9359 if (integer_all_onesp (arg1))
9360 return non_lvalue (fold_convert (type, arg0));
9361 if (integer_zerop (arg1))
9362 return omit_one_operand (type, arg1, arg0);
9363 if (operand_equal_p (arg0, arg1, 0))
9364 return non_lvalue (fold_convert (type, arg0));
9366 /* ~X & X is always zero. */
9367 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9368 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9369 return omit_one_operand (type, integer_zero_node, arg1);
9371 /* X & ~X is always zero. */
9372 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9373 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9374 return omit_one_operand (type, integer_zero_node, arg0);
9376 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9377 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9378 && TREE_CODE (arg1) == INTEGER_CST
9379 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9380 return fold_build2 (BIT_IOR_EXPR, type,
9381 fold_build2 (BIT_AND_EXPR, type,
9382 TREE_OPERAND (arg0, 0), arg1),
9383 fold_build2 (BIT_AND_EXPR, type,
9384 TREE_OPERAND (arg0, 1), arg1));
9386 /* (X | Y) & Y is (X, Y). */
9387 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9388 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9389 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9390 /* (X | Y) & X is (Y, X). */
9391 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9392 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9393 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9394 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9395 /* X & (X | Y) is (Y, X). */
9396 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9397 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9398 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9399 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9400 /* X & (Y | X) is (Y, X). */
9401 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9402 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9403 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9404 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9406 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9407 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9408 && integer_onep (TREE_OPERAND (arg0, 1))
9409 && integer_onep (arg1))
9411 tem = TREE_OPERAND (arg0, 0);
9412 return fold_build2 (EQ_EXPR, type,
9413 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9414 build_int_cst (TREE_TYPE (tem), 1)),
9415 build_int_cst (TREE_TYPE (tem), 0));
9417 /* Fold ~X & 1 as (X & 1) == 0. */
9418 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9419 && integer_onep (arg1))
9421 tem = TREE_OPERAND (arg0, 0);
9422 return fold_build2 (EQ_EXPR, type,
9423 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9424 build_int_cst (TREE_TYPE (tem), 1)),
9425 build_int_cst (TREE_TYPE (tem), 0));
9428 /* Fold (X ^ Y) & Y as ~X & Y. */
9429 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9430 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9432 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9433 return fold_build2 (BIT_AND_EXPR, type,
9434 fold_build1 (BIT_NOT_EXPR, type, tem),
9435 fold_convert (type, arg1));
9437 /* Fold (X ^ Y) & X as ~Y & X. */
9438 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9439 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9440 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9442 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9443 return fold_build2 (BIT_AND_EXPR, type,
9444 fold_build1 (BIT_NOT_EXPR, type, tem),
9445 fold_convert (type, arg1));
9447 /* Fold X & (X ^ Y) as X & ~Y. */
9448 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9449 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9451 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9452 return fold_build2 (BIT_AND_EXPR, type,
9453 fold_convert (type, arg0),
9454 fold_build1 (BIT_NOT_EXPR, type, tem));
9456 /* Fold X & (Y ^ X) as ~Y & X. */
9457 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9458 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9459 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9461 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9462 return fold_build2 (BIT_AND_EXPR, type,
9463 fold_build1 (BIT_NOT_EXPR, type, tem),
9464 fold_convert (type, arg0));
9467 t1 = distribute_bit_expr (code, type, arg0, arg1);
9468 if (t1 != NULL_TREE)
9470 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9471 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9472 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9475 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9477 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9478 && (~TREE_INT_CST_LOW (arg1)
9479 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9480 return fold_convert (type, TREE_OPERAND (arg0, 0));
9483 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9485 This results in more efficient code for machines without a NOR
9486 instruction. Combine will canonicalize to the first form
9487 which will allow use of NOR instructions provided by the
9488 backend if they exist. */
9489 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9490 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9492 return fold_build1 (BIT_NOT_EXPR, type,
9493 build2 (BIT_IOR_EXPR, type,
9494 TREE_OPERAND (arg0, 0),
9495 TREE_OPERAND (arg1, 0)));
9501 /* Don't touch a floating-point divide by zero unless the mode
9502 of the constant can represent infinity. */
9503 if (TREE_CODE (arg1) == REAL_CST
9504 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9505 && real_zerop (arg1))
9508 /* Optimize A / A to 1.0 if we don't care about
9509 NaNs or Infinities. Skip the transformation
9510 for non-real operands. */
9511 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9512 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9513 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9514 && operand_equal_p (arg0, arg1, 0))
9516 tree r = build_real (TREE_TYPE (arg0), dconst1);
9518 return omit_two_operands (type, r, arg0, arg1);
9521 /* The complex version of the above A / A optimization. */
9522 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9523 && operand_equal_p (arg0, arg1, 0))
9525 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9526 if (! HONOR_NANS (TYPE_MODE (elem_type))
9527 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9529 tree r = build_real (elem_type, dconst1);
9530 /* omit_two_operands will call fold_convert for us. */
9531 return omit_two_operands (type, r, arg0, arg1);
9535 /* (-A) / (-B) -> A / B */
9536 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9537 return fold_build2 (RDIV_EXPR, type,
9538 TREE_OPERAND (arg0, 0),
9539 negate_expr (arg1));
9540 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9541 return fold_build2 (RDIV_EXPR, type,
9543 TREE_OPERAND (arg1, 0));
9545 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9546 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9547 && real_onep (arg1))
9548 return non_lvalue (fold_convert (type, arg0));
9550 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9551 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9552 && real_minus_onep (arg1))
9553 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9555 /* If ARG1 is a constant, we can convert this to a multiply by the
9556 reciprocal. This does not have the same rounding properties,
9557 so only do this if -funsafe-math-optimizations. We can actually
9558 always safely do it if ARG1 is a power of two, but it's hard to
9559 tell if it is or not in a portable manner. */
9560 if (TREE_CODE (arg1) == REAL_CST)
9562 if (flag_unsafe_math_optimizations
9563 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9565 return fold_build2 (MULT_EXPR, type, arg0, tem);
9566 /* Find the reciprocal if optimizing and the result is exact. */
9570 r = TREE_REAL_CST (arg1);
9571 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9573 tem = build_real (type, r);
9574 return fold_build2 (MULT_EXPR, type,
9575 fold_convert (type, arg0), tem);
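/* Illustrative note (added, not in the original source): when
   optimizing, x / 2.0 becomes x * 0.5 even without
   -funsafe-math-optimizations, because 0.5 is the exact reciprocal
   of 2.0; x / 3.0 is rewritten only under unsafe math, since
   1.0/3.0 is inexact.  */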
9579 /* Convert A/B/C to A/(B*C). */
9580 if (flag_unsafe_math_optimizations
9581 && TREE_CODE (arg0) == RDIV_EXPR)
9582 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9583 fold_build2 (MULT_EXPR, type,
9584 TREE_OPERAND (arg0, 1), arg1));
9586 /* Convert A/(B/C) to (A/B)*C. */
9587 if (flag_unsafe_math_optimizations
9588 && TREE_CODE (arg1) == RDIV_EXPR)
9589 return fold_build2 (MULT_EXPR, type,
9590 fold_build2 (RDIV_EXPR, type, arg0,
9591 TREE_OPERAND (arg1, 0)),
9592 TREE_OPERAND (arg1, 1));
9594 /* Convert C1/(X*C2) into (C1/C2)/X. */
9595 if (flag_unsafe_math_optimizations
9596 && TREE_CODE (arg1) == MULT_EXPR
9597 && TREE_CODE (arg0) == REAL_CST
9598 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9600 tree tem = const_binop (RDIV_EXPR, arg0,
9601 TREE_OPERAND (arg1, 1), 0);
9603 return fold_build2 (RDIV_EXPR, type, tem,
9604 TREE_OPERAND (arg1, 0));
9607 if (flag_unsafe_math_optimizations)
9609 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9610 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9612 /* Optimize sin(x)/cos(x) as tan(x). */
9613 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9614 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9615 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9616 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9617 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9619 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9621 if (tanfn != NULL_TREE)
9622 return build_function_call_expr (tanfn,
9623 TREE_OPERAND (arg0, 1));
9626 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9627 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9628 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9629 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9630 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9631 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9633 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9635 if (tanfn != NULL_TREE)
9637 tree tmp = TREE_OPERAND (arg0, 1);
9638 tmp = build_function_call_expr (tanfn, tmp);
9639 return fold_build2 (RDIV_EXPR, type,
9640 build_real (type, dconst1), tmp);
9644 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9645 NaNs or Infinities. */
9646 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9647 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9648 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9650 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9651 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9653 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9654 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9655 && operand_equal_p (arg00, arg01, 0))
9657 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9659 if (cosfn != NULL_TREE)
9660 return build_function_call_expr (cosfn,
9661 TREE_OPERAND (arg0, 1));
9665 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9666 NaNs or Infinities. */
9667 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9668 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9669 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9671 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9672 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9674 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9675 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9676 && operand_equal_p (arg00, arg01, 0))
9678 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9680 if (cosfn != NULL_TREE)
9682 tree tmp = TREE_OPERAND (arg0, 1);
9683 tmp = build_function_call_expr (cosfn, tmp);
9684 return fold_build2 (RDIV_EXPR, type,
9685 build_real (type, dconst1),
9691 /* Optimize pow(x,c)/x as pow(x,c-1). */
9692 if (fcode0 == BUILT_IN_POW
9693 || fcode0 == BUILT_IN_POWF
9694 || fcode0 == BUILT_IN_POWL)
9696 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9697 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9698 if (TREE_CODE (arg01) == REAL_CST
9699 && ! TREE_CONSTANT_OVERFLOW (arg01)
9700 && operand_equal_p (arg1, arg00, 0))
9702 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9706 c = TREE_REAL_CST (arg01);
9707 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9708 arg = build_real (type, c);
9709 arglist = build_tree_list (NULL_TREE, arg);
9710 arglist = tree_cons (NULL_TREE, arg1, arglist);
9711 return build_function_call_expr (powfn, arglist);
9715 /* Optimize x/expN(y) into x*expN(-y). */
9716 if (BUILTIN_EXPONENT_P (fcode1))
9718 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9719 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9720 tree arglist = build_tree_list (NULL_TREE,
9721 fold_convert (type, arg));
9722 arg1 = build_function_call_expr (expfn, arglist);
9723 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9726 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9727 if (fcode1 == BUILT_IN_POW
9728 || fcode1 == BUILT_IN_POWF
9729 || fcode1 == BUILT_IN_POWL)
9731 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9732 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9733 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9734 tree neg11 = fold_convert (type, negate_expr (arg11));
9735 tree arglist = tree_cons (NULL_TREE, arg10,
9736 build_tree_list (NULL_TREE, neg11));
9737 arg1 = build_function_call_expr (powfn, arglist);
9738 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9743 case TRUNC_DIV_EXPR:
9744 case FLOOR_DIV_EXPR:
9745 /* Simplify A / (B << N) where A and B are positive and B is
9746 a power of 2, to A >> (N + log2(B)). */
9747 if (TREE_CODE (arg1) == LSHIFT_EXPR
9748 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9750 tree sval = TREE_OPERAND (arg1, 0);
9751 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9753 tree sh_cnt = TREE_OPERAND (arg1, 1);
9754 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9756 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9757 sh_cnt, build_int_cst (NULL_TREE, pow2));
9758 return fold_build2 (RSHIFT_EXPR, type,
9759 fold_convert (type, arg0), sh_cnt);
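/* Illustrative note (added, not in the original source): for unsigned
   x, x / (4u << n) folds to x >> (n + 2) by the transformation above,
   since log2(4) == 2.  */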
9764 case ROUND_DIV_EXPR:
9766 case EXACT_DIV_EXPR:
9767 if (integer_onep (arg1))
9768 return non_lvalue (fold_convert (type, arg0));
9769 if (integer_zerop (arg1))
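/* X / -1 is -X for signed types.  */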
9772 if (!TYPE_UNSIGNED (type)
9773 && TREE_CODE (arg1) == INTEGER_CST
9774 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9775 && TREE_INT_CST_HIGH (arg1) == -1)
9776 return fold_convert (type, negate_expr (arg0));
9778 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
9780 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9781 && TREE_CODE (arg0) == NEGATE_EXPR
9782 && negate_expr_p (arg1))
9783 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9784 negate_expr (arg1));
9785 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9786 && TREE_CODE (arg1) == NEGATE_EXPR
9787 && negate_expr_p (arg0))
9788 return fold_build2 (code, type, negate_expr (arg0),
9789 TREE_OPERAND (arg1, 0));
9791 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9792 operation, EXACT_DIV_EXPR.
9794 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9795 At one time others generated faster code; it's not clear if they do
9796 after the last round of changes to the DIV code in expmed.c. */
9797 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9798 && multiple_of_p (type, arg0, arg1))
9799 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9801 if (TREE_CODE (arg1) == INTEGER_CST
9802 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9803 return fold_convert (type, tem);
9808 case FLOOR_MOD_EXPR:
9809 case ROUND_MOD_EXPR:
9810 case TRUNC_MOD_EXPR:
9811 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
9813 if (integer_onep (arg1))
9814 return omit_one_operand (type, integer_zero_node, arg0);
9816 /* For X % 0, return X % 0 unchanged so that we can get the
9817 proper warnings and errors. */
9818 if (integer_zerop (arg1))
9821 /* 0 % X is always zero, but be sure to preserve any side
9822 effects in X. Place this after checking for X == 0. */
9823 if (integer_zerop (arg0))
9824 return omit_one_operand (type, integer_zero_node, arg1);
9826 /* X % -1 is zero. */
9827 if (!TYPE_UNSIGNED (type)
9828 && TREE_CODE (arg1) == INTEGER_CST
9829 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9830 && TREE_INT_CST_HIGH (arg1) == -1)
9831 return omit_one_operand (type, integer_zero_node, arg0);
9833 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9834 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9835 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9836 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9839 /* Also optimize A % (C << N) where C is a power of 2,
9840 to A & ((C << N) - 1). */
9841 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9842 c = TREE_OPERAND (arg1, 0);
9844 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9846 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9847 arg1, integer_one_node);
9848 return fold_build2 (BIT_AND_EXPR, type,
9849 fold_convert (type, arg0),
9850 fold_convert (type, mask));
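/* Illustrative note (added, not in the original source): for unsigned
   x, x % 8 folds to x & 7, and x % (2u << n) folds to
   x & ((2u << n) - 1).  */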
9854 /* X % -C is the same as X % C. */
9855 if (code == TRUNC_MOD_EXPR
9856 && !TYPE_UNSIGNED (type)
9857 && TREE_CODE (arg1) == INTEGER_CST
9858 && !TREE_CONSTANT_OVERFLOW (arg1)
9859 && TREE_INT_CST_HIGH (arg1) < 0
9861 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9862 && !sign_bit_p (arg1, arg1))
9863 return fold_build2 (code, type, fold_convert (type, arg0),
9864 fold_convert (type, negate_expr (arg1)));
9866 /* X % -Y is the same as X % Y. */
9867 if (code == TRUNC_MOD_EXPR
9868 && !TYPE_UNSIGNED (type)
9869 && TREE_CODE (arg1) == NEGATE_EXPR
9871 return fold_build2 (code, type, fold_convert (type, arg0),
9872 fold_convert (type, TREE_OPERAND (arg1, 0)));
9874 if (TREE_CODE (arg1) == INTEGER_CST
9875 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9876 return fold_convert (type, tem);
9882 if (integer_all_onesp (arg0))
9883 return omit_one_operand (type, arg0, arg1);
9887 /* Optimize -1 >> x for arithmetic right shifts. */
9888 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9889 return omit_one_operand (type, arg0, arg1);
9890 /* ... fall through ... */
9894 if (integer_zerop (arg1))
9895 return non_lvalue (fold_convert (type, arg0));
9896 if (integer_zerop (arg0))
9897 return omit_one_operand (type, arg0, arg1);
9899 /* Since a negative shift count is not well-defined,
9900 don't try to compute it in the compiler. */
9901 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9904 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9905 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9906 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9907 && host_integerp (TREE_OPERAND (arg0, 1), false)
9908 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9910 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9911 + TREE_INT_CST_LOW (arg1));
9913 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9914 being well defined. */
9915 if (low >= TYPE_PRECISION (type))
9917 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9918 low = low % TYPE_PRECISION (type);
9919 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9920 return build_int_cst (type, 0);
9922 low = TYPE_PRECISION (type) - 1;
9925 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9926 build_int_cst (type, low));
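/* Illustrative note (added, not in the original source): (x << 3) << 5
   folds to x << 8 here; if the combined count reaches the precision,
   e.g. (x << 20) << 20 for a 32-bit unsigned x, the result folds
   to 0.  */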
9929 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9930 into x & ((unsigned)-1 >> c) for unsigned types. */
9931 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9932 || (TYPE_UNSIGNED (type)
9933 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9934 && host_integerp (arg1, false)
9935 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9936 && host_integerp (TREE_OPERAND (arg0, 1), false)
9937 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9939 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9940 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9946 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9948 lshift = build_int_cst (type, -1);
9949 lshift = int_const_binop (code, lshift, arg1, 0);
9951 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9955 /* Rewrite an LROTATE_EXPR by a constant into an
9956 RROTATE_EXPR by a new constant. */
9957 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9959 tree tem = build_int_cst (NULL_TREE,
9960 GET_MODE_BITSIZE (TYPE_MODE (type)));
9961 tem = fold_convert (TREE_TYPE (arg1), tem);
9962 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9963 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
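/* Illustrative note (added, not in the original source): rotating left
   by C equals rotating right by width - C, so on a 32-bit type an
   LROTATE_EXPR by 5 becomes an RROTATE_EXPR by 27.  */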
9966 /* If we have a rotate of a bit operation with the rotate count and
9967 the second operand of the bit operation both constant,
9968 permute the two operations. */
9969 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9970 && (TREE_CODE (arg0) == BIT_AND_EXPR
9971 || TREE_CODE (arg0) == BIT_IOR_EXPR
9972 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9974 return fold_build2 (TREE_CODE (arg0), type,
9975 fold_build2 (code, type,
9976 TREE_OPERAND (arg0, 0), arg1),
9977 fold_build2 (code, type,
9978 TREE_OPERAND (arg0, 1), arg1));
9980 /* Two consecutive rotates adding up to the width of the mode can be ignored. */
9982 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9983 && TREE_CODE (arg0) == RROTATE_EXPR
9984 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9985 && TREE_INT_CST_HIGH (arg1) == 0
9986 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9987 && ((TREE_INT_CST_LOW (arg1)
9988 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9989 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9990 return TREE_OPERAND (arg0, 0);
9995 if (operand_equal_p (arg0, arg1, 0))
9996 return omit_one_operand (type, arg0, arg1);
9997 if (INTEGRAL_TYPE_P (type)
9998 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9999 return omit_one_operand (type, arg1, arg0);
10000 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
10017 case TRUTH_ANDIF_EXPR:
10018 /* Note that the operands of this must be ints
10019 and their values must be 0 or 1.
10020 ("true" is a fixed value perhaps depending on the language.) */
10021 /* If first arg is constant zero, return it. */
10022 if (integer_zerop (arg0))
10023 return fold_convert (type, arg0);
10024 case TRUTH_AND_EXPR:
10025 /* If either arg is constant true, drop it. */
10026 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10027 return non_lvalue (fold_convert (type, arg1));
10028 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10029 /* Preserve sequence points. */
10030 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10031 return non_lvalue (fold_convert (type, arg0));
10032 /* If second arg is constant zero, result is zero, but first arg
10033 must be evaluated. */
10034 if (integer_zerop (arg1))
10035 return omit_one_operand (type, arg1, arg0);
10036 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10037 case will be handled here. */
10038 if (integer_zerop (arg0))
10039 return omit_one_operand (type, arg0, arg1);
10041 /* !X && X is always false. */
10042 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10044 return omit_one_operand (type, integer_zero_node, arg1);
10045 /* X && !X is always false. */
10046 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10047 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10048 return omit_one_operand (type, integer_zero_node, arg0);
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */
10054 if (!TREE_SIDE_EFFECTS (arg0)
10055 && !TREE_SIDE_EFFECTS (arg1))
10057 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10058 if (tem && !operand_equal_p (tem, arg0, 0))
10059 return fold_build2 (code, type, tem, arg1);
10061 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10062 if (tem && !operand_equal_p (tem, arg1, 0))
10063 return fold_build2 (code, type, arg0, tem);
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;
10071 /* Check for things like (A || B) && (A || C). We can convert this
10072 to A || (B && C). Note that either operator can be any of the four
10073 truth and/or operations and the transformation will still be
10074 valid. Also note that we only care about order for the
10075 ANDIF and ORIF operators. If B contains side effects, this
10076 might change the truth-value of A. */
10077 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10078 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10079 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10080 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10081 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10082 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10084 tree a00 = TREE_OPERAND (arg0, 0);
10085 tree a01 = TREE_OPERAND (arg0, 1);
10086 tree a10 = TREE_OPERAND (arg1, 0);
10087 tree a11 = TREE_OPERAND (arg1, 1);
10088 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10089 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10090 && (code == TRUTH_AND_EXPR
10091 || code == TRUTH_OR_EXPR));
10093 if (operand_equal_p (a00, a10, 0))
10094 return fold_build2 (TREE_CODE (arg0), type, a00,
10095 fold_build2 (code, type, a01, a11));
10096 else if (commutative && operand_equal_p (a00, a11, 0))
10097 return fold_build2 (TREE_CODE (arg0), type, a00,
10098 fold_build2 (code, type, a01, a10));
10099 else if (commutative && operand_equal_p (a01, a10, 0))
10100 return fold_build2 (TREE_CODE (arg0), type, a01,
10101 fold_build2 (code, type, a00, a11));
	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */
10106 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10107 && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
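      /* Editor's note -- illustrative example, not in the original source:
	 (a || b) && (a || c) folds to a || (b && c), and (a && b) ||
	 (a && c) folds to a && (b || c), provided the shared operand can be
	 factored out without reordering side effects.  */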
10113 /* See if we can build a range comparison. */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;
10117 /* Check for the possibility of merging component references. If our
10118 lhs is another similar operation, try to merge its rhs with our
10119 rhs. Then try to merge our lhs and rhs. */
10120 if (TREE_CODE (arg0) == code
10121 && 0 != (tem = fold_truthop (code, type,
10122 TREE_OPERAND (arg0, 1), arg1)))
10123 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
10130 case TRUTH_ORIF_EXPR:
10131 /* Note that the operands of this must be ints
10132 and their values must be 0 or true.
10133 ("true" is a fixed value perhaps depending on the language.) */
10134 /* If first arg is constant true, return it. */
10135 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10136 return fold_convert (type, arg0);
10137 case TRUTH_OR_EXPR:
10138 /* If either arg is constant zero, drop it. */
10139 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10140 return non_lvalue (fold_convert (type, arg1));
10141 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10142 /* Preserve sequence points. */
10143 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10144 return non_lvalue (fold_convert (type, arg0));
10145 /* If second arg is constant true, result is true, but we must
10146 evaluate first arg. */
10147 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10148 return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
10151 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10152 return omit_one_operand (type, arg0, arg1);
10154 /* !X || X is always true. */
10155 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10157 return omit_one_operand (type, integer_one_node, arg1);
10158 /* X || !X is always true. */
10159 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10161 return omit_one_operand (type, integer_one_node, arg0);
10165 case TRUTH_XOR_EXPR:
10166 /* If the second arg is constant zero, drop it. */
10167 if (integer_zerop (arg1))
10168 return non_lvalue (fold_convert (type, arg0));
10169 /* If the second arg is constant true, this is a logical inversion. */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
10179 /* Identical arguments cancel to zero. */
10180 if (operand_equal_p (arg0, arg1, 0))
10181 return omit_one_operand (type, integer_zero_node, arg0);
10183 /* !X ^ X is always true. */
10184 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10185 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10186 return omit_one_operand (type, integer_one_node, arg1);
10188 /* X ^ !X is always true. */
10189 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10190 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10191 return omit_one_operand (type, integer_one_node, arg0);
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
10201 /* bool_var != 0 becomes bool_var. */
10202 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10203 && code == NE_EXPR)
10204 return non_lvalue (fold_convert (type, arg0));
10206 /* bool_var == 1 becomes bool_var. */
10207 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10208 && code == EQ_EXPR)
10209 return non_lvalue (fold_convert (type, arg0));
10211 /* bool_var != 1 becomes !bool_var. */
10212 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10213 && code == NE_EXPR)
10214 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10216 /* bool_var == 0 becomes !bool_var. */
10217 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10218 && code == EQ_EXPR)
10219 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10221 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10222 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10223 && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
					 arg1));
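      /* Editor's note -- worked example, not in the original source:
	 ~a == 5 folds to a == ~5, i.e. a == -6 in two's complement, so the
	 BIT_NOT disappears from the runtime test.  */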
10228 /* If this is an equality comparison of the address of a non-weak
10229 object against zero, then we know the result. */
10230 if (TREE_CODE (arg0) == ADDR_EXPR
10231 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10232 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10233 && integer_zerop (arg1))
10234 return constant_boolean_node (code != EQ_EXPR, type);
10236 /* If this is an equality comparison of the address of two non-weak,
10237 unaliased symbols neither of which are extern (since we do not
10238 have access to attributes for externs), then we know the result. */
10239 if (TREE_CODE (arg0) == ADDR_EXPR
10240 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10241 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10242 && ! lookup_attribute ("alias",
10243 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10244 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10245 && TREE_CODE (arg1) == ADDR_EXPR
10246 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10247 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10248 && ! lookup_attribute ("alias",
10249 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10250 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10252 /* We know that we're looking at the address of two
10253 non-weak, unaliased, static _DECL nodes.
10255 It is both wasteful and incorrect to call operand_equal_p
10256 to compare the two ADDR_EXPR nodes. It is wasteful in that
10257 all we need to do is test pointer equality for the arguments
10258 to the two ADDR_EXPR nodes. It is incorrect to use
10259 operand_equal_p as that function is NOT equivalent to a
10260 C equality test. It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
10269 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10270 a MINUS_EXPR of a constant, we can convert it into a comparison with
10271 a revised constant as long as no overflow occurs. */
10272 if (TREE_CODE (arg1) == INTEGER_CST
10273 && (TREE_CODE (arg0) == PLUS_EXPR
10274 || TREE_CODE (arg0) == MINUS_EXPR)
10275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10276 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10277 ? MINUS_EXPR : PLUS_EXPR,
10278 arg1, TREE_OPERAND (arg0, 1), 0))
10279 && ! TREE_CONSTANT_OVERFLOW (tem))
10280 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
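      /* Editor's note -- worked example, not in the original source:
	 x + 3 == 10 folds to x == 7 and x - 3 == 10 folds to x == 13,
	 as long as computing the adjusted constant does not overflow.  */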
10282 /* Similarly for a NEGATE_EXPR. */
10283 if (TREE_CODE (arg0) == NEGATE_EXPR
10284 && TREE_CODE (arg1) == INTEGER_CST
10285 && 0 != (tem = negate_expr (arg1))
10286 && TREE_CODE (tem) == INTEGER_CST
10287 && ! TREE_CONSTANT_OVERFLOW (tem))
10288 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10290 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10291 for !=. Don't do this for ordered comparisons due to overflow. */
10292 if (TREE_CODE (arg0) == MINUS_EXPR
10293 && integer_zerop (arg1))
10294 return fold_build2 (code, type,
10295 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10297 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10298 if (TREE_CODE (arg0) == ABS_EXPR
10299 && (integer_zerop (arg1) || real_zerop (arg1)))
10300 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10302 /* If this is an EQ or NE comparison with zero and ARG0 is
10303 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10304 two operations, but the latter can be done in one less insn
10305 on machines that have only two-operand insns or on which a
10306 constant cannot be the first operand. */
10307 if (TREE_CODE (arg0) == BIT_AND_EXPR
10308 && integer_zerop (arg1))
10310 tree arg00 = TREE_OPERAND (arg0, 0);
10311 tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					   arg01, TREE_OPERAND (arg00, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					   arg00, TREE_OPERAND (arg01, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
10334 /* If this is an NE or EQ comparison of zero against the result of a
10335 signed MOD operation whose second operand is a power of 2, make
10336 the MOD operation unsigned since it is simpler and equivalent. */
10337 if (integer_zerop (arg1)
10338 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10339 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10340 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10341 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10342 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10343 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10345 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10346 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10347 fold_convert (newtype,
10348 TREE_OPERAND (arg0, 0)),
10349 fold_convert (newtype,
10350 TREE_OPERAND (arg0, 1)));
10352 return fold_build2 (code, type, newmod,
10353 fold_convert (newtype, arg1));
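      /* Editor's note -- illustrative example, not in the original source:
	 for signed x, x % 4 == 0 holds exactly when the low two bits of x
	 are zero, which is what (unsigned) x % 4u == 0 also tests, so the
	 simpler unsigned MOD is used.  */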
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
10359 if (TREE_CODE (arg0) == BIT_AND_EXPR
10360 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
10363 && integer_pow2p (TREE_OPERAND (arg0, 1))
10364 && integer_zerop (arg1))
10366 tree itype = TREE_TYPE (arg0);
10367 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10368 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10370 /* Check for a valid shift count. */
10371 if (TREE_INT_CST_HIGH (arg001) == 0
10372 && TREE_INT_CST_LOW (arg001) < prec)
10374 tree arg01 = TREE_OPERAND (arg0, 1);
10375 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10376 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10377 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10378 can be rewritten as (X & (C2 << C1)) != 0. */
10379 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10381 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10382 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10383 return fold_build2 (code, type, tem, arg1);
10385 /* Otherwise, for signed (arithmetic) shifts,
10386 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10387 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10388 else if (!TYPE_UNSIGNED (itype))
10389 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10390 arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
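      /* Editor's note -- worked example, not in the original source: with
	 C1 == 2 and C2 == 4, ((x >> 2) & 4) != 0 becomes (x & 16) != 0
	 since 4 << 2 == 16 does not overflow; when the tested bit lands on
	 (or beyond) the sign bit of a signed x, the test instead becomes
	 x < 0 (for !=) or x >= 0 (for ==).  */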
10402 /* If this is an NE comparison of zero with an AND of one, remove the
10403 comparison since the AND will give the correct value. */
10404 if (code == NE_EXPR
10405 && integer_zerop (arg1)
10406 && TREE_CODE (arg0) == BIT_AND_EXPR
10407 && integer_onep (TREE_OPERAND (arg0, 1)))
10408 return fold_convert (type, arg0);
10410 /* If we have (A & C) == C where C is a power of 2, convert this into
10411 (A & C) != 0. Similarly for NE_EXPR. */
10412 if (TREE_CODE (arg0) == BIT_AND_EXPR
10413 && integer_pow2p (TREE_OPERAND (arg0, 1))
10414 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10415 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10416 arg0, fold_convert (TREE_TYPE (arg0),
10417 integer_zero_node));
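      /* Editor's note -- illustrative example, not in the original source:
	 (x & 8) == 8 folds to (x & 8) != 0 and (x & 8) != 8 folds to
	 (x & 8) == 0, valid because 8 has a single bit set.  */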
10419 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10420 bit, then fold the expression into A < 0 or A >= 0. */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
10425 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10426 Similarly for NE_EXPR. */
10427 if (TREE_CODE (arg0) == BIT_AND_EXPR
10428 && TREE_CODE (arg1) == INTEGER_CST
10429 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10431 tree notc = fold_build1 (BIT_NOT_EXPR,
10432 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10433 TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
10436 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10437 if (integer_nonzerop (dandnotc))
10438 return omit_one_operand (type, rslt, arg0);
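      /* Editor's note -- worked example, not in the original source:
	 (x & 6) == 9 is always false, because 9 & ~6 == 9 != 0: bits 0 and
	 3 of the constant 9 can never survive the mask 6.  */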
10441 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10442 Similarly for NE_EXPR. */
10443 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10444 && TREE_CODE (arg1) == INTEGER_CST
10445 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10447 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10448 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10449 TREE_OPERAND (arg0, 1), notd);
10450 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10451 if (integer_nonzerop (candnotd))
10452 return omit_one_operand (type, rslt, arg0);
10455 /* If this is a comparison of a field, we may be able to simplify it. */
10456 if (((TREE_CODE (arg0) == COMPONENT_REF
10457 && lang_hooks.can_use_bit_fields_p ())
10458 || TREE_CODE (arg0) == BIT_FIELD_REF)
10459 /* Handle the constant case even without -O
10460 to make sure the warnings are given. */
10461 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
10468 /* Optimize comparisons of strlen vs zero to a compare of the
10469 first character of the string vs zero. To wit,
10470 strlen(ptr) == 0 => *ptr == 0
10471 strlen(ptr) != 0 => *ptr != 0
10472 Other cases should reduce to one of these two (or a constant)
10473 due to the return value of strlen being unsigned. */
10474 if (TREE_CODE (arg0) == CALL_EXPR
10475 && integer_zerop (arg1))
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10482 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10483 && (arglist = TREE_OPERAND (arg0, 1))
10484 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10485 && ! TREE_CHAIN (arglist))
10487 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10488 return fold_build2 (code, type, iref,
10489 build_int_cst (TREE_TYPE (iref), 0));
10493 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10494 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10495 if (TREE_CODE (arg0) == RSHIFT_EXPR
10496 && integer_zerop (arg1)
10497 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10499 tree arg00 = TREE_OPERAND (arg0, 0);
10500 tree arg01 = TREE_OPERAND (arg0, 1);
10501 tree itype = TREE_TYPE (arg00);
10502 if (TREE_INT_CST_HIGH (arg01) == 0
10503 && TREE_INT_CST_LOW (arg01)
10504 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10506 if (TYPE_UNSIGNED (itype))
10508 itype = lang_hooks.types.signed_type (itype);
10509 arg00 = fold_convert (itype, arg00);
10511 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10512 type, arg00, build_int_cst (itype, 0));
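      /* Editor's note -- illustrative example, not in the original source:
	 for 32-bit x, (x >> 31) != 0 folds to x < 0 and (x >> 31) == 0
	 folds to x >= 0; an unsigned x is first converted to the signed
	 type so the sign test is meaningful.  */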
10516 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10517 if (integer_zerop (arg1)
10518 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10519 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10520 TREE_OPERAND (arg0, 1));
10522 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10523 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10524 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10525 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10526 build_int_cst (TREE_TYPE (arg1), 0));
10527 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10528 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10529 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10530 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10531 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10532 build_int_cst (TREE_TYPE (arg1), 0));
10534 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10535 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10536 && TREE_CODE (arg1) == INTEGER_CST
10537 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10538 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10539 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10540 TREE_OPERAND (arg0, 1), arg1));
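      /* Editor's note -- worked example, not in the original source:
	 since 5 ^ 3 == 6, the test (x ^ 5) == 3 folds to x == 6, moving
	 the constant XOR out of the comparison.  */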
10542 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10543 (X & C) == 0 when C is a single bit. */
10544 if (TREE_CODE (arg0) == BIT_AND_EXPR
10545 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10546 && integer_zerop (arg1)
10547 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10549 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10550 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10551 TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, build_int_cst (TREE_TYPE (tem), 0));
10556 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10557 constant C is a power of two, i.e. a single bit. */
10558 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10559 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10560 && integer_zerop (arg1)
10561 && integer_pow2p (TREE_OPERAND (arg0, 1))
10562 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10563 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10565 tree arg00 = TREE_OPERAND (arg0, 0);
10566 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10567 arg00, build_int_cst (TREE_TYPE (arg00), 0));
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
10572 if (TREE_CODE (arg0) == BIT_AND_EXPR
10573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10574 && integer_zerop (arg1)
10575 && integer_pow2p (TREE_OPERAND (arg0, 1))
10576 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10577 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10579 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10580 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10581 arg000, TREE_OPERAND (arg0, 1));
10582 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10583 tem, build_int_cst (TREE_TYPE (tem), 0));
10586 if (integer_zerop (arg1)
10587 && tree_expr_nonzero_p (arg0))
	  tree res = constant_boolean_node (code == NE_EXPR, type);
10590 return omit_one_operand (type, res, arg0);
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
10602 /* Transform comparisons of the form X +- C CMP X. */
10603 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10604 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10605 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10606 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10607 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10608 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10609 && !(flag_wrapv || flag_trapv))))
10611 tree arg01 = TREE_OPERAND (arg0, 1);
10612 enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
10616 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);
10620 /* (X - c) > X becomes false. */
10621 if (code == GT_EXPR
10622 && ((code0 == MINUS_EXPR && is_positive >= 0)
10623 || (code0 == PLUS_EXPR && is_positive <= 0)))
10624 return constant_boolean_node (0, type);
10626 /* Likewise (X + c) < X becomes false. */
10627 if (code == LT_EXPR
10628 && ((code0 == PLUS_EXPR && is_positive >= 0)
10629 || (code0 == MINUS_EXPR && is_positive <= 0)))
10630 return constant_boolean_node (0, type);
10632 /* Convert (X - c) <= X to true. */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10636 || (code0 == PLUS_EXPR && is_positive <= 0)))
10637 return constant_boolean_node (1, type);
10639 /* Convert (X + c) >= X to true. */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
10642 && ((code0 == PLUS_EXPR && is_positive >= 0)
10643 || (code0 == MINUS_EXPR && is_positive <= 0)))
10644 return constant_boolean_node (1, type);
10646 if (TREE_CODE (arg01) == INTEGER_CST)
10648 /* Convert X + c > X and X - c < X to true for integers. */
10649 if (code == GT_EXPR
10650 && ((code0 == PLUS_EXPR && is_positive > 0)
10651 || (code0 == MINUS_EXPR && is_positive < 0)))
10652 return constant_boolean_node (1, type);
10654 if (code == LT_EXPR
10655 && ((code0 == MINUS_EXPR && is_positive > 0)
10656 || (code0 == PLUS_EXPR && is_positive < 0)))
10657 return constant_boolean_node (1, type);
10659 /* Convert X + c <= X and X - c >= X to false for integers. */
10660 if (code == LE_EXPR
10661 && ((code0 == PLUS_EXPR && is_positive > 0)
10662 || (code0 == MINUS_EXPR && is_positive < 0)))
10663 return constant_boolean_node (0, type);
10665 if (code == GE_EXPR
10666 && ((code0 == MINUS_EXPR && is_positive > 0)
10667 || (code0 == PLUS_EXPR && is_positive < 0)))
10668 return constant_boolean_node (0, type);
10672 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10673 This transformation affects the cases which are handled in later
10674 optimizations involving comparisons with non-negative constants. */
10675 if (TREE_CODE (arg1) == INTEGER_CST
10676 && TREE_CODE (arg0) != INTEGER_CST
10677 && tree_int_cst_sgn (arg1) > 0)
10679 if (code == GE_EXPR)
10681 arg1 = const_binop (MINUS_EXPR, arg1,
10682 build_int_cst (TREE_TYPE (arg1), 1), 0);
10683 return fold_build2 (GT_EXPR, type, arg0,
10684 fold_convert (TREE_TYPE (arg0), arg1));
10686 if (code == LT_EXPR)
10688 arg1 = const_binop (MINUS_EXPR, arg1,
10689 build_int_cst (TREE_TYPE (arg1), 1), 0);
10690 return fold_build2 (LE_EXPR, type, arg0,
10691 fold_convert (TREE_TYPE (arg0), arg1));
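      /* Editor's note -- illustrative example, not in the original source:
	 x >= 5 becomes x > 4 and x < 5 becomes x <= 4, canonicalizing
	 toward the forms the following constant-boundary checks expect.  */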
10695 /* Comparisons with the highest or lowest possible integer of
10696 the specified size will have known values. */
10698 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10700 if (TREE_CODE (arg1) == INTEGER_CST
10701 && ! TREE_CONSTANT_OVERFLOW (arg1)
10702 && width <= 2 * HOST_BITS_PER_WIDE_INT
10703 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10704 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10706 HOST_WIDE_INT signed_max_hi;
10707 unsigned HOST_WIDE_INT signed_max_lo;
10708 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10710 if (width <= HOST_BITS_PER_WIDE_INT)
	      signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10717 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10719 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10725 max_lo = signed_max_lo;
10726 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10732 width -= HOST_BITS_PER_WIDE_INT;
10733 signed_max_lo = -1;
	      signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
10739 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10741 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10746 max_hi = signed_max_hi;
10747 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	  if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
	      && TREE_INT_CST_LOW (arg1) == max_lo)
	    switch (code)
	      {
	      case GT_EXPR:
		return omit_one_operand (type, integer_zero_node, arg0);
	      case GE_EXPR:
		return fold_build2 (EQ_EXPR, type, arg0, arg1);
	      case LE_EXPR:
		return omit_one_operand (type, integer_one_node, arg0);
	      case LT_EXPR:
		return fold_build2 (NE_EXPR, type, arg0, arg1);

	      /* The GE_EXPR and LT_EXPR cases above are not normally
		 reached because of previous transformations.  */
	      default:
		break;
	      }
	  else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		      == max_hi
		   && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	    switch (code)
	      {
	      case GT_EXPR:
		arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (EQ_EXPR, type, arg0, arg1);
	      case LE_EXPR:
		arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (NE_EXPR, type, arg0, arg1);
	      default:
		break;
	      }
	  else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		      == min_hi
		   && TREE_INT_CST_LOW (arg1) == min_lo)
	    switch (code)
	      {
	      case LT_EXPR:
		return omit_one_operand (type, integer_zero_node, arg0);
	      case LE_EXPR:
		return fold_build2 (EQ_EXPR, type, arg0, arg1);
	      case GE_EXPR:
		return omit_one_operand (type, integer_one_node, arg0);
	      case GT_EXPR:
		return fold_build2 (NE_EXPR, type, op0, op1);
	      default:
		break;
	      }
	  else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		      == min_hi
		   && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	    switch (code)
	      {
	      case GE_EXPR:
		arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (NE_EXPR, type, arg0, arg1);
	      case LT_EXPR:
		arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		return fold_build2 (EQ_EXPR, type, arg0, arg1);
	      default:
		break;
	      }
10822 else if (!in_gimple_form
10823 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10824 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10825 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10826 /* signed_type does not work on pointer types. */
10827 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      /* The following case also applies to X < signed_max+1
		 and X >= signed_max+1 because of previous transformations.  */
	      if (code == LE_EXPR || code == GT_EXPR)
		{
		  tree st0, st1;
		  st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
		  st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10836 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10837 type, fold_convert (st0, arg0),
10838 build_int_cst (st1, 0));
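      /* Editor's note -- illustrative example, not in the original source:
	 for 32-bit unsigned x, x <= 0x7fffffff folds to (int) x >= 0 and
	 x > 0x7fffffff folds to (int) x < 0, replacing a comparison against
	 a large constant with a sign test.  */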
10844 /* If we are comparing an ABS_EXPR with a constant, we can
10845 convert all the cases into explicit comparisons, but they may
10846 well not be faster than doing the ABS and one comparison.
10847 But ABS (X) <= C is a range comparison, which becomes a subtraction
10848 and a comparison, and is probably faster. */
10849 if (code == LE_EXPR
10850 && TREE_CODE (arg1) == INTEGER_CST
10851 && TREE_CODE (arg0) == ABS_EXPR
10852 && ! TREE_SIDE_EFFECTS (arg0)
10853 && (0 != (tem = negate_expr (arg1)))
10854 && TREE_CODE (tem) == INTEGER_CST
10855 && ! TREE_CONSTANT_OVERFLOW (tem))
10856 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10857 build2 (GE_EXPR, type,
10858 TREE_OPERAND (arg0, 0), tem),
10859 build2 (LE_EXPR, type,
10860 TREE_OPERAND (arg0, 0), arg1));
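      /* Editor's note -- illustrative example, not in the original source:
	 abs (x) <= 5 folds to x >= -5 && x <= 5, a range test that later
	 passes may implement as (unsigned) (x + 5) <= 10.  */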
10862 /* Convert ABS_EXPR<x> >= 0 to true. */
10863 if (code == GE_EXPR
10864 && tree_expr_nonnegative_p (arg0)
10865 && (integer_zerop (arg1)
10866 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10867 && real_zerop (arg1))))
10868 return omit_one_operand (type, integer_one_node, arg0);
10870 /* Convert ABS_EXPR<x> < 0 to false. */
10871 if (code == LT_EXPR
10872 && tree_expr_nonnegative_p (arg0)
10873 && (integer_zerop (arg1) || real_zerop (arg1)))
10874 return omit_one_operand (type, integer_zero_node, arg0);
10876 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10877 and similarly for >= into !=. */
10878 if ((code == LT_EXPR || code == GE_EXPR)
10879 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10880 && TREE_CODE (arg1) == LSHIFT_EXPR
10881 && integer_onep (TREE_OPERAND (arg1, 0)))
10882 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10883 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10884 TREE_OPERAND (arg1, 1)),
10885 build_int_cst (TREE_TYPE (arg0), 0));
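      /* Editor's note -- illustrative example, not in the original source:
	 for unsigned x, x < (1 << y) folds to (x >> y) == 0 and
	 x >= (1 << y) folds to (x >> y) != 0, turning the range test into
	 a zero test on the bits above position y.  */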
10887 if ((code == LT_EXPR || code == GE_EXPR)
10888 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10889 && (TREE_CODE (arg1) == NOP_EXPR
10890 || TREE_CODE (arg1) == CONVERT_EXPR)
10891 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10895 fold_convert (TREE_TYPE (arg0),
10896 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
10899 build_int_cst (TREE_TYPE (arg0), 0));
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
10911 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10913 t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
10918 /* If the first operand is NaN, the result is constant. */
10919 if (TREE_CODE (arg0) == REAL_CST
10920 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10921 && (code != LTGT_EXPR || ! flag_trapping_math))
10923 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10924 ? integer_zero_node
10925 : integer_one_node;
10926 return omit_one_operand (type, t1, arg1);
10929 /* If the second operand is NaN, the result is constant. */
10930 if (TREE_CODE (arg1) == REAL_CST
10931 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10932 && (code != LTGT_EXPR || ! flag_trapping_math))
10934 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10935 ? integer_zero_node
10936 : integer_one_node;
10937 return omit_one_operand (type, t1, arg0);
10940 /* Simplify unordered comparison of something with itself. */
10941 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10942 && operand_equal_p (arg0, arg1, 0))
10943 return constant_boolean_node (1, type);
10945 if (code == LTGT_EXPR
10946 && !flag_trapping_math
10947 && operand_equal_p (arg0, arg1, 0))
10948 return constant_boolean_node (0, type);
10950 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10952 tree targ0 = strip_float_extensions (arg0);
10953 tree targ1 = strip_float_extensions (arg1);
10954 tree newtype = TREE_TYPE (targ0);
10956 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10957 newtype = TREE_TYPE (targ1);
10959 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10960 return fold_build2 (code, type, fold_convert (newtype, targ0),
10961 fold_convert (newtype, targ1));
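      /* Editor's note -- illustrative example, not in the original source:
	 with floats f and g, (double) f < (double) g folds to f < g, since
	 widening both operands cannot change the result of the
	 comparison.  */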
10966 case COMPOUND_EXPR:
10967 /* When pedantic, a compound expression can be neither an lvalue
10968 nor an integer constant expression. */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
10971 /* Don't let (0, 0) be null pointer constant. */
10972 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10973 : fold_convert (type, arg1);
10974 return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
10978 && TREE_CODE (arg1) == REAL_CST)
10979 || (TREE_CODE (arg0) == INTEGER_CST
10980 && TREE_CODE (arg1) == INTEGER_CST))
10981 return build_complex (type, arg0, arg1);
    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
10993 /* Callback for walk_tree, looking for LABEL_EXPR.
10994 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10995 Do not check the sub-tree of GOTO_EXPR. */
static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* Fall through: a GOTO_EXPR's sub-tree is not scanned.  */
    default:
      return NULL_TREE;
    }
11014 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11015 accessible from outside the sub-tree. Returns NULL_TREE if no
11016 addressable label is found. */
static bool
contains_label_p (tree st)
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
11024 /* Fold a ternary expression of code CODE and type TYPE with operands
11025 OP0, OP1, and OP2. Return the folded expression if folding is
11026 successful. Otherwise, return NULL_TREE. */
tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11032 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11033 enum tree_code_class kind = TREE_CODE_CLASS (code);
11035 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11036 && TREE_CODE_LENGTH (code) == 3);
11038 /* Strip any conversions that don't change the mode. This is safe
11039 for every expression, except for a comparison expression because
11040 its signedness is derived from its operands. So, in the latter
11041 case, only strip conversions that don't change the signedness.
11043 Note that this is done as an internal manipulation within the
11044 constant folder, in order to find the simplest representation of
11045 the arguments so that their form can be studied. In any cases,
11046 the appropriate type conversions should be put back in the tree
11047 that will get out of the constant folder. */
11062 case COMPONENT_REF:
11063 if (TREE_CODE (arg0) == CONSTRUCTOR
11064 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
11077 if (TREE_CODE (arg0) == INTEGER_CST)
11079 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11080 tem = integer_zerop (arg0) ? op2 : op1;
11081 /* Only optimize constant conditions when the selected branch
11082 has the same type as the COND_EXPR. This avoids optimizing
11083 away "c ? x : throw", where the throw has a void type.
11084 Avoid throwing away that operand which contains label. */
11085 if ((!TREE_SIDE_EFFECTS (unused_op)
11086 || !contains_label_p (unused_op))
11087 && (! VOID_TYPE_P (TREE_TYPE (tem))
11088 || VOID_TYPE_P (type)))
11089 return pedantic_non_lvalue (tem);
11092 if (operand_equal_p (arg1, op2, 0))
11093 return pedantic_omit_one_operand (type, arg1, arg0);
11095 /* If we have A op B ? A : C, we may be able to convert this to a
11096 simpler expression, depending on the operation and the values
11097 of B and C. Signed zeros prevent all of these transformations,
11098 for reasons given above each one.
11100 Also try swapping the arguments and inverting the conditional. */
11101 if (COMPARISON_CLASS_P (arg0)
11102 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11103 arg1, TREE_OPERAND (arg0, 1))
11104 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}
11111 if (COMPARISON_CLASS_P (arg0)
11112 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2, TREE_OPERAND (arg0, 1))
11115 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11117 tem = invert_truthvalue (arg0);
11118 if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
11126 /* If the second operand is simpler than the third, swap them
11127 since that produces better jump optimization results. */
11128 if (truth_value_p (TREE_CODE (arg0))
11129 && tree_swap_operands_p (op1, op2, false))
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);
11136 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11137 return fold_build3 (code, type, tem, op2, op1);
11140 /* Convert A ? 1 : 0 to simply A. */
11141 if (integer_onep (op1)
11142 && integer_zerop (op2)
11143 /* If we try to convert OP0 to our type, the
11144 call to fold will try to move the conversion inside
11145 a COND, which will recurse. In that case, the COND_EXPR
11146 is probably the best choice, so leave it alone. */
11147 && type == TREE_TYPE (arg0))
11148 return pedantic_non_lvalue (arg0);
11150 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11151 over COND_EXPR in cases such as floating point comparisons. */
11152 if (integer_zerop (op1)
11153 && integer_onep (op2)
11154 && truth_value_p (TREE_CODE (arg0)))
11155 return pedantic_non_lvalue (fold_convert (type,
11156 invert_truthvalue (arg0)));
11158 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11159 if (TREE_CODE (arg0) == LT_EXPR
11160 && integer_zerop (TREE_OPERAND (arg0, 1))
11161 && integer_zerop (op2)
11162 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11163 return fold_convert (type,
11164 fold_build2 (BIT_AND_EXPR,
11165 TREE_TYPE (tem), tem,
11166 fold_convert (TREE_TYPE (tem), arg1)));
11168 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11169 already handled above. */
11170 if (TREE_CODE (arg0) == BIT_AND_EXPR
11171 && integer_onep (TREE_OPERAND (arg0, 1))
11172 && integer_zerop (op2)
11173 && integer_pow2p (arg1))
11175 tree tem = TREE_OPERAND (arg0, 0);
11177 if (TREE_CODE (tem) == RSHIFT_EXPR
11178 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11179 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11180 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11181 return fold_build2 (BIT_AND_EXPR, type,
11182 TREE_OPERAND (tem, 0), arg1);
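      /* Editor's note -- worked example, not in the original source: with
	 N == 3, ((a >> 3) & 1) ? 8 : 0 folds to a & 8, because the
	 condition tests exactly bit 3 of a and the selected value is
	 exactly that bit.  */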
11185 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11186 is probably obsolete because the first operand should be a
11187 truth value (that's why we have the two cases above), but let's
11188 leave it in until we can confirm this for all front-ends. */
11189 if (integer_zerop (op2)
11190 && TREE_CODE (arg0) == NE_EXPR
11191 && integer_zerop (TREE_OPERAND (arg0, 1))
11192 && integer_pow2p (arg1)
11193 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11194 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11195 arg1, OEP_ONLY_CONST))
11196 return pedantic_non_lvalue (fold_convert (type,
11197 TREE_OPERAND (arg0, 0)));
11199 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11200 if (integer_zerop (op2)
11201 && truth_value_p (TREE_CODE (arg0))
11202 && truth_value_p (TREE_CODE (arg1)))
11203 return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);
11207 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11208 if (integer_onep (op2)
11209 && truth_value_p (TREE_CODE (arg0))
11210 && truth_value_p (TREE_CODE (arg1)))
11212 /* Only perform transformation if ARG0 is easily inverted. */
11213 tem = invert_truthvalue (arg0);
11214 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11215 return fold_build2 (TRUTH_ORIF_EXPR, type,
11216 fold_convert (type, tem),
11220 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11221 if (integer_zerop (arg1)
11222 && truth_value_p (TREE_CODE (arg0))
11223 && truth_value_p (TREE_CODE (op2)))
11225 /* Only perform transformation if ARG0 is easily inverted. */
11226 tem = invert_truthvalue (arg0);
11227 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11228 return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
11233 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11234 if (integer_onep (arg1)
11235 && truth_value_p (TREE_CODE (arg0))
11236 && truth_value_p (TREE_CODE (op2)))
11237 return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);
    case CALL_EXPR:
      /* Check for a built-in function.  */
11245 if (TREE_CODE (op0) == ADDR_EXPR
11246 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11247 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11248 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11251 case BIT_FIELD_REF:
11252 if (TREE_CODE (arg0) == VECTOR_CST
11253 && type == TREE_TYPE (TREE_TYPE (arg0))
11254 && host_integerp (arg1, 1)
11255 && host_integerp (op2, 1))
11257 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11258 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11262 && (idx % width) == 0
11263 && (idx = idx / width)
11264 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11266 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11267 while (idx-- > 0 && elements)
11268 elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
    default:
      return NULL_TREE;
    } /* switch (code) */
11282 /* Perform constant folding and related simplification of EXPR.
11283 The related simplifications include x*1 => x, x*0 => 0, etc.,
11284 and application of the associative law.
11285 NOP_EXPR conversions may be removed freely (as long as we
11286 are careful not to change the type of the overall expression).
11287 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11288 but we can constant-fold them if they have constant operands. */
11290 #ifdef ENABLE_FOLD_CHECKING
11291 # define fold(x) fold_1 (x)
static tree fold_1 (tree);
#endif

tree
fold (tree expr)
{
  const tree t = expr;
11299 enum tree_code code = TREE_CODE (t);
11300 enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;
11307 if (IS_EXPR_CODE_CLASS (kind))
11309 tree type = TREE_TYPE (t);
11310 tree op0, op1, op2;
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
11341 } /* switch (code) */
#ifdef ENABLE_FOLD_CHECKING
#undef fold
11347 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11348 static void fold_check_failed (tree, tree);
11349 void print_fold_checksum (tree);
11351 /* When --enable-checking=fold, compute a digest of expr before
11352 and after actual fold call to see if fold did not accidentally
11353 change original expr. */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;
11363 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11364 md5_init_ctx (&ctx);
11365 fold_checksum_tree (expr, &ctx, ht);
11366 md5_finish_ctx (&ctx, checksum_before);
11369 ret = fold_1 (expr);
11371 md5_init_ctx (&ctx);
11372 fold_checksum_tree (expr, &ctx, ht);
11373 md5_finish_ctx (&ctx, checksum_after);
11376 if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
11385 struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;
11389 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11390 md5_init_ctx (&ctx);
11391 fold_checksum_tree (expr, &ctx, ht);
11392 md5_finish_ctx (&ctx, checksum);
11394 for (cnt = 0; cnt < 16; ++cnt)
11395 fprintf (stderr, "%02x", checksum[cnt]);
11396 putc ('\n', stderr);
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11402 internal_error ("fold check: original tree changed by fold");
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:
11415 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11416 <= sizeof (struct tree_function_decl))
11417 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
11424 code = TREE_CODE (expr);
11425 if (TREE_CODE_CLASS (code) == tcc_declaration
11426 && DECL_ASSEMBLER_NAME_SET_P (expr))
11428 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11429 memcpy ((char *) &buf, expr, tree_size (expr));
11430 expr = (tree) &buf;
11431 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11433 else if (TREE_CODE_CLASS (code) == tcc_type
11434 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11435 || TYPE_CACHED_VALUES_P (expr)
11436 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11438 /* Allow these fields to be modified. */
11439 memcpy ((char *) &buf, expr, tree_size (expr));
11440 expr = (tree) &buf;
11441 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11442 TYPE_POINTER_TO (expr) = NULL;
11443 TYPE_REFERENCE_TO (expr) = NULL;
11444 if (TYPE_CACHED_VALUES_P (expr))
11446 TYPE_CACHED_VALUES_P (expr) = 0;
11447 TYPE_CACHED_VALUES (expr) = NULL;
11450 md5_process_bytes (expr, tree_size (expr), ctx);
11451 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11452 if (TREE_CODE_CLASS (code) != tcc_type
11453 && TREE_CODE_CLASS (code) != tcc_declaration
11454 && code != TREE_LIST)
11455 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11456 switch (TREE_CODE_CLASS (code))
11462 md5_process_bytes (TREE_STRING_POINTER (expr),
11463 TREE_STRING_LENGTH (expr), ctx);
11466 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11467 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11470 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11476 case tcc_exceptional:
11480 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11481 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11482 expr = TREE_CHAIN (expr);
11483 goto recursive_label;
11486 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11487 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11493 case tcc_expression:
11494 case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
11499 len = TREE_CODE_LENGTH (code);
11500 for (i = 0; i < len; ++i)
11501 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11503 case tcc_declaration:
11504 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11505 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11506 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11508 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11509 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11510 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11511 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11512 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11514 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11515 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11517 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11519 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11520 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11521 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11525 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11526 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11527 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11528 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11529 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11530 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11531 if (INTEGRAL_TYPE_P (expr)
11532 || SCALAR_FLOAT_TYPE_P (expr))
11534 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11535 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11537 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11538 if (TREE_CODE (expr) == RECORD_TYPE
11539 || TREE_CODE (expr) == UNION_TYPE
11540 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11541 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11542 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11551 /* Fold a unary tree expression with code CODE of type TYPE with an
11552 operand OP0. Return a folded expression if successful. Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
11557 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11560 #ifdef ENABLE_FOLD_CHECKING
11561 unsigned char checksum_before[16], checksum_after[16];
11562 struct md5_ctx ctx;
11565 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11566 md5_init_ctx (&ctx);
11567 fold_checksum_tree (op0, &ctx, ht);
11568 md5_finish_ctx (&ctx, checksum_before);
11572 tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11576 #ifdef ENABLE_FOLD_CHECKING
11577 md5_init_ctx (&ctx);
11578 fold_checksum_tree (op0, &ctx, ht);
11579 md5_finish_ctx (&ctx, checksum_after);
11582 if (memcmp (checksum_before, checksum_after, 16))
11583 fold_check_failed (op0, tem);
11588 /* Fold a binary tree expression with code CODE of type TYPE with
11589 operands OP0 and OP1. Return a folded expression if successful.
11590 Otherwise, return a tree expression with code CODE of type TYPE
11591 with operands OP0 and OP1. */
tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
11598 #ifdef ENABLE_FOLD_CHECKING
11599 unsigned char checksum_before_op0[16],
11600 checksum_before_op1[16],
11601 checksum_after_op0[16],
11602 checksum_after_op1[16];
11603 struct md5_ctx ctx;
11606 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11607 md5_init_ctx (&ctx);
11608 fold_checksum_tree (op0, &ctx, ht);
11609 md5_finish_ctx (&ctx, checksum_before_op0);
11612 md5_init_ctx (&ctx);
11613 fold_checksum_tree (op1, &ctx, ht);
11614 md5_finish_ctx (&ctx, checksum_before_op1);
11618 tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11622 #ifdef ENABLE_FOLD_CHECKING
11623 md5_init_ctx (&ctx);
11624 fold_checksum_tree (op0, &ctx, ht);
11625 md5_finish_ctx (&ctx, checksum_after_op0);
11628 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11629 fold_check_failed (op0, tem);
11631 md5_init_ctx (&ctx);
11632 fold_checksum_tree (op1, &ctx, ht);
11633 md5_finish_ctx (&ctx, checksum_after_op1);
11636 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11637 fold_check_failed (op1, tem);
11642 /* Fold a ternary tree expression with code CODE of type TYPE with
11643 operands OP0, OP1, and OP2. Return a folded expression if
11644 successful. Otherwise, return a tree expression with code CODE of
11645 type TYPE with operands OP0, OP1, and OP2. */
tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
11652 #ifdef ENABLE_FOLD_CHECKING
11653 unsigned char checksum_before_op0[16],
11654 checksum_before_op1[16],
11655 checksum_before_op2[16],
11656 checksum_after_op0[16],
11657 checksum_after_op1[16],
11658 checksum_after_op2[16];
11659 struct md5_ctx ctx;
11662 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11663 md5_init_ctx (&ctx);
11664 fold_checksum_tree (op0, &ctx, ht);
11665 md5_finish_ctx (&ctx, checksum_before_op0);
11668 md5_init_ctx (&ctx);
11669 fold_checksum_tree (op1, &ctx, ht);
11670 md5_finish_ctx (&ctx, checksum_before_op1);
11673 md5_init_ctx (&ctx);
11674 fold_checksum_tree (op2, &ctx, ht);
11675 md5_finish_ctx (&ctx, checksum_before_op2);
11679 tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11683 #ifdef ENABLE_FOLD_CHECKING
11684 md5_init_ctx (&ctx);
11685 fold_checksum_tree (op0, &ctx, ht);
11686 md5_finish_ctx (&ctx, checksum_after_op0);
11689 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11690 fold_check_failed (op0, tem);
11692 md5_init_ctx (&ctx);
11693 fold_checksum_tree (op1, &ctx, ht);
11694 md5_finish_ctx (&ctx, checksum_after_op1);
11697 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11698 fold_check_failed (op1, tem);
11700 md5_init_ctx (&ctx);
11701 fold_checksum_tree (op2, &ctx, ht);
11702 md5_finish_ctx (&ctx, checksum_after_op2);
11705 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11706 fold_check_failed (op2, tem);
11711 /* Perform constant folding and related simplification of initializer
11712 expression EXPR. These behave identically to "fold_buildN" but ignore
11713 potential run-time traps and exceptions that fold must preserve. */
11715 #define START_FOLD_INIT \
11716 int saved_signaling_nans = flag_signaling_nans;\
11717 int saved_trapping_math = flag_trapping_math;\
11718 int saved_rounding_math = flag_rounding_math;\
11719 int saved_trapv = flag_trapv;\
11720 int saved_folding_initializer = folding_initializer;\
11721 flag_signaling_nans = 0;\
11722 flag_trapping_math = 0;\
11723 flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;
11727 #define END_FOLD_INIT \
11728 flag_signaling_nans = saved_signaling_nans;\
11729 flag_trapping_math = saved_trapping_math;\
11730 flag_rounding_math = saved_rounding_math;\
11731 flag_trapv = saved_trapv;\
11732 folding_initializer = saved_folding_initializer;
tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}
tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}
tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}
11771 #undef START_FOLD_INIT
11772 #undef END_FOLD_INIT
11774 /* Determine if first argument is a multiple of second argument. Return 0 if
   it is not, or we cannot easily determine it to be.
11777 An example of the sort of thing we care about (at this point; this routine
11778 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11779 fold cases do now) is discovering that
     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)
11787 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11789 This code also handles discovering that
11791 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11793 is a multiple of 8 so we don't have to worry about dealing with a
11794 possible remainder.
11796 Note that we *look* inside a SAVE_EXPR only to determine how it was
11797 calculated; it is not safe for fold to do much of anything else with the
11798 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11799 at run time. For example, the latter example above *cannot* be implemented
11800 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11801 evaluation time of the original SAVE_EXPR is not necessarily the same at
11802 the time the new expression is evaluated. The only optimization of this
11803 sort that would be valid is changing
11805 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11809 SAVE_EXPR (I) * SAVE_EXPR (J)
11811 (where the same SAVE_EXPR (J) is used in the original and the
11812 transformed version). */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
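
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): asking whether I * 8 is a multiple of 4 in
   sizetype.  multiple_of_p answers from the shape of the tree alone;
   it never evaluates SAVE_EXPR internals.  */

static int ATTRIBUTE_UNUSED
example_is_multiple_of_4 (tree i)
{
  tree top = size_binop (MULT_EXPR, fold_convert (sizetype, i),
                         size_int (8));
  /* Nonzero: every multiple of 8 is a multiple of 4, via the
     MULT_EXPR and INTEGER_CST cases above.  */
  return multiple_of_p (sizetype, top, size_int (4));
}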

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
        return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and the sum of their precisions is smaller than
         the precision of the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
            CASE_INT_FN (BUILT_IN_FFS):
            CASE_INT_FN (BUILT_IN_PARITY):
            CASE_INT_FN (BUILT_IN_POPCOUNT):
              /* Always true.  */
              return 1;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_ASINH):
            CASE_FLT_FN (BUILT_IN_ATAN):
            CASE_FLT_FN (BUILT_IN_ATANH):
            CASE_FLT_FN (BUILT_IN_CBRT):
            CASE_FLT_FN (BUILT_IN_CEIL):
            CASE_FLT_FN (BUILT_IN_ERF):
            CASE_FLT_FN (BUILT_IN_EXPM1):
            CASE_FLT_FN (BUILT_IN_FLOOR):
            CASE_FLT_FN (BUILT_IN_FMOD):
            CASE_FLT_FN (BUILT_IN_FREXP):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LDEXP):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
            CASE_FLT_FN (BUILT_IN_LLRINT):
            CASE_FLT_FN (BUILT_IN_LLROUND):
            CASE_FLT_FN (BUILT_IN_LRINT):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_MODF):
            CASE_FLT_FN (BUILT_IN_NEARBYINT):
            CASE_FLT_FN (BUILT_IN_POW):
            CASE_FLT_FN (BUILT_IN_RINT):
            CASE_FLT_FN (BUILT_IN_ROUND):
            CASE_FLT_FN (BUILT_IN_SIGNBIT):
            CASE_FLT_FN (BUILT_IN_SINH):
            CASE_FLT_FN (BUILT_IN_TANH):
            CASE_FLT_FN (BUILT_IN_TRUNC):
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_FMAX):
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_FMIN):
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_COPYSIGN):
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
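
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): the predicate is conservative, so callers may
   only rely on a nonzero answer.  */

static int ATTRIBUTE_UNUSED
example_square_is_nonnegative (tree x)
{
  /* x * x is known non-negative for floats via the MULT_EXPR case;
     for signed integers the predicate stays conservative and may
     return 0.  */
  tree sq = fold_build2 (MULT_EXPR, TREE_TYPE (x), x, x);
  return tree_expr_nonnegative_p (sq);
}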

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work
         correctly if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* In the presence of negative values it is hard
             to say anything definite.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              || tree_expr_nonzero_p (TREE_OPERAND (t, 0)));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
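
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): the address of a non-weak declaration is
   known to be nonzero, which lets fold simplify comparisons like
   "&var != 0" to a constant.  */

static bool ATTRIBUTE_UNUSED
example_addr_known_nonzero (tree decl)
{
  /* True unless DECL is weak: a weak symbol may resolve to NULL.  */
  return tree_expr_nonzero_p (build_fold_addr_expr (decl));
}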

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying
   TYPE, OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
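
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): these entry points are handy in passes that
   must not build new expressions unless the result is a constant.  */

static tree ATTRIBUTE_UNUSED
example_fold_constant_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Returns the INTEGER_CST 5; returns NULL_TREE rather than a new
     PLUS_EXPR if the operands do not fold.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}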

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
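
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): folding "abc"[1] down to the character
   constant 'b'.  The STRING_CST is given an explicit array type so
   that array_ref_low_bound and the mode checks above succeed.  */

static tree ATTRIBUTE_UNUSED
example_fold_string_index (void)
{
  tree str = build_string (4, "abc");
  tree array_type = build_array_type (char_type_node,
                                      build_index_type (size_int (3)));
  tree aref;

  TREE_TYPE (str) = array_type;
  aref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
                 NULL_TREE, NULL_TREE);

  /* Returns the INTEGER_CST 'b'; NULL for any non-constant access.  */
  return fold_read_from_constant_string (aref);
}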

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
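
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): negating the most negative value of a signed
   TYPE wraps, so the returned constant carries TREE_OVERFLOW for
   later diagnostics.  */

static tree ATTRIBUTE_UNUSED
example_negate_type_min (tree type)
{
  /* For signed TYPE, -TYPE_MIN_VALUE is not representable, and
     force_fit_type marks the overflow on the result.  */
  return fold_negate_const (TYPE_MIN_VALUE (type), type);
}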

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
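
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): comparing two INTEGER_CSTs yields a boolean
   constant immediately.  */

static tree ATTRIBUTE_UNUSED
example_fold_constant_compare (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* 2 < 3: returns constant_boolean_node (1, boolean_type_node).  */
  return fold_relational_const (LT_EXPR, boolean_type_node, two, three);
}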

/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either doesn't have side effects set, we don't need
     to wrap the expression in a cleanup point expression.  Note we don't check
     the left hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its
     address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}
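
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): dereferencing a fresh ADDR_EXPR folds straight
   back to the object, so no INDIRECT_REF is built.  */

static tree ATTRIBUTE_UNUSED
example_fold_deref_of_addr (tree var)
{
  /* *&var => var, via the ADDR_EXPR case in fold_indirect_ref_1.  */
  return build_fold_indirect_ref (build_fold_addr_expr (var));
}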

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;

  return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
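
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): aligning a size to a 16-byte boundary.
   Because 16 is a power of two, this folds to (size + 15) & -16
   rather than a division.  */

static tree ATTRIBUTE_UNUSED
example_align_to_16 (tree size)
{
  return round_up (fold_convert (sizetype, size), 16);
}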

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
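
/* Illustrative sketch, not part of the original file (the example_*
   name is hypothetical): &a[3] - &a[1] has a constant byte difference
   whenever ARRAY is an object with a constant element size.  */

static bool ATTRIBUTE_UNUSED
example_constant_ptr_diff (tree array, HOST_WIDE_INT *diff)
{
  tree elt = TREE_TYPE (TREE_TYPE (array));
  tree a3 = build_fold_addr_expr (build4 (ARRAY_REF, elt, array,
                                          size_int (3), NULL_TREE, NULL_TREE));
  tree a1 = build_fold_addr_expr (build4 (ARRAY_REF, elt, array,
                                          size_int (1), NULL_TREE, NULL_TREE));

  /* On success, *DIFF holds twice the element size, in bytes.  */
  return ptr_difference_const (a3, a1, diff);
}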

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   was possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));