1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
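/* As a rough usage sketch (illustrative only, not taken from any particular
   caller): size_binop (PLUS_EXPR, size_int (4), size_int (8)) combines two
   `sizetype' constants and, both operands being INTEGER_CSTs, folds
   immediately to a `sizetype' constant 12.  */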
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* Non-zero if we are folding constants inside an initializer; zero otherwise.  */
64 int folding_initializer = 0;
66 /* The following constants represent a bit-based encoding of GCC's
67 comparison operators. This encoding simplifies transformations
68 on relational comparison operators, such as AND and OR. */
69 enum comparison_code {
88 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
89 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree, int);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree make_range (tree, int *, tree *, tree *);
118 static tree build_range_check (tree, tree, int, tree, tree);
119 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
121 static tree fold_range_test (enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
126 static tree extract_muldiv (tree, tree, enum tree_code, tree);
127 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
128 static int multiple_of_p (tree, tree, tree);
129 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
132 static bool fold_real_zero_addition_p (tree, tree, int);
133 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
135 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
136 static tree fold_div_compare (enum tree_code, tree, tree, tree);
137 static bool reorder_operands_p (tree, tree);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static int native_encode_expr (tree, unsigned char *, int);
142 static tree native_interpret_expr (tree, unsigned char *, int);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1.  Then this yields nonzero if overflow occurred during the addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign.  Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
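/* For illustration with 8-bit quantities: a = 0x70, b = 0x50, sum = 0xC0.
   A and B agree in sign while SUM differs, so ~(a ^ b) and (a ^ sum) both
   have the sign bit set, their AND is negative, and the macro reports the
   overflow (112 + 80 = 192 does not fit in a signed byte).  */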
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
161 #define LOWPART(x) ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
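/* A worked example, assuming HOST_BITS_PER_WIDE_INT == 32 so that BASE is
   0x10000: encode (words, 0x89ABCDEF, 0x01234567) produces
   words = { 0xCDEF, 0x89AB, 0x4567, 0x0123 }, and decode recombines them,
   e.g. 0xCDEF + 0x89AB * 0x10000 == 0x89ABCDEF.  */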
191 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
192 in overflow of the value, when >0 we are only interested in signed
193 overflow, for <0 we are interested in any overflow. OVERFLOWED
194 indicates whether overflow has already occurred. CONST_OVERFLOWED
195 indicates whether constant overflow has already occurred. We force
196 T's value to be within range of T's type (by setting to 0 or 1 all
197 the bits outside the type's range).  We set TREE_OVERFLOW if
198 OVERFLOWED is nonzero,
199 or OVERFLOWABLE is >0 and signed overflow occurs,
200 or OVERFLOWABLE is <0 and any overflow occurs.
201 We set TREE_CONSTANT_OVERFLOW if
202 CONST_OVERFLOWED is nonzero
203 or we set TREE_OVERFLOW.
204 We return either the original T, or a copy. */
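/* A concrete illustration of the masking and sign extension: forcing the
   value 300 into an 8-bit unsigned type clears the bits above the
   precision, giving 44; forcing 200 into an 8-bit signed type also sign
   extends from bit 7, giving -56.  Whether the overflow flags are set as
   well depends on OVERFLOWABLE, OVERFLOWED and CONST_OVERFLOWED as
   described above.  */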
207 force_fit_type (tree t, int overflowable,
208 bool overflowed, bool overflowed_const)
210 unsigned HOST_WIDE_INT low;
213 int sign_extended_type;
215 gcc_assert (TREE_CODE (t) == INTEGER_CST);
217 low = TREE_INT_CST_LOW (t);
218 high = TREE_INT_CST_HIGH (t);
220 if (POINTER_TYPE_P (TREE_TYPE (t))
221 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
224 prec = TYPE_PRECISION (TREE_TYPE (t));
225 /* Size types *are* sign extended. */
226 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
227 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
230 /* First clear all bits that are beyond the type's precision. */
232 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
239 if (prec < HOST_BITS_PER_WIDE_INT)
240 low &= ~((HOST_WIDE_INT) (-1) << prec);
243 if (!sign_extended_type)
244 /* No sign extension */;
245 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
246 /* Correct width already. */;
247 else if (prec > HOST_BITS_PER_WIDE_INT)
249 /* Sign extend top half? */
250 if (high & ((unsigned HOST_WIDE_INT)1
251 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
252 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
254 else if (prec == HOST_BITS_PER_WIDE_INT)
256 if ((HOST_WIDE_INT)low < 0)
261 /* Sign extend bottom half? */
262 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 low |= (HOST_WIDE_INT)(-1) << prec;
269 /* If the value changed, return a new node. */
270 if (overflowed || overflowed_const
271 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
273 t = build_int_cst_wide (TREE_TYPE (t), low, high);
277 || (overflowable > 0 && sign_extended_type))
280 TREE_OVERFLOW (t) = 1;
281 TREE_CONSTANT_OVERFLOW (t) = 1;
283 else if (overflowed_const)
286 TREE_CONSTANT_OVERFLOW (t) = 1;
293 /* Add two doubleword integers with doubleword result.
294 Each argument is given as two `HOST_WIDE_INT' pieces.
295 One argument is L1 and H1; the other, L2 and H2.
296 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
299 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
300 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
301 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
303 unsigned HOST_WIDE_INT l;
307 h = h1 + h2 + (l < l1);
311 return OVERFLOW_SUM_SIGN (h1, h2, h);
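/* For example, with 32-bit pieces, adding l1 = 0xFFFFFFFF, h1 = 0 and
   l2 = 1, h2 = 0: the low sum wraps to zero, the (l < l1) test supplies
   the carry into the high word, so the result is l = 0, h = 1, and
   OVERFLOW_SUM_SIGN (0, 0, 1) correctly reports no signed overflow.  */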
314 /* Negate a doubleword integer with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
327 return (*hv & h1) < 0;
337 /* Multiply two doubleword integers with doubleword result.
338 Return nonzero if the operation overflows, assuming it's signed.
339 Each argument is given as two `HOST_WIDE_INT' pieces.
340 One argument is L1 and H1; the other, L2 and H2.
341 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
344 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
345 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
348 HOST_WIDE_INT arg1[4];
349 HOST_WIDE_INT arg2[4];
350 HOST_WIDE_INT prod[4 * 2];
351 unsigned HOST_WIDE_INT carry;
353 unsigned HOST_WIDE_INT toplow, neglow;
354 HOST_WIDE_INT tophigh, neghigh;
356 encode (arg1, l1, h1);
357 encode (arg2, l2, h2);
359 memset (prod, 0, sizeof prod);
361 for (i = 0; i < 4; i++)
364 for (j = 0; j < 4; j++)
367 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
368 carry += arg1[i] * arg2[j];
369 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
371 prod[k] = LOWPART (carry);
372 carry = HIGHPART (carry);
377 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
379 /* Check for overflow by calculating the top half of the answer in full;
380 it should agree with the low half's sign bit. */
381 decode (prod + 4, &toplow, &tophigh);
384 neg_double (l2, h2, &neglow, &neghigh);
385 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
389 neg_double (l1, h1, &neglow, &neghigh);
390 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
392 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
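/* The test above works because the loop computed the full eight-halfword
   (unsigned) product in prod[]; once the top half has been corrected for
   any negative operand, the signed doubleword result is exact iff that
   top half is merely the sign extension of *HV: all zero bits when
   *HV >= 0, all one bits when *HV < 0.  */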
395 /* Shift the doubleword integer in L1, H1 left by COUNT places
396 keeping only PREC bits of result.
397 Shift right if COUNT is negative.
398 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
399 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
402 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
403 HOST_WIDE_INT count, unsigned int prec,
404 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
406 unsigned HOST_WIDE_INT signmask;
410 rshift_double (l1, h1, -count, prec, lv, hv, arith);
414 if (SHIFT_COUNT_TRUNCATED)
417 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
419 /* Shifting by the host word size is undefined according to the
420 ANSI standard, so we must handle this as a special case. */
424 else if (count >= HOST_BITS_PER_WIDE_INT)
426 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
431 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
432 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
436 /* Sign extend all bits that are beyond the precision. */
438 signmask = -((prec > HOST_BITS_PER_WIDE_INT
439 ? ((unsigned HOST_WIDE_INT) *hv
440 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
441 : (*lv >> (prec - 1))) & 1);
443 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
445 else if (prec >= HOST_BITS_PER_WIDE_INT)
447 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
448 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
453 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
454 *lv |= signmask << prec;
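/* For instance, with 32-bit pieces a left shift by 40 takes the
   "count >= HOST_BITS_PER_WIDE_INT" path: the high result word becomes
   l1 << 8, the low result word becomes zero, and the signmask fixup above
   then replicates bit PREC - 1 into any bits beyond the precision.  */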
458 /* Shift the doubleword integer in L1, H1 right by COUNT places
459 keeping only PREC bits of result. COUNT must be positive.
460 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
461 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
464 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
465 HOST_WIDE_INT count, unsigned int prec,
466 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
469 unsigned HOST_WIDE_INT signmask;
472 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
475 if (SHIFT_COUNT_TRUNCATED)
478 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
480 /* Shifting by the host word size is undefined according to the
481 ANSI standard, so we must handle this as a special case. */
485 else if (count >= HOST_BITS_PER_WIDE_INT)
488 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
492 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
494 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
497 /* Zero / sign extend all bits that are beyond the precision. */
499 if (count >= (HOST_WIDE_INT)prec)
504 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
506 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
508 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
509 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
514 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
515 *lv |= signmask << (prec - count);
519 /* Rotate the doubleword integer in L1, H1 left by COUNT places
520 keeping only PREC bits of result.
521 Rotate right if COUNT is negative.
522 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
525 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
526 HOST_WIDE_INT count, unsigned int prec,
527 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
529 unsigned HOST_WIDE_INT s1l, s2l;
530 HOST_WIDE_INT s1h, s2h;
536 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
537 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
542 /* Rotate the doubleword integer in L1, H1 right by COUNT places
543 keeping only PREC bits of result. COUNT must be positive.
544 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
547 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
548 HOST_WIDE_INT count, unsigned int prec,
549 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
551 unsigned HOST_WIDE_INT s1l, s2l;
552 HOST_WIDE_INT s1h, s2h;
558 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
559 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
564 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
565 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
566 CODE is a tree code for a kind of division, one of
567 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR or EXACT_DIV_EXPR.
569 It controls how the quotient is rounded to an integer.
570 Return nonzero if the operation overflows.
571 UNS nonzero says do unsigned division. */
574 div_and_round_double (enum tree_code code, int uns,
575 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
576 HOST_WIDE_INT hnum_orig,
577 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
578 HOST_WIDE_INT hden_orig,
579 unsigned HOST_WIDE_INT *lquo,
580 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
584 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
585 HOST_WIDE_INT den[4], quo[4];
587 unsigned HOST_WIDE_INT work;
588 unsigned HOST_WIDE_INT carry = 0;
589 unsigned HOST_WIDE_INT lnum = lnum_orig;
590 HOST_WIDE_INT hnum = hnum_orig;
591 unsigned HOST_WIDE_INT lden = lden_orig;
592 HOST_WIDE_INT hden = hden_orig;
595 if (hden == 0 && lden == 0)
596 overflow = 1, lden = 1;
598 /* Calculate quotient sign and convert operands to unsigned. */
604 /* (minimum integer) / (-1) is the only overflow case. */
605 if (neg_double (lnum, hnum, &lnum, &hnum)
606 && ((HOST_WIDE_INT) lden & hden) == -1)
612 neg_double (lden, hden, &lden, &hden);
616 if (hnum == 0 && hden == 0)
617 { /* single precision */
619 /* This unsigned division rounds toward zero. */
625 { /* trivial case: dividend < divisor */
626 /* hden != 0 already checked. */
633 memset (quo, 0, sizeof quo);
635 memset (num, 0, sizeof num); /* to zero 9th element */
636 memset (den, 0, sizeof den);
638 encode (num, lnum, hnum);
639 encode (den, lden, hden);
641 /* Special code for when the divisor < BASE. */
642 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
644 /* hnum != 0 already checked. */
645 for (i = 4 - 1; i >= 0; i--)
647 work = num[i] + carry * BASE;
648 quo[i] = work / lden;
654 /* Full double precision division,
655 with thanks to Don Knuth's "Seminumerical Algorithms". */
656 int num_hi_sig, den_hi_sig;
657 unsigned HOST_WIDE_INT quo_est, scale;
659 /* Find the highest nonzero divisor digit. */
660 for (i = 4 - 1;; i--)
667 /* Ensure that the first digit of the divisor is at least BASE/2.
668 This is required by the quotient digit estimation algorithm. */
670 scale = BASE / (den[den_hi_sig] + 1);
672 { /* scale divisor and dividend */
674 for (i = 0; i <= 4 - 1; i++)
676 work = (num[i] * scale) + carry;
677 num[i] = LOWPART (work);
678 carry = HIGHPART (work);
683 for (i = 0; i <= 4 - 1; i++)
685 work = (den[i] * scale) + carry;
686 den[i] = LOWPART (work);
687 carry = HIGHPART (work);
688 if (den[i] != 0) den_hi_sig = i;
695 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
697 /* Guess the next quotient digit, quo_est, by dividing the first
698 two remaining dividend digits by the high order divisor digit.
699 quo_est is never low and is at most 2 high. */
700 unsigned HOST_WIDE_INT tmp;
702 num_hi_sig = i + den_hi_sig + 1;
703 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
704 if (num[num_hi_sig] != den[den_hi_sig])
705 quo_est = work / den[den_hi_sig];
709 /* Refine quo_est so it's usually correct, and at most one high. */
710 tmp = work - quo_est * den[den_hi_sig];
712 && (den[den_hi_sig - 1] * quo_est
713 > (tmp * BASE + num[num_hi_sig - 2])))
716 /* Try QUO_EST as the quotient digit, by multiplying the
717 divisor by QUO_EST and subtracting from the remaining dividend.
718 Keep in mind that QUO_EST is the I - 1st digit. */
721 for (j = 0; j <= den_hi_sig; j++)
723 work = quo_est * den[j] + carry;
724 carry = HIGHPART (work);
725 work = num[i + j] - LOWPART (work);
726 num[i + j] = LOWPART (work);
727 carry += HIGHPART (work) != 0;
730 /* If quo_est was high by one, then num[i] went negative and
731 we need to correct things. */
732 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
735 carry = 0; /* add divisor back in */
736 for (j = 0; j <= den_hi_sig; j++)
738 work = num[i + j] + den[j] + carry;
739 carry = HIGHPART (work);
740 num[i + j] = LOWPART (work);
743 num [num_hi_sig] += carry;
746 /* Store the quotient digit. */
751 decode (quo, lquo, hquo);
754 /* If result is negative, make it so. */
756 neg_double (*lquo, *hquo, lquo, hquo);
758 /* Compute trial remainder: rem = num - (quo * den) */
759 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
760 neg_double (*lrem, *hrem, lrem, hrem);
761 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
766 case TRUNC_MOD_EXPR: /* round toward zero */
767 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
771 case FLOOR_MOD_EXPR: /* round toward negative infinity */
772 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
775 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
783 case CEIL_MOD_EXPR: /* round toward positive infinity */
784 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
794 case ROUND_MOD_EXPR: /* round to closest integer */
796 unsigned HOST_WIDE_INT labs_rem = *lrem;
797 HOST_WIDE_INT habs_rem = *hrem;
798 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
799 HOST_WIDE_INT habs_den = hden, htwice;
801 /* Get absolute values. */
803 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
805 neg_double (lden, hden, &labs_den, &habs_den);
807 /* If (2 * abs (lrem) >= abs (lden)) */
808 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
809 labs_rem, habs_rem, &ltwice, &htwice);
811 if (((unsigned HOST_WIDE_INT) habs_den
812 < (unsigned HOST_WIDE_INT) htwice)
813 || (((unsigned HOST_WIDE_INT) habs_den
814 == (unsigned HOST_WIDE_INT) htwice)
815 && (labs_den < ltwice)))
819 add_double (*lquo, *hquo,
820 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
823 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
835 /* Compute true remainder: rem = num - (quo * den) */
836 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
837 neg_double (*lrem, *hrem, lrem, hrem);
838 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
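/* A small worked example of the rounding variants: dividing 7 by -2 has
   exact ratio -3.5.  TRUNC_DIV_EXPR and CEIL_DIV_EXPR keep quo = -3,
   rem = 1; FLOOR_DIV_EXPR adjusts to quo = -4, rem = -1; ROUND_DIV_EXPR,
   because 2 * abs (rem) >= abs (den), also moves away from zero to
   quo = -4, rem = -1.  In every case num == quo * den + rem.  */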
842 /* If ARG2 divides ARG1 with zero remainder, carry out the division
843 of type CODE and return the quotient.
844 Otherwise return NULL_TREE. */
847 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
849 unsigned HOST_WIDE_INT int1l, int2l;
850 HOST_WIDE_INT int1h, int2h;
851 unsigned HOST_WIDE_INT quol, reml;
852 HOST_WIDE_INT quoh, remh;
853 tree type = TREE_TYPE (arg1);
854 int uns = TYPE_UNSIGNED (type);
856 int1l = TREE_INT_CST_LOW (arg1);
857 int1h = TREE_INT_CST_HIGH (arg1);
858 int2l = TREE_INT_CST_LOW (arg2);
859 int2h = TREE_INT_CST_HIGH (arg2);
861 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
862 &quol, &quoh, &reml, &remh);
863 if (remh != 0 || reml != 0)
866 return build_int_cst_wide (type, quol, quoh);
869 /* Return true if the built-in mathematical function specified by CODE
870 is odd, i.e. -f(x) == f(-x). */
873 negate_mathfn_p (enum built_in_function code)
877 CASE_FLT_FN (BUILT_IN_ASIN):
878 CASE_FLT_FN (BUILT_IN_ASINH):
879 CASE_FLT_FN (BUILT_IN_ATAN):
880 CASE_FLT_FN (BUILT_IN_ATANH):
881 CASE_FLT_FN (BUILT_IN_CBRT):
882 CASE_FLT_FN (BUILT_IN_SIN):
883 CASE_FLT_FN (BUILT_IN_SINH):
884 CASE_FLT_FN (BUILT_IN_TAN):
885 CASE_FLT_FN (BUILT_IN_TANH):
894 /* Check whether we may negate an integer constant T without causing
898 may_negate_without_overflow_p (tree t)
900 unsigned HOST_WIDE_INT val;
904 gcc_assert (TREE_CODE (t) == INTEGER_CST);
906 type = TREE_TYPE (t);
907 if (TYPE_UNSIGNED (type))
910 prec = TYPE_PRECISION (type);
911 if (prec > HOST_BITS_PER_WIDE_INT)
913 if (TREE_INT_CST_LOW (t) != 0)
915 prec -= HOST_BITS_PER_WIDE_INT;
916 val = TREE_INT_CST_HIGH (t);
919 val = TREE_INT_CST_LOW (t);
920 if (prec < HOST_BITS_PER_WIDE_INT)
921 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
922 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
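/* For example, with a 32-bit signed type the only value rejected here is
   INT_MIN (0x80000000): it is the one bit pattern equal to
   1 << (prec - 1), and its negation is not representable in the type.  */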
925 /* Determine whether an expression T can be cheaply negated using
926 the function negate_expr without introducing undefined overflow. */
929 negate_expr_p (tree t)
936 type = TREE_TYPE (t);
939 switch (TREE_CODE (t))
942 if (TYPE_UNSIGNED (type)
943 || (flag_wrapv && ! flag_trapv))
946 /* Check that -CST will not overflow type. */
947 return may_negate_without_overflow_p (t);
949 return INTEGRAL_TYPE_P (type)
950 && (TYPE_UNSIGNED (type)
951 || (flag_wrapv && !flag_trapv));
958 return negate_expr_p (TREE_REALPART (t))
959 && negate_expr_p (TREE_IMAGPART (t));
962 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
964 /* -(A + B) -> (-B) - A. */
965 if (negate_expr_p (TREE_OPERAND (t, 1))
966 && reorder_operands_p (TREE_OPERAND (t, 0),
967 TREE_OPERAND (t, 1)))
969 /* -(A + B) -> (-A) - B. */
970 return negate_expr_p (TREE_OPERAND (t, 0));
973 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
974 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
975 && reorder_operands_p (TREE_OPERAND (t, 0),
976 TREE_OPERAND (t, 1));
979 if (TYPE_UNSIGNED (TREE_TYPE (t)))
985 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
986 return negate_expr_p (TREE_OPERAND (t, 1))
987 || negate_expr_p (TREE_OPERAND (t, 0));
995 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
997 return negate_expr_p (TREE_OPERAND (t, 1))
998 || negate_expr_p (TREE_OPERAND (t, 0));
1001 /* Negate -((double)float) as (double)(-float). */
1002 if (TREE_CODE (type) == REAL_TYPE)
1004 tree tem = strip_float_extensions (t);
1006 return negate_expr_p (tem);
1011 /* Negate -f(x) as f(-x). */
1012 if (negate_mathfn_p (builtin_mathfn_code (t)))
1013 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1017 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1018 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1020 tree op1 = TREE_OPERAND (t, 1);
1021 if (TREE_INT_CST_HIGH (op1) == 0
1022 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1023 == TREE_INT_CST_LOW (op1))
1034 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1035 simplification is possible.
1036 If negate_expr_p would return true for T, NULL_TREE will never be returned.  */
1040 fold_negate_expr (tree t)
1042 tree type = TREE_TYPE (t);
1045 switch (TREE_CODE (t))
1047 /* Convert - (~A) to A + 1. */
1049 if (INTEGRAL_TYPE_P (type))
1050 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1051 build_int_cst (type, 1));
1055 tem = fold_negate_const (t, type);
1056 if (! TREE_OVERFLOW (tem)
1057 || TYPE_UNSIGNED (type)
1063 tem = fold_negate_const (t, type);
1064 /* Two's complement FP formats, such as c4x, may overflow. */
1065 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1071 tree rpart = negate_expr (TREE_REALPART (t));
1072 tree ipart = negate_expr (TREE_IMAGPART (t));
1074 if ((TREE_CODE (rpart) == REAL_CST
1075 && TREE_CODE (ipart) == REAL_CST)
1076 || (TREE_CODE (rpart) == INTEGER_CST
1077 && TREE_CODE (ipart) == INTEGER_CST))
1078 return build_complex (type, rpart, ipart);
1083 return TREE_OPERAND (t, 0);
1086 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1088 /* -(A + B) -> (-B) - A. */
1089 if (negate_expr_p (TREE_OPERAND (t, 1))
1090 && reorder_operands_p (TREE_OPERAND (t, 0),
1091 TREE_OPERAND (t, 1)))
1093 tem = negate_expr (TREE_OPERAND (t, 1));
1094 return fold_build2 (MINUS_EXPR, type,
1095 tem, TREE_OPERAND (t, 0));
1098 /* -(A + B) -> (-A) - B. */
1099 if (negate_expr_p (TREE_OPERAND (t, 0)))
1101 tem = negate_expr (TREE_OPERAND (t, 0));
1102 return fold_build2 (MINUS_EXPR, type,
1103 tem, TREE_OPERAND (t, 1));
1109 /* - (A - B) -> B - A */
1110 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1111 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1112 return fold_build2 (MINUS_EXPR, type,
1113 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1117 if (TYPE_UNSIGNED (type))
1123 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1125 tem = TREE_OPERAND (t, 1);
1126 if (negate_expr_p (tem))
1127 return fold_build2 (TREE_CODE (t), type,
1128 TREE_OPERAND (t, 0), negate_expr (tem));
1129 tem = TREE_OPERAND (t, 0);
1130 if (negate_expr_p (tem))
1131 return fold_build2 (TREE_CODE (t), type,
1132 negate_expr (tem), TREE_OPERAND (t, 1));
1136 case TRUNC_DIV_EXPR:
1137 case ROUND_DIV_EXPR:
1138 case FLOOR_DIV_EXPR:
1140 case EXACT_DIV_EXPR:
1141 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
1143 tem = TREE_OPERAND (t, 1);
1144 if (negate_expr_p (tem))
1145 return fold_build2 (TREE_CODE (t), type,
1146 TREE_OPERAND (t, 0), negate_expr (tem));
1147 tem = TREE_OPERAND (t, 0);
1148 if (negate_expr_p (tem))
1149 return fold_build2 (TREE_CODE (t), type,
1150 negate_expr (tem), TREE_OPERAND (t, 1));
1155 /* Convert -((double)float) into (double)(-float). */
1156 if (TREE_CODE (type) == REAL_TYPE)
1158 tem = strip_float_extensions (t);
1159 if (tem != t && negate_expr_p (tem))
1160 return negate_expr (tem);
1165 /* Negate -f(x) as f(-x). */
1166 if (negate_mathfn_p (builtin_mathfn_code (t))
1167 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1169 tree fndecl, arg, arglist;
1171 fndecl = get_callee_fndecl (t);
1172 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1173 arglist = build_tree_list (NULL_TREE, arg);
1174 return build_function_call_expr (fndecl, arglist);
1179 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1180 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1182 tree op1 = TREE_OPERAND (t, 1);
1183 if (TREE_INT_CST_HIGH (op1) == 0
1184 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1185 == TREE_INT_CST_LOW (op1))
1187 tree ntype = TYPE_UNSIGNED (type)
1188 ? lang_hooks.types.signed_type (type)
1189 : lang_hooks.types.unsigned_type (type);
1190 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1191 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1192 return fold_convert (type, temp);
1204 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1205 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1206 return NULL_TREE. */
1209 negate_expr (tree t)
1216 type = TREE_TYPE (t);
1217 STRIP_SIGN_NOPS (t);
1219 tem = fold_negate_expr (t);
1221 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1222 return fold_convert (type, tem);
1225 /* Split a tree IN into constant, literal and variable parts that could be
1226 combined with CODE to make IN. "constant" means an expression with
1227 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1228 commutative arithmetic operation. Store the constant part into *CONP,
1229 the literal in *LITP and return the variable part. If a part isn't
1230 present, set it to null. If the tree does not decompose in this way,
1231 return the entire tree as the variable part and the other parts as null.
1233 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1234 case, we negate an operand that was subtracted, unless it is a
1235 literal, in which case we use *MINUS_LITP instead.
1237 If NEGATE_P is true, we are negating all of IN, again except a literal
1238 for which we use *MINUS_LITP instead.
1240 If IN is itself a literal or constant, return it as appropriate.
1242 Note that we do not guarantee that any of the three values will be the
1243 same type as IN, but they will have the same signedness and mode. */
1246 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1247 tree *minus_litp, int negate_p)
1255 /* Strip any conversions that don't change the machine mode or signedness. */
1256 STRIP_SIGN_NOPS (in);
1258 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1260 else if (TREE_CODE (in) == code
1261 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1262 /* We can associate addition and subtraction together (even
1263 though the C standard doesn't say so) for integers because
1264 the value is not affected. For reals, the value might be
1265 affected, so we can't. */
1266 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1267 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1269 tree op0 = TREE_OPERAND (in, 0);
1270 tree op1 = TREE_OPERAND (in, 1);
1271 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1272 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1274 /* First see if either of the operands is a literal, then a constant. */
1275 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1276 *litp = op0, op0 = 0;
1277 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1278 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1280 if (op0 != 0 && TREE_CONSTANT (op0))
1281 *conp = op0, op0 = 0;
1282 else if (op1 != 0 && TREE_CONSTANT (op1))
1283 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1285 /* If we haven't dealt with either operand, this is not a case we can
1286 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1287 if (op0 != 0 && op1 != 0)
1292 var = op1, neg_var_p = neg1_p;
1294 /* Now do any needed negations. */
1296 *minus_litp = *litp, *litp = 0;
1298 *conp = negate_expr (*conp);
1300 var = negate_expr (var);
1302 else if (TREE_CONSTANT (in))
1310 *minus_litp = *litp, *litp = 0;
1311 else if (*minus_litp)
1312 *litp = *minus_litp, *minus_litp = 0;
1313 *conp = negate_expr (*conp);
1314 var = negate_expr (var);
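/* Two small examples with CODE == PLUS_EXPR: splitting "x + 3" yields
   VAR == x and *LITP == 3, while splitting "x - 3" also yields VAR == x
   but stores the 3 through *MINUS_LITP, because the literal was the
   subtracted operand.  */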
1320 /* Re-associate trees split by the above function. T1 and T2 are either
1321 expressions to associate or null. Return the new expression, if any. If
1322 we build an operation, do it in TYPE and with CODE. */
1325 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1332 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1333 try to fold this since we will have infinite recursion. But do
1334 deal with any NEGATE_EXPRs. */
1335 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1336 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1338 if (code == PLUS_EXPR)
1340 if (TREE_CODE (t1) == NEGATE_EXPR)
1341 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1342 fold_convert (type, TREE_OPERAND (t1, 0)));
1343 else if (TREE_CODE (t2) == NEGATE_EXPR)
1344 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1345 fold_convert (type, TREE_OPERAND (t2, 0)));
1346 else if (integer_zerop (t2))
1347 return fold_convert (type, t1);
1349 else if (code == MINUS_EXPR)
1351 if (integer_zerop (t2))
1352 return fold_convert (type, t1);
1355 return build2 (code, type, fold_convert (type, t1),
1356 fold_convert (type, t2));
1359 return fold_build2 (code, type, fold_convert (type, t1),
1360 fold_convert (type, t2));
1363 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1364 to produce a new constant. Return NULL_TREE if we don't know how
1365 to evaluate CODE at compile-time.
1367 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1370 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1372 unsigned HOST_WIDE_INT int1l, int2l;
1373 HOST_WIDE_INT int1h, int2h;
1374 unsigned HOST_WIDE_INT low;
1376 unsigned HOST_WIDE_INT garbagel;
1377 HOST_WIDE_INT garbageh;
1379 tree type = TREE_TYPE (arg1);
1380 int uns = TYPE_UNSIGNED (type);
1382 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1385 int1l = TREE_INT_CST_LOW (arg1);
1386 int1h = TREE_INT_CST_HIGH (arg1);
1387 int2l = TREE_INT_CST_LOW (arg2);
1388 int2h = TREE_INT_CST_HIGH (arg2);
1393 low = int1l | int2l, hi = int1h | int2h;
1397 low = int1l ^ int2l, hi = int1h ^ int2h;
1401 low = int1l & int2l, hi = int1h & int2h;
1407 /* It's unclear from the C standard whether shifts can overflow.
1408 The following code ignores overflow; perhaps a C standard
1409 interpretation ruling is needed. */
1410 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1417 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1422 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1426 neg_double (int2l, int2h, &low, &hi);
1427 add_double (int1l, int1h, low, hi, &low, &hi);
1428 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1432 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1435 case TRUNC_DIV_EXPR:
1436 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1437 case EXACT_DIV_EXPR:
1438 /* This is a shortcut for a common special case. */
1439 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1440 && ! TREE_CONSTANT_OVERFLOW (arg1)
1441 && ! TREE_CONSTANT_OVERFLOW (arg2)
1442 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1444 if (code == CEIL_DIV_EXPR)
1447 low = int1l / int2l, hi = 0;
1451 /* ... fall through ... */
1453 case ROUND_DIV_EXPR:
1454 if (int2h == 0 && int2l == 0)
1456 if (int2h == 0 && int2l == 1)
1458 low = int1l, hi = int1h;
1461 if (int1l == int2l && int1h == int2h
1462 && ! (int1l == 0 && int1h == 0))
1467 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1468 &low, &hi, &garbagel, &garbageh);
1471 case TRUNC_MOD_EXPR:
1472 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1473 /* This is a shortcut for a common special case. */
1474 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1475 && ! TREE_CONSTANT_OVERFLOW (arg1)
1476 && ! TREE_CONSTANT_OVERFLOW (arg2)
1477 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1479 if (code == CEIL_MOD_EXPR)
1481 low = int1l % int2l, hi = 0;
1485 /* ... fall through ... */
1487 case ROUND_MOD_EXPR:
1488 if (int2h == 0 && int2l == 0)
1490 overflow = div_and_round_double (code, uns,
1491 int1l, int1h, int2l, int2h,
1492 &garbagel, &garbageh, &low, &hi);
1498 low = (((unsigned HOST_WIDE_INT) int1h
1499 < (unsigned HOST_WIDE_INT) int2h)
1500 || (((unsigned HOST_WIDE_INT) int1h
1501 == (unsigned HOST_WIDE_INT) int2h)
1504 low = (int1h < int2h
1505 || (int1h == int2h && int1l < int2l));
1507 if (low == (code == MIN_EXPR))
1508 low = int1l, hi = int1h;
1510 low = int2l, hi = int2h;
1517 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1521 /* Propagate overflow flags ourselves. */
1522 if (((!uns || is_sizetype) && overflow)
1523 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1526 TREE_OVERFLOW (t) = 1;
1527 TREE_CONSTANT_OVERFLOW (t) = 1;
1529 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1532 TREE_CONSTANT_OVERFLOW (t) = 1;
1536 t = force_fit_type (t, 1,
1537 ((!uns || is_sizetype) && overflow)
1538 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1539 TREE_CONSTANT_OVERFLOW (arg1)
1540 | TREE_CONSTANT_OVERFLOW (arg2));
1545 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1546 constant. We assume ARG1 and ARG2 have the same data type, or at least
1547 are the same kind of constant and the same machine mode.
1549 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1552 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1557 if (TREE_CODE (arg1) == INTEGER_CST)
1558 return int_const_binop (code, arg1, arg2, notrunc);
1560 if (TREE_CODE (arg1) == REAL_CST)
1562 enum machine_mode mode;
1565 REAL_VALUE_TYPE value;
1566 REAL_VALUE_TYPE result;
1570 /* The following codes are handled by real_arithmetic. */
1585 d1 = TREE_REAL_CST (arg1);
1586 d2 = TREE_REAL_CST (arg2);
1588 type = TREE_TYPE (arg1);
1589 mode = TYPE_MODE (type);
1591 /* Don't perform operation if we honor signaling NaNs and
1592 either operand is a NaN. */
1593 if (HONOR_SNANS (mode)
1594 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1597 /* Don't perform operation if it would raise a division
1598 by zero exception. */
1599 if (code == RDIV_EXPR
1600 && REAL_VALUES_EQUAL (d2, dconst0)
1601 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1604 /* If either operand is a NaN, just return it. Otherwise, set up
1605 for floating-point trap; we return an overflow. */
1606 if (REAL_VALUE_ISNAN (d1))
1608 else if (REAL_VALUE_ISNAN (d2))
1611 inexact = real_arithmetic (&value, code, &d1, &d2);
1612 real_convert (&result, mode, &value);
1614 /* Don't constant fold this floating point operation if
1615 the result has overflowed and flag_trapping_math. */
1617 if (flag_trapping_math
1618 && MODE_HAS_INFINITIES (mode)
1619 && REAL_VALUE_ISINF (result)
1620 && !REAL_VALUE_ISINF (d1)
1621 && !REAL_VALUE_ISINF (d2))
1624 /* Don't constant fold this floating point operation if the
1625 result may depend upon the run-time rounding mode and
1626 flag_rounding_math is set, or if GCC's software emulation
1627 is unable to accurately represent the result. */
1629 if ((flag_rounding_math
1630 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1631 && !flag_unsafe_math_optimizations))
1632 && (inexact || !real_identical (&result, &value)))
1635 t = build_real (type, result);
1637 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1638 TREE_CONSTANT_OVERFLOW (t)
1640 | TREE_CONSTANT_OVERFLOW (arg1)
1641 | TREE_CONSTANT_OVERFLOW (arg2);
1645 if (TREE_CODE (arg1) == COMPLEX_CST)
1647 tree type = TREE_TYPE (arg1);
1648 tree r1 = TREE_REALPART (arg1);
1649 tree i1 = TREE_IMAGPART (arg1);
1650 tree r2 = TREE_REALPART (arg2);
1651 tree i2 = TREE_IMAGPART (arg2);
1657 t = build_complex (type,
1658 const_binop (PLUS_EXPR, r1, r2, notrunc),
1659 const_binop (PLUS_EXPR, i1, i2, notrunc));
1663 t = build_complex (type,
1664 const_binop (MINUS_EXPR, r1, r2, notrunc),
1665 const_binop (MINUS_EXPR, i1, i2, notrunc));
1669 t = build_complex (type,
1670 const_binop (MINUS_EXPR,
1671 const_binop (MULT_EXPR,
1673 const_binop (MULT_EXPR,
1676 const_binop (PLUS_EXPR,
1677 const_binop (MULT_EXPR,
1679 const_binop (MULT_EXPR,
1686 tree t1, t2, real, imag;
1688 = const_binop (PLUS_EXPR,
1689 const_binop (MULT_EXPR, r2, r2, notrunc),
1690 const_binop (MULT_EXPR, i2, i2, notrunc),
1693 t1 = const_binop (PLUS_EXPR,
1694 const_binop (MULT_EXPR, r1, r2, notrunc),
1695 const_binop (MULT_EXPR, i1, i2, notrunc),
1697 t2 = const_binop (MINUS_EXPR,
1698 const_binop (MULT_EXPR, i1, r2, notrunc),
1699 const_binop (MULT_EXPR, r1, i2, notrunc),
1702 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1704 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1705 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1709 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1710 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1715 t = build_complex (type, real, imag);
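/* These cases simply apply the usual complex identities at compile time:
   (a + bi) * (c + di) = (ac - bd) + (ad + bc)i and
   (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   with MAGSQUARED holding c*c + d*d and T1, T2 the two numerators; the
   numerators are then divided integrally or as reals depending on the
   component type.  */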
1727 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1728 indicates which particular sizetype to create. */
1731 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1733 return build_int_cst (sizetype_tab[(int) kind], number);
1736 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1737 is a tree code. The type of the result is taken from the operands.
1738 Both must be the same integer type and it must be a size type.
1739 If the operands are constant, so is the result. */
1742 size_binop (enum tree_code code, tree arg0, tree arg1)
1744 tree type = TREE_TYPE (arg0);
1746 if (arg0 == error_mark_node || arg1 == error_mark_node)
1747 return error_mark_node;
1749 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1750 && type == TREE_TYPE (arg1));
1752 /* Handle the special case of two integer constants faster. */
1753 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1755 /* And some specific cases even faster than that. */
1756 if (code == PLUS_EXPR && integer_zerop (arg0))
1758 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1759 && integer_zerop (arg1))
1761 else if (code == MULT_EXPR && integer_onep (arg0))
1764 /* Handle general case of two integer constants. */
1765 return int_const_binop (code, arg0, arg1, 0);
1768 return fold_build2 (code, type, arg0, arg1);
1771 /* Given two values, either both of sizetype or both of bitsizetype,
1772 compute the difference between the two values. Return the value
1773 in signed type corresponding to the type of the operands. */
1776 size_diffop (tree arg0, tree arg1)
1778 tree type = TREE_TYPE (arg0);
1781 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1782 && type == TREE_TYPE (arg1));
1784 /* If the type is already signed, just do the simple thing. */
1785 if (!TYPE_UNSIGNED (type))
1786 return size_binop (MINUS_EXPR, arg0, arg1);
1788 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1790 /* If either operand is not a constant, do the conversions to the signed
1791 type and subtract. The hardware will do the right thing with any
1792 overflow in the subtraction. */
1793 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1794 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1795 fold_convert (ctype, arg1));
1797 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1798 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1799 overflow) and negate (which can't either). Special-case a result
1800 of zero while we're here. */
1801 if (tree_int_cst_equal (arg0, arg1))
1802 return build_int_cst (ctype, 0);
1803 else if (tree_int_cst_lt (arg1, arg0))
1804 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1806 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1807 fold_convert (ctype, size_binop (MINUS_EXPR,
1811 /* A subroutine of fold_convert_const handling conversions of an
1812 INTEGER_CST to another integer type. */
1815 fold_convert_const_int_from_int (tree type, tree arg1)
1819 /* Given an integer constant, make new constant with new type,
1820 appropriately sign-extended or truncated. */
1821 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1822 TREE_INT_CST_HIGH (arg1));
1824 t = force_fit_type (t,
1825 /* Don't set the overflow when
1826 converting a pointer */
1827 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1828 (TREE_INT_CST_HIGH (arg1) < 0
1829 && (TYPE_UNSIGNED (type)
1830 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1831 | TREE_OVERFLOW (arg1),
1832 TREE_CONSTANT_OVERFLOW (arg1));
1837 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1838 to an integer type. */
1841 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1846 /* The following code implements the floating point to integer
1847 conversion rules required by the Java Language Specification,
1848 that IEEE NaNs are mapped to zero and values that overflow
1849 the target precision saturate, i.e. values greater than
1850 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1851 are mapped to INT_MIN. These semantics are allowed by the
1852 C and C++ standards that simply state that the behavior of
1853 FP-to-integer conversion is unspecified upon overflow. */
1855 HOST_WIDE_INT high, low;
1857 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1861 case FIX_TRUNC_EXPR:
1862 real_trunc (&r, VOIDmode, &x);
1866 real_ceil (&r, VOIDmode, &x);
1869 case FIX_FLOOR_EXPR:
1870 real_floor (&r, VOIDmode, &x);
1873 case FIX_ROUND_EXPR:
1874 real_round (&r, VOIDmode, &x);
1881 /* If R is NaN, return zero and show we have an overflow. */
1882 if (REAL_VALUE_ISNAN (r))
1889 /* See if R is less than the lower bound or greater than the upper bound.  */
1894 tree lt = TYPE_MIN_VALUE (type);
1895 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1896 if (REAL_VALUES_LESS (r, l))
1899 high = TREE_INT_CST_HIGH (lt);
1900 low = TREE_INT_CST_LOW (lt);
1906 tree ut = TYPE_MAX_VALUE (type);
1909 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1910 if (REAL_VALUES_LESS (u, r))
1913 high = TREE_INT_CST_HIGH (ut);
1914 low = TREE_INT_CST_LOW (ut);
1920 REAL_VALUE_TO_INT (&low, &high, r);
1922 t = build_int_cst_wide (type, low, high);
1924 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1925 TREE_CONSTANT_OVERFLOW (arg1));
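/* Under these rules, folding (int) 1.0e30 for a 32-bit integer type
   saturates to the type's maximum with the overflow flag set, folding
   (int) -1.0e30 saturates to the minimum, and folding a NaN yields zero,
   again flagged as an overflow.  */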
1929 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1930 to another floating point type. */
1933 fold_convert_const_real_from_real (tree type, tree arg1)
1935 REAL_VALUE_TYPE value;
1938 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1939 t = build_real (type, value);
1941 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1942 TREE_CONSTANT_OVERFLOW (t)
1943 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1947 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1948 type TYPE. If no simplification can be done return NULL_TREE. */
1951 fold_convert_const (enum tree_code code, tree type, tree arg1)
1953 if (TREE_TYPE (arg1) == type)
1956 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1958 if (TREE_CODE (arg1) == INTEGER_CST)
1959 return fold_convert_const_int_from_int (type, arg1);
1960 else if (TREE_CODE (arg1) == REAL_CST)
1961 return fold_convert_const_int_from_real (code, type, arg1);
1963 else if (TREE_CODE (type) == REAL_TYPE)
1965 if (TREE_CODE (arg1) == INTEGER_CST)
1966 return build_real_from_int_cst (type, arg1);
1967 if (TREE_CODE (arg1) == REAL_CST)
1968 return fold_convert_const_real_from_real (type, arg1);
1973 /* Construct a vector of zero elements of vector type TYPE. */
1976 build_zero_vector (tree type)
1981 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1982 units = TYPE_VECTOR_SUBPARTS (type);
1985 for (i = 0; i < units; i++)
1986 list = tree_cons (NULL_TREE, elem, list);
1987 return build_vector (type, list);
1990 /* Convert expression ARG to type TYPE. Used by the middle-end for
1991 simple conversions in preference to calling the front-end's convert. */
1994 fold_convert (tree type, tree arg)
1996 tree orig = TREE_TYPE (arg);
2002 if (TREE_CODE (arg) == ERROR_MARK
2003 || TREE_CODE (type) == ERROR_MARK
2004 || TREE_CODE (orig) == ERROR_MARK)
2005 return error_mark_node;
2007 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2008 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2009 TYPE_MAIN_VARIANT (orig)))
2010 return fold_build1 (NOP_EXPR, type, arg);
2012 switch (TREE_CODE (type))
2014 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2015 case POINTER_TYPE: case REFERENCE_TYPE:
2017 if (TREE_CODE (arg) == INTEGER_CST)
2019 tem = fold_convert_const (NOP_EXPR, type, arg);
2020 if (tem != NULL_TREE)
2023 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2024 || TREE_CODE (orig) == OFFSET_TYPE)
2025 return fold_build1 (NOP_EXPR, type, arg);
2026 if (TREE_CODE (orig) == COMPLEX_TYPE)
2028 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2029 return fold_convert (type, tem);
2031 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2032 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2033 return fold_build1 (NOP_EXPR, type, arg);
2036 if (TREE_CODE (arg) == INTEGER_CST)
2038 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2039 if (tem != NULL_TREE)
2042 else if (TREE_CODE (arg) == REAL_CST)
2044 tem = fold_convert_const (NOP_EXPR, type, arg);
2045 if (tem != NULL_TREE)
2049 switch (TREE_CODE (orig))
2052 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2053 case POINTER_TYPE: case REFERENCE_TYPE:
2054 return fold_build1 (FLOAT_EXPR, type, arg);
2057 return fold_build1 (NOP_EXPR, type, arg);
2060 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2061 return fold_convert (type, tem);
2068 switch (TREE_CODE (orig))
2071 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2072 case POINTER_TYPE: case REFERENCE_TYPE:
2074 return build2 (COMPLEX_EXPR, type,
2075 fold_convert (TREE_TYPE (type), arg),
2076 fold_convert (TREE_TYPE (type), integer_zero_node));
2081 if (TREE_CODE (arg) == COMPLEX_EXPR)
2083 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2084 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2085 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2088 arg = save_expr (arg);
2089 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2090 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2091 rpart = fold_convert (TREE_TYPE (type), rpart);
2092 ipart = fold_convert (TREE_TYPE (type), ipart);
2093 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2101 if (integer_zerop (arg))
2102 return build_zero_vector (type);
2103 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2104 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2105 || TREE_CODE (orig) == VECTOR_TYPE);
2106 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2109 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2116 /* Return false if expr can be assumed not to be an lvalue, true otherwise.  */
2120 maybe_lvalue_p (tree x)
2122 /* We only need to wrap lvalue tree codes. */
2123 switch (TREE_CODE (x))
2134 case ALIGN_INDIRECT_REF:
2135 case MISALIGNED_INDIRECT_REF:
2137 case ARRAY_RANGE_REF:
2143 case PREINCREMENT_EXPR:
2144 case PREDECREMENT_EXPR:
2146 case TRY_CATCH_EXPR:
2147 case WITH_CLEANUP_EXPR:
2158 /* Assume the worst for front-end tree codes. */
2159 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2167 /* Return an expr equal to X but certainly not valid as an lvalue. */
2172 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us.  */
2177 if (! maybe_lvalue_p (x))
2179 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2182 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2183 Zero means allow extended lvalues. */
2185 int pedantic_lvalues;
2187 /* When pedantic, return an expr equal to X but certainly not valid as a
2188 pedantic lvalue. Otherwise, return X. */
2191 pedantic_non_lvalue (tree x)
2193 if (pedantic_lvalues)
2194 return non_lvalue (x);
2199 /* Given a tree comparison code, return the code that is the logical inverse
2200 of the given code. It is not safe to do this for floating-point
2201 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2202 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2205 invert_tree_comparison (enum tree_code code, bool honor_nans)
2207 if (honor_nans && flag_trapping_math)
2217 return honor_nans ? UNLE_EXPR : LE_EXPR;
2219 return honor_nans ? UNLT_EXPR : LT_EXPR;
2221 return honor_nans ? UNGE_EXPR : GE_EXPR;
2223 return honor_nans ? UNGT_EXPR : GT_EXPR;
2237 return UNORDERED_EXPR;
2238 case UNORDERED_EXPR:
2239 return ORDERED_EXPR;
2245 /* Similar, but return the comparison that results if the operands are
2246 swapped. This is safe for floating-point. */
2249 swap_tree_comparison (enum tree_code code)
2256 case UNORDERED_EXPR:
2282 /* Convert a comparison tree code from an enum tree_code representation
2283 into a compcode bit-based encoding. This function is the inverse of
2284 compcode_to_comparison. */
2286 static enum comparison_code
2287 comparison_to_compcode (enum tree_code code)
2304 return COMPCODE_ORD;
2305 case UNORDERED_EXPR:
2306 return COMPCODE_UNORD;
2308 return COMPCODE_UNLT;
2310 return COMPCODE_UNEQ;
2312 return COMPCODE_UNLE;
2314 return COMPCODE_UNGT;
2316 return COMPCODE_LTGT;
2318 return COMPCODE_UNGE;
2324 /* Convert a compcode bit-based encoding of a comparison operator back
2325 to GCC's enum tree_code representation. This function is the
2326 inverse of comparison_to_compcode. */
2328 static enum tree_code
2329 compcode_to_comparison (enum comparison_code code)
2346 return ORDERED_EXPR;
2347 case COMPCODE_UNORD:
2348 return UNORDERED_EXPR;
2366 /* Return a tree for the comparison which is the combination of
2367 doing the AND or OR (depending on CODE) of the two operations LCODE
2368 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2369 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2370 if this makes the transformation invalid. */
2373 combine_comparisons (enum tree_code code, enum tree_code lcode,
2374 enum tree_code rcode, tree truth_type,
2375 tree ll_arg, tree lr_arg)
2377 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2378 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2379 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2380 enum comparison_code compcode;
2384 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2385 compcode = lcompcode & rcompcode;
2388 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2389 compcode = lcompcode | rcompcode;
2398 /* Eliminate unordered comparisons, as well as LTGT and ORD
2399 which are not used unless the mode has NaNs. */
2400 compcode &= ~COMPCODE_UNORD;
2401 if (compcode == COMPCODE_LTGT)
2402 compcode = COMPCODE_NE;
2403 else if (compcode == COMPCODE_ORD)
2404 compcode = COMPCODE_TRUE;
2406 else if (flag_trapping_math)
2408 /* Check that the original operation and the optimized ones will trap
2409 under the same condition. */
2410 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2411 && (lcompcode != COMPCODE_EQ)
2412 && (lcompcode != COMPCODE_ORD);
2413 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2414 && (rcompcode != COMPCODE_EQ)
2415 && (rcompcode != COMPCODE_ORD);
2416 bool trap = (compcode & COMPCODE_UNORD) == 0
2417 && (compcode != COMPCODE_EQ)
2418 && (compcode != COMPCODE_ORD);
2420 /* In a short-circuited boolean expression the LHS might be
2421 such that the RHS, if evaluated, will never trap. For
2422 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2423 if neither x nor y is NaN. (This is a mixed blessing: for
2424 example, the expression above will never trap, hence
2425 optimizing it to x < y would be invalid). */
2426 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2427 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2430 /* If the comparison was short-circuited, and only the RHS
2431 trapped, we may now generate a spurious trap. */
2433 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2436 /* If we changed the conditions that cause a trap, we lose. */
2437 if ((ltrap || rtrap) != trap)
2441 if (compcode == COMPCODE_TRUE)
2442 return constant_boolean_node (true, truth_type);
2443 else if (compcode == COMPCODE_FALSE)
2444 return constant_boolean_node (false, truth_type);
2446 return fold_build2 (compcode_to_comparison (compcode),
2447 truth_type, ll_arg, lr_arg);
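/* A worked example of the encoding used here (a sketch; the precise
   values are those of enum comparison_code): with one bit each for LT,
   EQ, GT and UNORD, LE is LT|EQ and GE is GT|EQ, so for

     (a <= b) && (a >= b)

   the combination (LT|EQ) & (GT|EQ) leaves just EQ and the whole
   expression folds to a == b, subject to the trap checks above.  */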
2450 /* Return nonzero if CODE is a tree code that represents a truth value. */
2453 truth_value_p (enum tree_code code)
2455 return (TREE_CODE_CLASS (code) == tcc_comparison
2456 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2457 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2458 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2461 /* Return nonzero if two operands (typically of the same tree node)
2462 are necessarily equal. If either argument has side-effects this
2463 function returns zero. FLAGS modifies behavior as follows:
2465 If OEP_ONLY_CONST is set, only return nonzero for constants.
2466 This function tests whether the operands are indistinguishable;
2467 it does not test whether they are equal using C's == operation.
2468 The distinction is important for IEEE floating point, because
2469 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2470 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2472 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2473 even though it may hold multiple values during a function.
2474 This is because a GCC tree node guarantees that nothing else is
2475 executed between the evaluation of its "operands" (which may often
2476 be evaluated in arbitrary order). Hence if the operands themselves
2477 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2478 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2479 unset means assuming isochronic (or instantaneous) tree equivalence.
2480 Unless comparing arbitrary expression trees, such as from different
2481 statements, this flag can usually be left unset.
2483 If OEP_PURE_SAME is set, then pure functions with identical arguments
2484 are considered the same. It is used when the caller has other ways
2485 to ensure that global memory is unchanged in between. */
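/* For example, REAL_CST nodes for 0.0 and -0.0 are not operand_equal_p
   even though 0.0 == -0.0 in C, because REAL_VALUES_IDENTICAL keeps the
   signs apart; conversely, two bit-identical NaN constants are treated
   as equal here although NaN != NaN at run time.  */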
2488 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2490 /* If either is ERROR_MARK, they aren't equal. */
2491 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2494 /* If both types don't have the same signedness, then we can't consider
2495 them equal. We must check this before the STRIP_NOPS calls
2496 because they may change the signedness of the arguments. */
2497 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2500 /* If both types don't have the same precision, then it is not safe to strip NOPs. */
2502 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2508 /* In case both args are comparisons but with different comparison
2509 code, try to swap the comparison operands of one arg to produce
2510 a match and compare that variant. */
2511 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2512 && COMPARISON_CLASS_P (arg0)
2513 && COMPARISON_CLASS_P (arg1))
2515 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2517 if (TREE_CODE (arg0) == swap_code)
2518 return operand_equal_p (TREE_OPERAND (arg0, 0),
2519 TREE_OPERAND (arg1, 1), flags)
2520 && operand_equal_p (TREE_OPERAND (arg0, 1),
2521 TREE_OPERAND (arg1, 0), flags);
2524 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2525 /* This is needed for conversions and for COMPONENT_REF.
2526 Might as well play it safe and always test this. */
2527 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2528 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2529 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2532 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2533 We don't care about side effects in that case because the SAVE_EXPR
2534 takes care of that for us. In all other cases, two expressions are
2535 equal if they have no side effects. If we have two identical
2536 expressions with side effects that should be treated the same due
2537 to the only side effects being identical SAVE_EXPR's, that will
2538 be detected in the recursive calls below. */
2539 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2540 && (TREE_CODE (arg0) == SAVE_EXPR
2541 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2544 /* Next handle constant cases, those for which we can return 1 even
2545 if ONLY_CONST is set. */
2546 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2547 switch (TREE_CODE (arg0))
2550 return (! TREE_CONSTANT_OVERFLOW (arg0)
2551 && ! TREE_CONSTANT_OVERFLOW (arg1)
2552 && tree_int_cst_equal (arg0, arg1));
2555 return (! TREE_CONSTANT_OVERFLOW (arg0)
2556 && ! TREE_CONSTANT_OVERFLOW (arg1)
2557 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2558 TREE_REAL_CST (arg1)));
2564 if (TREE_CONSTANT_OVERFLOW (arg0)
2565 || TREE_CONSTANT_OVERFLOW (arg1))
2568 v1 = TREE_VECTOR_CST_ELTS (arg0);
2569 v2 = TREE_VECTOR_CST_ELTS (arg1);
2572 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2575 v1 = TREE_CHAIN (v1);
2576 v2 = TREE_CHAIN (v2);
2583 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2585 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2589 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2590 && ! memcmp (TREE_STRING_POINTER (arg0),
2591 TREE_STRING_POINTER (arg1),
2592 TREE_STRING_LENGTH (arg0)));
2595 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2601 if (flags & OEP_ONLY_CONST)
2604 /* Define macros to test an operand from arg0 and arg1 for equality and a
2605 variant that allows null and views null as being different from any
2606 non-null value. In the latter case, if either is null, then both
2607 must be; otherwise, do the normal comparison. */
2608 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2609 TREE_OPERAND (arg1, N), flags)
2611 #define OP_SAME_WITH_NULL(N) \
2612 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2613 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2615 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2618 /* Two conversions are equal only if signedness and modes match. */
2619 switch (TREE_CODE (arg0))
2624 case FIX_TRUNC_EXPR:
2625 case FIX_FLOOR_EXPR:
2626 case FIX_ROUND_EXPR:
2627 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2628 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2638 case tcc_comparison:
2640 if (OP_SAME (0) && OP_SAME (1))
2643 /* For commutative ops, allow the other order. */
2644 return (commutative_tree_code (TREE_CODE (arg0))
2645 && operand_equal_p (TREE_OPERAND (arg0, 0),
2646 TREE_OPERAND (arg1, 1), flags)
2647 && operand_equal_p (TREE_OPERAND (arg0, 1),
2648 TREE_OPERAND (arg1, 0), flags));
2651 /* If either of the pointer (or reference) expressions we are
2652 dereferencing contain a side effect, these cannot be equal. */
2653 if (TREE_SIDE_EFFECTS (arg0)
2654 || TREE_SIDE_EFFECTS (arg1))
2657 switch (TREE_CODE (arg0))
2660 case ALIGN_INDIRECT_REF:
2661 case MISALIGNED_INDIRECT_REF:
2667 case ARRAY_RANGE_REF:
2668 /* Operands 2 and 3 may be null. */
2671 && OP_SAME_WITH_NULL (2)
2672 && OP_SAME_WITH_NULL (3));
2675 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2676 may be NULL when we're called to compare MEM_EXPRs. */
2677 return OP_SAME_WITH_NULL (0)
2679 && OP_SAME_WITH_NULL (2);
2682 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2688 case tcc_expression:
2689 switch (TREE_CODE (arg0))
2692 case TRUTH_NOT_EXPR:
2695 case TRUTH_ANDIF_EXPR:
2696 case TRUTH_ORIF_EXPR:
2697 return OP_SAME (0) && OP_SAME (1);
2699 case TRUTH_AND_EXPR:
2701 case TRUTH_XOR_EXPR:
2702 if (OP_SAME (0) && OP_SAME (1))
2705 /* Otherwise take into account this is a commutative operation. */
2706 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2707 TREE_OPERAND (arg1, 1), flags)
2708 && operand_equal_p (TREE_OPERAND (arg0, 1),
2709 TREE_OPERAND (arg1, 0), flags));
2712 /* If the CALL_EXPRs call different functions, then they
2713 clearly can not be equal. */
2718 unsigned int cef = call_expr_flags (arg0);
2719 if (flags & OEP_PURE_SAME)
2720 cef &= ECF_CONST | ECF_PURE;
2727 /* Now see if all the arguments are the same. operand_equal_p
2728 does not handle TREE_LIST, so we walk the operands here
2729 feeding them to operand_equal_p. */
2730 arg0 = TREE_OPERAND (arg0, 1);
2731 arg1 = TREE_OPERAND (arg1, 1);
2732 while (arg0 && arg1)
2734 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2738 arg0 = TREE_CHAIN (arg0);
2739 arg1 = TREE_CHAIN (arg1);
2742 /* If we get here and both argument lists are exhausted
2743 then the CALL_EXPRs are equal. */
2744 return ! (arg0 || arg1);
2750 case tcc_declaration:
2751 /* Consider __builtin_sqrt equal to sqrt. */
2752 return (TREE_CODE (arg0) == FUNCTION_DECL
2753 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2754 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2755 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2762 #undef OP_SAME_WITH_NULL
2765 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2766 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2768 When in doubt, return 0. */
2771 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2773 int unsignedp1, unsignedpo;
2774 tree primarg0, primarg1, primother;
2775 unsigned int correct_width;
2777 if (operand_equal_p (arg0, arg1, 0))
2780 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2781 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2784 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2785 and see if the inner values are the same. This removes any
2786 signedness comparison, which doesn't matter here. */
2787 primarg0 = arg0, primarg1 = arg1;
2788 STRIP_NOPS (primarg0);
2789 STRIP_NOPS (primarg1);
2790 if (operand_equal_p (primarg0, primarg1, 0))
2793 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2794 actual comparison operand, ARG0.
2796 First throw away any conversions to wider types
2797 already present in the operands. */
2799 primarg1 = get_narrower (arg1, &unsignedp1);
2800 primother = get_narrower (other, &unsignedpo);
2802 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2803 if (unsignedp1 == unsignedpo
2804 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2805 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2807 tree type = TREE_TYPE (arg0);
2809 /* Make sure shorter operand is extended the right way
2810 to match the longer operand. */
2811 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2812 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2814 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2821 /* See if ARG is an expression that is either a comparison or is performing
2822 arithmetic on comparisons. The comparisons must only be comparing
2823 two different values, which will be stored in *CVAL1 and *CVAL2; if
2824 they are nonzero it means that some operands have already been found.
2825 No variables may be used anywhere else in the expression except in the
2826 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2827 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2829 If this is true, return 1. Otherwise, return zero. */
2832 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2834 enum tree_code code = TREE_CODE (arg);
2835 enum tree_code_class class = TREE_CODE_CLASS (code);
2837 /* We can handle some of the tcc_expression cases here. */
2838 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2840 else if (class == tcc_expression
2841 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2842 || code == COMPOUND_EXPR))
2845 else if (class == tcc_expression && code == SAVE_EXPR
2846 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2848 /* If we've already found a CVAL1 or CVAL2, this expression is
2849 too complex to handle. */
2850 if (*cval1 || *cval2)
2860 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2863 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2864 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2865 cval1, cval2, save_p));
2870 case tcc_expression:
2871 if (code == COND_EXPR)
2872 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2873 cval1, cval2, save_p)
2874 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2875 cval1, cval2, save_p)
2876 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2877 cval1, cval2, save_p));
2880 case tcc_comparison:
2881 /* First see if we can handle the first operand, then the second. For
2882 the second operand, we know *CVAL1 can't be zero. It must be that
2883 one side of the comparison is each of the values; test for the
2884 case where this isn't true by failing if the two operands are the same. */
2887 if (operand_equal_p (TREE_OPERAND (arg, 0),
2888 TREE_OPERAND (arg, 1), 0))
2892 *cval1 = TREE_OPERAND (arg, 0);
2893 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2895 else if (*cval2 == 0)
2896 *cval2 = TREE_OPERAND (arg, 0);
2897 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2902 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2904 else if (*cval2 == 0)
2905 *cval2 = TREE_OPERAND (arg, 1);
2906 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2918 /* ARG is a tree that is known to contain just arithmetic operations and
2919 comparisons. Evaluate the operations in the tree substituting NEW0 for
2920 any occurrence of OLD0 as an operand of a comparison, and likewise NEW1 for OLD1. */
2924 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2926 tree type = TREE_TYPE (arg);
2927 enum tree_code code = TREE_CODE (arg);
2928 enum tree_code_class class = TREE_CODE_CLASS (code);
2930 /* We can handle some of the tcc_expression cases here. */
2931 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2933 else if (class == tcc_expression
2934 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2940 return fold_build1 (code, type,
2941 eval_subst (TREE_OPERAND (arg, 0),
2942 old0, new0, old1, new1));
2945 return fold_build2 (code, type,
2946 eval_subst (TREE_OPERAND (arg, 0),
2947 old0, new0, old1, new1),
2948 eval_subst (TREE_OPERAND (arg, 1),
2949 old0, new0, old1, new1));
2951 case tcc_expression:
2955 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2958 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2961 return fold_build3 (code, type,
2962 eval_subst (TREE_OPERAND (arg, 0),
2963 old0, new0, old1, new1),
2964 eval_subst (TREE_OPERAND (arg, 1),
2965 old0, new0, old1, new1),
2966 eval_subst (TREE_OPERAND (arg, 2),
2967 old0, new0, old1, new1));
2971 /* Fall through - ??? */
2973 case tcc_comparison:
2975 tree arg0 = TREE_OPERAND (arg, 0);
2976 tree arg1 = TREE_OPERAND (arg, 1);
2978 /* We need to check both for exact equality and tree equality. The
2979 former will be true if the operand has a side-effect. In that
2980 case, we know the operand occurred exactly once. */
2982 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2984 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2987 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2989 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2992 return fold_build2 (code, type, arg0, arg1);
3000 /* Return a tree for the case when the result of an expression is RESULT
3001 converted to TYPE and OMITTED was previously an operand of the expression
3002 but is now not needed (e.g., we folded OMITTED * 0).
3004 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3005 the conversion of RESULT to TYPE. */
3008 omit_one_operand (tree type, tree result, tree omitted)
3010 tree t = fold_convert (type, result);
3012 if (TREE_SIDE_EFFECTS (omitted))
3013 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3015 return non_lvalue (t);
3018 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3021 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3023 tree t = fold_convert (type, result);
3025 if (TREE_SIDE_EFFECTS (omitted))
3026 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3028 return pedantic_non_lvalue (t);
3031 /* Return a tree for the case when the result of an expression is RESULT
3032 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3033 of the expression but are now not needed.
3035 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3036 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3037 evaluated before OMITTED2. Otherwise, if neither has side effects,
3038 just do the conversion of RESULT to TYPE. */
3041 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3043 tree t = fold_convert (type, result);
3045 if (TREE_SIDE_EFFECTS (omitted2))
3046 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3047 if (TREE_SIDE_EFFECTS (omitted1))
3048 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3050 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
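/* As an illustration (f is an arbitrary function with side effects,
   used only for the example): when "f () * 0" is folded to zero, the
   call cannot simply be dropped, so omit_one_operand produces the
   equivalent of

     (f (), 0)

   i.e. a COMPOUND_EXPR evaluating the omitted operand for its effects
   and yielding the converted result.  */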
3054 /* Return a simplified tree node for the truth-negation of ARG. This
3055 never alters ARG itself. We assume that ARG is an operation that
3056 returns a truth value (0 or 1).
3058 FIXME: one would think we would fold the result, but it causes
3059 problems with the dominator optimizer. */
3062 fold_truth_not_expr (tree arg)
3064 tree type = TREE_TYPE (arg);
3065 enum tree_code code = TREE_CODE (arg);
3067 /* If this is a comparison, we can simply invert it, except for
3068 floating-point non-equality comparisons, in which case we just
3069 enclose a TRUTH_NOT_EXPR around what we have. */
3071 if (TREE_CODE_CLASS (code) == tcc_comparison)
3073 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3074 if (FLOAT_TYPE_P (op_type)
3075 && flag_trapping_math
3076 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3077 && code != NE_EXPR && code != EQ_EXPR)
3081 code = invert_tree_comparison (code,
3082 HONOR_NANS (TYPE_MODE (op_type)));
3083 if (code == ERROR_MARK)
3086 return build2 (code, type,
3087 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3094 return constant_boolean_node (integer_zerop (arg), type);
3096 case TRUTH_AND_EXPR:
3097 return build2 (TRUTH_OR_EXPR, type,
3098 invert_truthvalue (TREE_OPERAND (arg, 0)),
3099 invert_truthvalue (TREE_OPERAND (arg, 1)));
3102 return build2 (TRUTH_AND_EXPR, type,
3103 invert_truthvalue (TREE_OPERAND (arg, 0)),
3104 invert_truthvalue (TREE_OPERAND (arg, 1)));
3106 case TRUTH_XOR_EXPR:
3107 /* Here we can invert either operand. We invert the first operand
3108 unless the second operand is a TRUTH_NOT_EXPR in which case our
3109 result is the XOR of the first operand with the inside of the
3110 negation of the second operand. */
3112 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3113 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3114 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3116 return build2 (TRUTH_XOR_EXPR, type,
3117 invert_truthvalue (TREE_OPERAND (arg, 0)),
3118 TREE_OPERAND (arg, 1));
3120 case TRUTH_ANDIF_EXPR:
3121 return build2 (TRUTH_ORIF_EXPR, type,
3122 invert_truthvalue (TREE_OPERAND (arg, 0)),
3123 invert_truthvalue (TREE_OPERAND (arg, 1)));
3125 case TRUTH_ORIF_EXPR:
3126 return build2 (TRUTH_ANDIF_EXPR, type,
3127 invert_truthvalue (TREE_OPERAND (arg, 0)),
3128 invert_truthvalue (TREE_OPERAND (arg, 1)));
3130 case TRUTH_NOT_EXPR:
3131 return TREE_OPERAND (arg, 0);
3135 tree arg1 = TREE_OPERAND (arg, 1);
3136 tree arg2 = TREE_OPERAND (arg, 2);
3137 /* A COND_EXPR may have a throw as one operand, which
3138 then has void type. Just leave void operands alone. */
3140 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3141 VOID_TYPE_P (TREE_TYPE (arg1))
3142 ? arg1 : invert_truthvalue (arg1),
3143 VOID_TYPE_P (TREE_TYPE (arg2))
3144 ? arg2 : invert_truthvalue (arg2));
3148 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3149 invert_truthvalue (TREE_OPERAND (arg, 1)));
3151 case NON_LVALUE_EXPR:
3152 return invert_truthvalue (TREE_OPERAND (arg, 0));
3155 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3156 return build1 (TRUTH_NOT_EXPR, type, arg);
3160 return build1 (TREE_CODE (arg), type,
3161 invert_truthvalue (TREE_OPERAND (arg, 0)));
3164 if (!integer_onep (TREE_OPERAND (arg, 1)))
3166 return build2 (EQ_EXPR, type, arg,
3167 build_int_cst (type, 0));
3170 return build1 (TRUTH_NOT_EXPR, type, arg);
3172 case CLEANUP_POINT_EXPR:
3173 return build1 (CLEANUP_POINT_EXPR, type,
3174 invert_truthvalue (TREE_OPERAND (arg, 0)));
3183 /* Return a simplified tree node for the truth-negation of ARG. This
3184 never alters ARG itself. We assume that ARG is an operation that
3185 returns a truth value (0 or 1).
3187 FIXME: one would think we would fold the result, but it causes
3188 problems with the dominator optimizer. */
3191 invert_truthvalue (tree arg)
3195 if (TREE_CODE (arg) == ERROR_MARK)
3198 tem = fold_truth_not_expr (arg);
3200 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
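/* As an example of the recursion above, the truth-negation follows
   De Morgan's laws:

     !(a && b)   ->   !a || !b
     !(a || b)   ->   !a && !b

   and an integral comparison such as !(x < y) simply becomes x >= y
   through invert_tree_comparison.  */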
3205 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3206 operands are another bit-wise operation with a common input. If so,
3207 distribute the bit operations to save an operation and possibly two if
3208 constants are involved. For example, convert
3209 (A | B) & (A | C) into A | (B & C)
3210 Further simplification will occur if B and C are constants.
3212 If this optimization cannot be done, 0 will be returned. */
3215 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3220 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3221 || TREE_CODE (arg0) == code
3222 || (TREE_CODE (arg0) != BIT_AND_EXPR
3223 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3226 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3228 common = TREE_OPERAND (arg0, 0);
3229 left = TREE_OPERAND (arg0, 1);
3230 right = TREE_OPERAND (arg1, 1);
3232 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3234 common = TREE_OPERAND (arg0, 0);
3235 left = TREE_OPERAND (arg0, 1);
3236 right = TREE_OPERAND (arg1, 0);
3238 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3240 common = TREE_OPERAND (arg0, 1);
3241 left = TREE_OPERAND (arg0, 0);
3242 right = TREE_OPERAND (arg1, 1);
3244 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3246 common = TREE_OPERAND (arg0, 1);
3247 left = TREE_OPERAND (arg0, 0);
3248 right = TREE_OPERAND (arg1, 0);
3253 return fold_build2 (TREE_CODE (arg0), type, common,
3254 fold_build2 (code, type, left, right));
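/* A concrete instance, with constants chosen only for illustration:

     (x | 3) & (x | 5)   ->   x | (3 & 5)   ->   x | 1

   which saves one bitwise operation and lets the constants fold.  */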
3257 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3258 with code CODE. This optimization is unsafe. */
3260 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3262 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3263 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3265 /* (A / C) +- (B / C) -> (A +- B) / C. */
3267 && operand_equal_p (TREE_OPERAND (arg0, 1),
3268 TREE_OPERAND (arg1, 1), 0))
3269 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3270 fold_build2 (code, type,
3271 TREE_OPERAND (arg0, 0),
3272 TREE_OPERAND (arg1, 0)),
3273 TREE_OPERAND (arg0, 1));
3275 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3276 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3277 TREE_OPERAND (arg1, 0), 0)
3278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3279 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3281 REAL_VALUE_TYPE r0, r1;
3282 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3283 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3285 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3287 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3288 real_arithmetic (&r0, code, &r0, &r1);
3289 return fold_build2 (MULT_EXPR, type,
3290 TREE_OPERAND (arg0, 0),
3291 build_real (type, r0));
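/* For illustration (operand values are arbitrary), the two cases above
   correspond to

     a/c + b/c    ->   (a + b) / c
     a/2. + a/4.  ->   a * (1./2. + 1./4.)   ->   a * 0.75

   Both can change rounding and overflow behavior, which is why this
   transformation is flagged as unsafe.  */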
3297 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3298 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3301 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3308 tree size = TYPE_SIZE (TREE_TYPE (inner));
3309 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3310 || POINTER_TYPE_P (TREE_TYPE (inner)))
3311 && host_integerp (size, 0)
3312 && tree_low_cst (size, 0) == bitsize)
3313 return fold_convert (type, inner);
3316 result = build3 (BIT_FIELD_REF, type, inner,
3317 size_int (bitsize), bitsize_int (bitpos));
3319 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3324 /* Optimize a bit-field compare.
3326 There are two cases: First is a compare against a constant and the
3327 second is a comparison of two items where the fields are at the same
3328 bit position relative to the start of a chunk (byte, halfword, word)
3329 large enough to contain it. In these cases we can avoid the shift
3330 implicit in bitfield extractions.
3332 For constants, we emit a compare of the shifted constant with the
3333 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3334 compared. For two fields at the same position, we do the ANDs with the
3335 similar mask and compare the result of the ANDs.
3337 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3338 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3339 are the left and right operands of the comparison, respectively.
3341 If the optimization described above can be done, we return the resulting
3342 tree. Otherwise we return zero. */
3345 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3348 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3349 tree type = TREE_TYPE (lhs);
3350 tree signed_type, unsigned_type;
3351 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3352 enum machine_mode lmode, rmode, nmode;
3353 int lunsignedp, runsignedp;
3354 int lvolatilep = 0, rvolatilep = 0;
3355 tree linner, rinner = NULL_TREE;
3359 /* Get all the information about the extractions being done. If the bit size
3360 is the same as the size of the underlying object, we aren't doing an
3361 extraction at all and so can do nothing. We also don't want to
3362 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3363 then will no longer be able to replace it. */
3364 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3365 &lunsignedp, &lvolatilep, false);
3366 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3367 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3372 /* If this is not a constant, we can only do something if bit positions,
3373 sizes, and signedness are the same. */
3374 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3375 &runsignedp, &rvolatilep, false);
3377 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3378 || lunsignedp != runsignedp || offset != 0
3379 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3383 /* See if we can find a mode to refer to this field. We should be able to,
3384 but fail if we can't. */
3385 nmode = get_best_mode (lbitsize, lbitpos,
3386 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3387 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3388 TYPE_ALIGN (TREE_TYPE (rinner))),
3389 word_mode, lvolatilep || rvolatilep);
3390 if (nmode == VOIDmode)
3393 /* Set signed and unsigned types of the precision of this mode for the computation. */
3395 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3396 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3398 /* Compute the bit position and size for the new reference and our offset
3399 within it. If the new reference is the same size as the original, we
3400 won't optimize anything, so return zero. */
3401 nbitsize = GET_MODE_BITSIZE (nmode);
3402 nbitpos = lbitpos & ~ (nbitsize - 1);
3404 if (nbitsize == lbitsize)
3407 if (BYTES_BIG_ENDIAN)
3408 lbitpos = nbitsize - lbitsize - lbitpos;
3410 /* Make the mask to be used against the extracted field. */
3411 mask = build_int_cst (unsigned_type, -1);
3412 mask = force_fit_type (mask, 0, false, false);
3413 mask = fold_convert (unsigned_type, mask);
3414 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3415 mask = const_binop (RSHIFT_EXPR, mask,
3416 size_int (nbitsize - lbitsize - lbitpos), 0);
3419 /* If not comparing with constant, just rework the comparison and do it. */
3421 return build2 (code, compare_type,
3422 build2 (BIT_AND_EXPR, unsigned_type,
3423 make_bit_field_ref (linner, unsigned_type,
3424 nbitsize, nbitpos, 1),
3426 build2 (BIT_AND_EXPR, unsigned_type,
3427 make_bit_field_ref (rinner, unsigned_type,
3428 nbitsize, nbitpos, 1),
3431 /* Otherwise, we are handling the constant case. See if the constant is too
3432 big for the field. Warn and return a tree for 0 (false) if so. We do
3433 this not only for its own sake, but to avoid having to test for this
3434 error case below. If we didn't, we might generate wrong code.
3436 For unsigned fields, the constant shifted right by the field length should
3437 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3442 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3443 fold_convert (unsigned_type, rhs),
3444 size_int (lbitsize), 0)))
3446 warning (0, "comparison is always %d due to width of bit-field",
3448 return constant_boolean_node (code == NE_EXPR, compare_type);
3453 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3454 size_int (lbitsize - 1), 0);
3455 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3457 warning (0, "comparison is always %d due to width of bit-field",
3459 return constant_boolean_node (code == NE_EXPR, compare_type);
3463 /* Single-bit compares should always be against zero. */
3464 if (lbitsize == 1 && ! integer_zerop (rhs))
3466 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3467 rhs = build_int_cst (type, 0);
3470 /* Make a new bitfield reference, shift the constant over the
3471 appropriate number of bits and mask it with the computed mask
3472 (in case this was a signed field). If we changed it, make a new one. */
3473 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3476 TREE_SIDE_EFFECTS (lhs) = 1;
3477 TREE_THIS_VOLATILE (lhs) = 1;
3480 rhs = const_binop (BIT_AND_EXPR,
3481 const_binop (LSHIFT_EXPR,
3482 fold_convert (unsigned_type, rhs),
3483 size_int (lbitpos), 0),
3486 return build2 (code, compare_type,
3487 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
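/* A sketch of the constant case handled above (field layout is target
   dependent, so the numbers are only illustrative): for

     struct { unsigned f : 3; } s;   ...   s.f == 5

   we load a mode-sized chunk W containing the field and test

     (W & mask) == (5 << bitpos)

   instead of extracting and shifting the bit-field itself.  */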
3491 /* Subroutine for fold_truthop: decode a field reference.
3493 If EXP is a component reference, we return the innermost reference.
3495 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3496 set to the starting bit number.
3498 If the innermost field can be completely contained in a mode-sized
3499 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3501 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3502 otherwise it is not changed.
3504 *PUNSIGNEDP is set to the signedness of the field.
3506 *PMASK is set to the mask used. This is either contained in a
3507 BIT_AND_EXPR or derived from the width of the field.
3509 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3511 Return 0 if this is not a component reference or is one that we can't
3512 do anything with. */
3515 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3516 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3517 int *punsignedp, int *pvolatilep,
3518 tree *pmask, tree *pand_mask)
3520 tree outer_type = 0;
3522 tree mask, inner, offset;
3524 unsigned int precision;
3526 /* All the optimizations using this function assume integer fields.
3527 There are problems with FP fields since the type_for_size call
3528 below can fail for, e.g., XFmode. */
3529 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3532 /* We are interested in the bare arrangement of bits, so strip everything
3533 that doesn't affect the machine mode. However, record the type of the
3534 outermost expression if it may matter below. */
3535 if (TREE_CODE (exp) == NOP_EXPR
3536 || TREE_CODE (exp) == CONVERT_EXPR
3537 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3538 outer_type = TREE_TYPE (exp);
3541 if (TREE_CODE (exp) == BIT_AND_EXPR)
3543 and_mask = TREE_OPERAND (exp, 1);
3544 exp = TREE_OPERAND (exp, 0);
3545 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3546 if (TREE_CODE (and_mask) != INTEGER_CST)
3550 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3551 punsignedp, pvolatilep, false);
3552 if ((inner == exp && and_mask == 0)
3553 || *pbitsize < 0 || offset != 0
3554 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3557 /* If the number of bits in the reference is the same as the bitsize of
3558 the outer type, then the outer type gives the signedness. Otherwise
3559 (in case of a small bitfield) the signedness is unchanged. */
3560 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3561 *punsignedp = TYPE_UNSIGNED (outer_type);
3563 /* Compute the mask to access the bitfield. */
3564 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3565 precision = TYPE_PRECISION (unsigned_type);
3567 mask = build_int_cst (unsigned_type, -1);
3568 mask = force_fit_type (mask, 0, false, false);
3570 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3571 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3573 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3575 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3576 fold_convert (unsigned_type, and_mask), mask);
3579 *pand_mask = and_mask;
3583 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3587 all_ones_mask_p (tree mask, int size)
3589 tree type = TREE_TYPE (mask);
3590 unsigned int precision = TYPE_PRECISION (type);
3593 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3594 tmask = force_fit_type (tmask, 0, false, false);
3597 tree_int_cst_equal (mask,
3598 const_binop (RSHIFT_EXPR,
3599 const_binop (LSHIFT_EXPR, tmask,
3600 size_int (precision - size),
3602 size_int (precision - size), 0));
3605 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3606 represents the sign bit of EXP's type. If EXP represents a sign
3607 or zero extension, also test VAL against the unextended type.
3608 The return value is the (sub)expression whose sign bit is VAL,
3609 or NULL_TREE otherwise. */
3612 sign_bit_p (tree exp, tree val)
3614 unsigned HOST_WIDE_INT mask_lo, lo;
3615 HOST_WIDE_INT mask_hi, hi;
3619 /* Tree EXP must have an integral type. */
3620 t = TREE_TYPE (exp);
3621 if (! INTEGRAL_TYPE_P (t))
3624 /* Tree VAL must be an integer constant. */
3625 if (TREE_CODE (val) != INTEGER_CST
3626 || TREE_CONSTANT_OVERFLOW (val))
3629 width = TYPE_PRECISION (t);
3630 if (width > HOST_BITS_PER_WIDE_INT)
3632 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3635 mask_hi = ((unsigned HOST_WIDE_INT) -1
3636 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3642 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3645 mask_lo = ((unsigned HOST_WIDE_INT) -1
3646 >> (HOST_BITS_PER_WIDE_INT - width));
3649 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3650 treat VAL as if it were unsigned. */
3651 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3652 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3655 /* Handle extension from a narrower type. */
3656 if (TREE_CODE (exp) == NOP_EXPR
3657 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3658 return sign_bit_p (TREE_OPERAND (exp, 0), val);
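/* For example, assuming a 32-bit int (an assumption made only for
   illustration), sign_bit_p (x, val) returns X when VAL is 0x80000000;
   and if X is a widening NOP_EXPR of a 16-bit operand, VAL equal to
   0x8000 is matched against that narrower operand instead.  */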
3663 /* Subroutine for fold_truthop: determine if an operand is simple enough
3664 to be evaluated unconditionally. */
3667 simple_operand_p (tree exp)
3669 /* Strip any conversions that don't change the machine mode. */
3672 return (CONSTANT_CLASS_P (exp)
3673 || TREE_CODE (exp) == SSA_NAME
3675 && ! TREE_ADDRESSABLE (exp)
3676 && ! TREE_THIS_VOLATILE (exp)
3677 && ! DECL_NONLOCAL (exp)
3678 /* Don't regard global variables as simple. They may be
3679 allocated in ways unknown to the compiler (shared memory,
3680 #pragma weak, etc). */
3681 && ! TREE_PUBLIC (exp)
3682 && ! DECL_EXTERNAL (exp)
3683 /* Loading a static variable is unduly expensive, but global
3684 registers aren't expensive. */
3685 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3688 /* The following functions are subroutines to fold_range_test and allow it to
3689 try to change a logical combination of comparisons into a range test.
3692 X == 2 || X == 3 || X == 4 || X == 5
3696 (unsigned) (X - 2) <= 3
3698 We describe each set of comparisons as being either inside or outside
3699 a range, using a variable named like IN_P, and then describe the
3700 range with a lower and upper bound. If one of the bounds is omitted,
3701 it represents either the highest or lowest value of the type.
3703 In the comments below, we represent a range by two numbers in brackets
3704 preceded by a "+" to designate being inside that range, or a "-" to
3705 designate being outside that range, so the condition can be inverted by
3706 flipping the prefix. An omitted bound is represented by a "-". For
3707 example, "- [-, 10]" means being outside the range starting at the lowest
3708 possible value and ending at 10, in other words, being greater than 10.
3709 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3712 We set things up so that the missing bounds are handled in a consistent
3713 manner, so that neither a missing bound nor "true" and "false" needs to be
3714 handled as a special case. */
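/* Using this notation, the introductory example works out as follows:
   each equality X == c is the range + [c, c], and

     X == 2 || X == 3 || X == 4 || X == 5

   merges into the single range + [2, 5], which build_range_check later
   emits as (unsigned) (X - 2) <= 3, one subtraction and one compare in
   place of four compares.  */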
3716 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3717 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3718 and UPPER1_P are nonzero if the respective argument is an upper bound
3719 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3720 must be specified for a comparison. ARG1 will be converted to ARG0's
3721 type if both are specified. */
3724 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3725 tree arg1, int upper1_p)
3731 /* If neither arg represents infinity, do the normal operation.
3732 Else, if not a comparison, return infinity. Else handle the special
3733 comparison rules. Note that most of the cases below won't occur, but
3734 are handled for consistency. */
3736 if (arg0 != 0 && arg1 != 0)
3738 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3739 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3741 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3744 if (TREE_CODE_CLASS (code) != tcc_comparison)
3747 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3748 for neither. In real maths, we cannot assume open ended ranges are
3749 the same. But, this is computer arithmetic, where numbers are finite.
3750 We can therefore make the transformation of any unbounded range with
3751 the value Z, Z being greater than any representable number. This permits
3752 us to treat unbounded ranges as equal. */
3753 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3754 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3758 result = sgn0 == sgn1;
3761 result = sgn0 != sgn1;
3764 result = sgn0 < sgn1;
3767 result = sgn0 <= sgn1;
3770 result = sgn0 > sgn1;
3773 result = sgn0 >= sgn1;
3779 return constant_boolean_node (result, type);
3782 /* Given EXP, a logical expression, set the range it is testing into
3783 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3784 actually being tested. *PLOW and *PHIGH will be made of the same type
3785 as the returned expression. If EXP is not a comparison, we will most
3786 likely not be returning a useful value and range. */
3789 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3791 enum tree_code code;
3792 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3793 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3795 tree low, high, n_low, n_high;
3797 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3798 and see if we can refine the range. Some of the cases below may not
3799 happen, but it doesn't seem worth worrying about this. We "continue"
3800 the outer loop when we've changed something; otherwise we "break"
3801 the switch, which will "break" the while. */
3804 low = high = build_int_cst (TREE_TYPE (exp), 0);
3808 code = TREE_CODE (exp);
3809 exp_type = TREE_TYPE (exp);
3811 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3813 if (TREE_CODE_LENGTH (code) > 0)
3814 arg0 = TREE_OPERAND (exp, 0);
3815 if (TREE_CODE_CLASS (code) == tcc_comparison
3816 || TREE_CODE_CLASS (code) == tcc_unary
3817 || TREE_CODE_CLASS (code) == tcc_binary)
3818 arg0_type = TREE_TYPE (arg0);
3819 if (TREE_CODE_CLASS (code) == tcc_binary
3820 || TREE_CODE_CLASS (code) == tcc_comparison
3821 || (TREE_CODE_CLASS (code) == tcc_expression
3822 && TREE_CODE_LENGTH (code) > 1))
3823 arg1 = TREE_OPERAND (exp, 1);
3828 case TRUTH_NOT_EXPR:
3829 in_p = ! in_p, exp = arg0;
3832 case EQ_EXPR: case NE_EXPR:
3833 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3834 /* We can only do something if the range is testing for zero
3835 and if the second operand is an integer constant. Note that
3836 saying something is "in" the range we make is done by
3837 complementing IN_P since it will set in the initial case of
3838 being not equal to zero; "out" is leaving it alone. */
3839 if (low == 0 || high == 0
3840 || ! integer_zerop (low) || ! integer_zerop (high)
3841 || TREE_CODE (arg1) != INTEGER_CST)
3846 case NE_EXPR: /* - [c, c] */
3849 case EQ_EXPR: /* + [c, c] */
3850 in_p = ! in_p, low = high = arg1;
3852 case GT_EXPR: /* - [-, c] */
3853 low = 0, high = arg1;
3855 case GE_EXPR: /* + [c, -] */
3856 in_p = ! in_p, low = arg1, high = 0;
3858 case LT_EXPR: /* - [c, -] */
3859 low = arg1, high = 0;
3861 case LE_EXPR: /* + [-, c] */
3862 in_p = ! in_p, low = 0, high = arg1;
3868 /* If this is an unsigned comparison, we also know that EXP is
3869 greater than or equal to zero. We base the range tests we make
3870 on that fact, so we record it here so we can parse existing
3871 range tests. We test arg0_type since often the return type
3872 of, e.g. EQ_EXPR, is boolean. */
3873 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3875 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3877 build_int_cst (arg0_type, 0),
3881 in_p = n_in_p, low = n_low, high = n_high;
3883 /* If the high bound is missing, but we have a nonzero low
3884 bound, reverse the range so it goes from zero to the low bound minus one. */
3886 if (high == 0 && low && ! integer_zerop (low))
3889 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3890 integer_one_node, 0);
3891 low = build_int_cst (arg0_type, 0);
3899 /* (-x) IN [a,b] -> x in [-b, -a] */
3900 n_low = range_binop (MINUS_EXPR, exp_type,
3901 build_int_cst (exp_type, 0),
3903 n_high = range_binop (MINUS_EXPR, exp_type,
3904 build_int_cst (exp_type, 0),
3906 low = n_low, high = n_high;
3912 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3913 build_int_cst (exp_type, 1));
3916 case PLUS_EXPR: case MINUS_EXPR:
3917 if (TREE_CODE (arg1) != INTEGER_CST)
3920 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3921 move a constant to the other side. */
3922 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3925 /* If EXP is signed, any overflow in the computation is undefined,
3926 so we don't worry about it so long as our computations on
3927 the bounds don't overflow. For unsigned, overflow is defined
3928 and this is exactly the right thing. */
3929 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3930 arg0_type, low, 0, arg1, 0);
3931 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3932 arg0_type, high, 1, arg1, 0);
3933 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3934 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3937 /* Check for an unsigned range which has wrapped around the maximum
3938 value thus making n_high < n_low, and normalize it. */
3939 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3941 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3942 integer_one_node, 0);
3943 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3944 integer_one_node, 0);
3946 /* If the range is of the form +/- [ x+1, x ], we won't
3947 be able to normalize it. But then, it represents the
3948 whole range or the empty set, so make it +/- [-, -]. */
3950 if (tree_int_cst_equal (n_low, low)
3951 && tree_int_cst_equal (n_high, high))
3957 low = n_low, high = n_high;
3962 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3963 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3966 if (! INTEGRAL_TYPE_P (arg0_type)
3967 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3968 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3971 n_low = low, n_high = high;
3974 n_low = fold_convert (arg0_type, n_low);
3977 n_high = fold_convert (arg0_type, n_high);
3980 /* If we're converting arg0 from an unsigned type, to exp,
3981 a signed type, we will be doing the comparison as unsigned.
3982 The tests above have already verified that LOW and HIGH are both positive.
3985 So we have to ensure that we will handle large unsigned
3986 values the same way that the current signed bounds treat negative values. */
3989 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3992 tree equiv_type = lang_hooks.types.type_for_mode
3993 (TYPE_MODE (arg0_type), 1);
3995 /* A range without an upper bound is, naturally, unbounded.
3996 Since convert would have cropped a very large value, use
3997 the max value for the destination type. */
3999 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4000 : TYPE_MAX_VALUE (arg0_type);
4002 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4003 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4004 fold_convert (arg0_type,
4006 fold_convert (arg0_type,
4009 /* If the low bound is specified, "and" the range with the
4010 range for which the original unsigned value will be positive. */
4014 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4015 1, n_low, n_high, 1,
4016 fold_convert (arg0_type,
4021 in_p = (n_in_p == in_p);
4025 /* Otherwise, "or" the range with the range of the input
4026 that will be interpreted as negative. */
4027 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4028 0, n_low, n_high, 1,
4029 fold_convert (arg0_type,
4034 in_p = (in_p != n_in_p);
4039 low = n_low, high = n_high;
4049 /* If EXP is a constant, we can evaluate whether this is true or false. */
4050 if (TREE_CODE (exp) == INTEGER_CST)
4052 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4054 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4060 *pin_p = in_p, *plow = low, *phigh = high;
4064 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4065 type, TYPE, return an expression to test if EXP is in (or out of, depending
4066 on IN_P) the range. Return 0 if the test couldn't be created. */
4069 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4071 tree etype = TREE_TYPE (exp);
4074 #ifdef HAVE_canonicalize_funcptr_for_compare
4075 /* Disable this optimization for function pointer expressions
4076 on targets that require function pointer canonicalization. */
4077 if (HAVE_canonicalize_funcptr_for_compare
4078 && TREE_CODE (etype) == POINTER_TYPE
4079 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4085 value = build_range_check (type, exp, 1, low, high);
4087 return invert_truthvalue (value);
4092 if (low == 0 && high == 0)
4093 return build_int_cst (type, 1);
4096 return fold_build2 (LE_EXPR, type, exp,
4097 fold_convert (etype, high));
4100 return fold_build2 (GE_EXPR, type, exp,
4101 fold_convert (etype, low));
4103 if (operand_equal_p (low, high, 0))
4104 return fold_build2 (EQ_EXPR, type, exp,
4105 fold_convert (etype, low));
4107 if (integer_zerop (low))
4109 if (! TYPE_UNSIGNED (etype))
4111 etype = lang_hooks.types.unsigned_type (etype);
4112 high = fold_convert (etype, high);
4113 exp = fold_convert (etype, exp);
4115 return build_range_check (type, exp, 1, 0, high);
4118 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4119 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4121 unsigned HOST_WIDE_INT lo;
4125 prec = TYPE_PRECISION (etype);
4126 if (prec <= HOST_BITS_PER_WIDE_INT)
4129 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4133 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4134 lo = (unsigned HOST_WIDE_INT) -1;
4137 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4139 if (TYPE_UNSIGNED (etype))
4141 etype = lang_hooks.types.signed_type (etype);
4142 exp = fold_convert (etype, exp);
4144 return fold_build2 (GT_EXPR, type, exp,
4145 build_int_cst (etype, 0));
4149 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4150 This requires wrap-around arithmetic for the type of the expression. */
4151 switch (TREE_CODE (etype))
4154 /* There is no requirement that LOW be within the range of ETYPE
4155 if the latter is a subtype. It must, however, be within the base
4156 type of ETYPE. So be sure we do the subtraction in that type. */
4157 if (TREE_TYPE (etype))
4158 etype = TREE_TYPE (etype);
4163 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4164 TYPE_UNSIGNED (etype));
4171 /* If we don't have wrap-around arithmetic up front, try to force it. */
4172 if (TREE_CODE (etype) == INTEGER_TYPE
4173 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4175 tree utype, minv, maxv;
4177 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4178 for the type in question, as we rely on this here. */
4179 utype = lang_hooks.types.unsigned_type (etype);
4180 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4181 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4182 integer_one_node, 1);
4183 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4185 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4192 high = fold_convert (etype, high);
4193 low = fold_convert (etype, low);
4194 exp = fold_convert (etype, exp);
4196 value = const_binop (MINUS_EXPR, high, low, 0);
4198 if (value != 0 && !TREE_OVERFLOW (value))
4199 return build_range_check (type,
4200 fold_build2 (MINUS_EXPR, etype, exp, low),
4201 1, build_int_cst (etype, 0), value);
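/* In the general case above, a two-sided test collapses to a single
   unsigned comparison (assuming wrap-around arithmetic is available for
   the chosen type):

     low <= c && c <= high   ->   (unsigned) (c - low) <= high - low

   while the special case further up turns e.g. c >= 1 && c <= 127 into
   (signed char) c > 0 when C has the width of a char.  */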
4206 /* Return the predecessor of VAL in its type, handling the infinite case. */
4209 range_predecessor (tree val)
4211 tree type = TREE_TYPE (val);
4213 if (INTEGRAL_TYPE_P (type)
4214 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4217 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4220 /* Return the successor of VAL in its type, handling the infinite case. */
4223 range_successor (tree val)
4225 tree type = TREE_TYPE (val);
4227 if (INTEGRAL_TYPE_P (type)
4228 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4231 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4234 /* Given two ranges, see if we can merge them into one. Return 1 if we
4235 can, 0 if we can't. Set the output range into the specified parameters. */
4238 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4239 tree high0, int in1_p, tree low1, tree high1)
4247 int lowequal = ((low0 == 0 && low1 == 0)
4248 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4249 low0, 0, low1, 0)));
4250 int highequal = ((high0 == 0 && high1 == 0)
4251 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4252 high0, 1, high1, 1)));
4254 /* Make range 0 be the range that starts first, or ends last if they
4255 start at the same value. Swap them if it isn't. */
4256 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4259 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4260 high1, 1, high0, 1))))
4262 temp = in0_p, in0_p = in1_p, in1_p = temp;
4263 tem = low0, low0 = low1, low1 = tem;
4264 tem = high0, high0 = high1, high1 = tem;
4267 /* Now flag two cases, whether the ranges are disjoint or whether the
4268 second range is totally subsumed in the first. Note that the tests
4269 below are simplified by the ones above. */
4270 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4271 high0, 1, low1, 0));
4272 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4273 high1, 1, high0, 1));
4275 /* We now have four cases, depending on whether we are including or
4276 excluding the two ranges. */
4279 /* If they don't overlap, the result is false. If the second range
4280 is a subset it is the result. Otherwise, the range is from the start
4281 of the second to the end of the first. */
4283 in_p = 0, low = high = 0;
4285 in_p = 1, low = low1, high = high1;
4287 in_p = 1, low = low1, high = high0;
4290 else if (in0_p && ! in1_p)
4292 /* If they don't overlap, the result is the first range. If they are
4293 equal, the result is false. If the second range is a subset of the
4294 first, and the ranges begin at the same place, we go from just after
4295 the end of the second range to the end of the first. If the second
4296 range is not a subset of the first, or if it is a subset and both
4297 ranges end at the same place, the range starts at the start of the
4298 first range and ends just before the second range.
4299 Otherwise, we can't describe this as a single range. */
4301 in_p = 1, low = low0, high = high0;
4302 else if (lowequal && highequal)
4303 in_p = 0, low = high = 0;
4304 else if (subset && lowequal)
4306 low = range_successor (high1);
4310 else if (! subset || highequal)
4313 high = range_predecessor (low1);
4320 else if (! in0_p && in1_p)
4322 /* If they don't overlap, the result is the second range. If the second
4323 is a subset of the first, the result is false. Otherwise,
4324 the range starts just after the first range and ends at the
4325 end of the second. */
4327 in_p = 1, low = low1, high = high1;
4328 else if (subset || highequal)
4329 in_p = 0, low = high = 0;
4332 low = range_successor (high0);
4340 /* The case where we are excluding both ranges. Here the complex case
4341 is if they don't overlap. In that case, the only time we have a
4342 range is if they are adjacent. If the second is a subset of the
4343 first, the result is the first. Otherwise, the range to exclude
4344 starts at the beginning of the first range and ends at the end of the second. */
4348 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4349 range_successor (high0),
4351 in_p = 0, low = low0, high = high1;
4354 /* Canonicalize - [min, x] into - [-, x]. */
4355 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4356 switch (TREE_CODE (TREE_TYPE (low0)))
4359 if (TYPE_PRECISION (TREE_TYPE (low0))
4360 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4364 if (tree_int_cst_equal (low0,
4365 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4369 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4370 && integer_zerop (low0))
4377 /* Canonicalize - [x, max] into - [x, -]. */
4378 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4379 switch (TREE_CODE (TREE_TYPE (high1)))
4382 if (TYPE_PRECISION (TREE_TYPE (high1))
4383 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4387 if (tree_int_cst_equal (high1,
4388 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4392 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4393 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4395 integer_one_node, 1)))
4402 /* The ranges might also be adjacent between the maximum and
4403 minimum values of the given type. For
4404 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4405 return + [x + 1, y - 1]. */
4406 if (low0 == 0 && high1 == 0)
4408 low = range_successor (high0);
4409 high = range_predecessor (low1);
4410 if (low == 0 || high == 0)
4420 in_p = 0, low = low0, high = high0;
4422 in_p = 0, low = low0, high = high1;
4425 *pin_p = in_p, *plow = low, *phigh = high;
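/* A small worked example of the merging above: for the conjunction

     X > 1 && X <= 9

   the left operand is the range - [-, 1] and the right one + [-, 9];
   merging gives + [2, 9], which the range-test machinery can then turn
   into a single unsigned comparison.  */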
4430 /* Subroutine of fold, looking inside expressions of the form
4431 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4432 of the COND_EXPR. This function is being used also to optimize
4433 A op B ? C : A, by reversing the comparison first.
4435 Return a folded expression whose code is not a COND_EXPR
4436 anymore, or NULL_TREE if no folding opportunity is found. */
4439 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4441 enum tree_code comp_code = TREE_CODE (arg0);
4442 tree arg00 = TREE_OPERAND (arg0, 0);
4443 tree arg01 = TREE_OPERAND (arg0, 1);
4444 tree arg1_type = TREE_TYPE (arg1);
4450 /* If we have A op 0 ? A : -A, consider applying the following
4453 A == 0? A : -A same as -A
4454 A != 0? A : -A same as A
4455 A >= 0? A : -A same as abs (A)
4456 A > 0? A : -A same as abs (A)
4457 A <= 0? A : -A same as -abs (A)
4458 A < 0? A : -A same as -abs (A)
4460 None of these transformations work for modes with signed
4461 zeros. If A is +/-0, the first two transformations will
4462 change the sign of the result (from +0 to -0, or vice
4463 versa). The last four will fix the sign of the result,
4464 even though the original expressions could be positive or
4465 negative, depending on the sign of A.
4467 Note that all these transformations are correct if A is
4468 NaN, since the two alternatives (A and -A) are also NaNs. */
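/* For example, when signed zeros and trapping math need not be honored,
   "x > 0.0 ? x : -x" is folded to ABS_EXPR <x> below.  */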
4469 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4470 ? real_zerop (arg01)
4471 : integer_zerop (arg01))
4472 && ((TREE_CODE (arg2) == NEGATE_EXPR
4473 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4474 /* In the case that A is of the form X-Y, '-A' (arg2) may
4475 have already been folded to Y-X, check for that. */
4476 || (TREE_CODE (arg1) == MINUS_EXPR
4477 && TREE_CODE (arg2) == MINUS_EXPR
4478 && operand_equal_p (TREE_OPERAND (arg1, 0),
4479 TREE_OPERAND (arg2, 1), 0)
4480 && operand_equal_p (TREE_OPERAND (arg1, 1),
4481 TREE_OPERAND (arg2, 0), 0))))
4486 tem = fold_convert (arg1_type, arg1);
4487 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4490 return pedantic_non_lvalue (fold_convert (type, arg1));
4493 if (flag_trapping_math)
4498 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4499 arg1 = fold_convert (lang_hooks.types.signed_type
4500 (TREE_TYPE (arg1)), arg1);
4501 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4502 return pedantic_non_lvalue (fold_convert (type, tem));
4505 if (flag_trapping_math)
4509 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4510 arg1 = fold_convert (lang_hooks.types.signed_type
4511 (TREE_TYPE (arg1)), arg1);
4512 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4513 return negate_expr (fold_convert (type, tem));
4515 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4519 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4520 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4521 both transformations are correct when A is NaN: A != 0
4522 is then true, and A == 0 is false. */
4524 if (integer_zerop (arg01) && integer_zerop (arg2))
4526 if (comp_code == NE_EXPR)
4527 return pedantic_non_lvalue (fold_convert (type, arg1));
4528 else if (comp_code == EQ_EXPR)
4529 return build_int_cst (type, 0);
4532 /* Try some transformations of A op B ? A : B.
4534 A == B? A : B same as B
4535 A != B? A : B same as A
4536 A >= B? A : B same as max (A, B)
4537 A > B? A : B same as max (B, A)
4538 A <= B? A : B same as min (A, B)
4539 A < B? A : B same as min (B, A)
4541 As above, these transformations don't work in the presence
4542 of signed zeros. For example, if A and B are zeros of
4543 opposite sign, the first two transformations will change
4544 the sign of the result. In the last four, the original
4545 expressions give different results for (A=+0, B=-0) and
4546 (A=-0, B=+0), but the transformed expressions do not.
4548 The first two transformations are correct if either A or B
4549 is a NaN. In the first transformation, the condition will
4550 be false, and B will indeed be chosen. In the case of the
4551 second transformation, the condition A != B will be true,
4552 and A will be chosen.
4554 The conversions to max() and min() are not correct if B is
4555 a number and A is not. The conditions in the original
4556 expressions will be false, so all four give B. The min()
4557 and max() versions would give a NaN instead. */
4558 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4559 /* Avoid these transformations if the COND_EXPR may be used
4560 as an lvalue in the C++ front-end. PR c++/19199. */
4562 || (strcmp (lang_hooks.name, "GNU C++") != 0
4563 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4564 || ! maybe_lvalue_p (arg1)
4565 || ! maybe_lvalue_p (arg2)))
4567 tree comp_op0 = arg00;
4568 tree comp_op1 = arg01;
4569 tree comp_type = TREE_TYPE (comp_op0);
4571 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4572 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4582 return pedantic_non_lvalue (fold_convert (type, arg2));
4584 return pedantic_non_lvalue (fold_convert (type, arg1));
4589 /* In C++ a ?: expression can be an lvalue, so put the
4590 operand which will be used if they are equal first
4591 so that we can convert this back to the
4592 corresponding COND_EXPR. */
4593 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4595 comp_op0 = fold_convert (comp_type, comp_op0);
4596 comp_op1 = fold_convert (comp_type, comp_op1);
4597 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4598 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4599 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4600 return pedantic_non_lvalue (fold_convert (type, tem));
4607 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4609 comp_op0 = fold_convert (comp_type, comp_op0);
4610 comp_op1 = fold_convert (comp_type, comp_op1);
4611 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4612 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4613 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4614 return pedantic_non_lvalue (fold_convert (type, tem));
4618 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4619 return pedantic_non_lvalue (fold_convert (type, arg2));
4622 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4623 return pedantic_non_lvalue (fold_convert (type, arg1));
4626 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4631 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4632 we might still be able to simplify this. For example,
4633 if C1 is one less or one more than C2, this might have started
4634 out as a MIN or MAX and been transformed by this function.
4635 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
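/* For instance, "x < 5 ? x : 4" has C1 == C2 + 1 and can become
   MIN_EXPR <x, 4> below.  */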
4637 if (INTEGRAL_TYPE_P (type)
4638 && TREE_CODE (arg01) == INTEGER_CST
4639 && TREE_CODE (arg2) == INTEGER_CST)
4643 /* We can replace A with C1 in this case. */
4644 arg1 = fold_convert (type, arg01);
4645 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4648 /* If C1 is C2 + 1, this is min(A, C2). */
4649 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4651 && operand_equal_p (arg01,
4652 const_binop (PLUS_EXPR, arg2,
4653 integer_one_node, 0),
4655 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4660 /* If C1 is C2 - 1, this is min(A, C2). */
4661 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4663 && operand_equal_p (arg01,
4664 const_binop (MINUS_EXPR, arg2,
4665 integer_one_node, 0),
4667 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4672 /* If C1 is C2 - 1, this is max(A, C2). */
4673 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4675 && operand_equal_p (arg01,
4676 const_binop (MINUS_EXPR, arg2,
4677 integer_one_node, 0),
4679 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4684 /* If C1 is C2 + 1, this is max(A, C2). */
4685 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4687 && operand_equal_p (arg01,
4688 const_binop (PLUS_EXPR, arg2,
4689 integer_one_node, 0),
4691 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4705 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4706 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4709 /* EXP is some logical combination of boolean tests. See if we can
4710 merge it into some range test. Return the new tree if so. */
4713 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4715 int or_op = (code == TRUTH_ORIF_EXPR
4716 || code == TRUTH_OR_EXPR);
4717 int in0_p, in1_p, in_p;
4718 tree low0, low1, low, high0, high1, high;
4719 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4720 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4723 /* If this is an OR operation, invert both sides; we will invert
4724 again at the end. */
4726 in0_p = ! in0_p, in1_p = ! in1_p;
4728 /* If both expressions are the same, if we can merge the ranges, and we
4729 can build the range test, return it or it inverted. If one of the
4730 ranges is always true or always false, consider it to be the same
4731 expression as the other. */
4732 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4733 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4735 && 0 != (tem = (build_range_check (type,
4737 : rhs != 0 ? rhs : integer_zero_node,
4739 return or_op ? invert_truthvalue (tem) : tem;
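/* For example, "ch >= '0' && ch <= '9'" merges into the single range
   ['0', '9'], which build_range_check typically emits as one unsigned
   comparison, roughly (unsigned) (ch - '0') <= 9 (the exact tree depends
   on the operand type).  */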
4741 /* On machines where branches are expensive, if this is a
4742 short-circuited branch and the underlying object on both sides
4743 is the same, make a non-short-circuit operation. */
4744 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4745 && lhs != 0 && rhs != 0
4746 && (code == TRUTH_ANDIF_EXPR
4747 || code == TRUTH_ORIF_EXPR)
4748 && operand_equal_p (lhs, rhs, 0))
4750 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4751 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4752 which cases we can't do this. */
4753 if (simple_operand_p (lhs))
4754 return build2 (code == TRUTH_ANDIF_EXPR
4755 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4758 else if (lang_hooks.decls.global_bindings_p () == 0
4759 && ! CONTAINS_PLACEHOLDER_P (lhs))
4761 tree common = save_expr (lhs);
4763 if (0 != (lhs = build_range_check (type, common,
4764 or_op ? ! in0_p : in0_p,
4766 && (0 != (rhs = build_range_check (type, common,
4767 or_op ? ! in1_p : in1_p,
4769 return build2 (code == TRUTH_ANDIF_EXPR
4770 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4778 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4779 bit value. Arrange things so the extra bits will be set to zero if and
4780 only if C is sign-extended to its full width. If MASK is nonzero,
4781 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4784 unextend (tree c, int p, int unsignedp, tree mask)
4786 tree type = TREE_TYPE (c);
4787 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4790 if (p == modesize || unsignedp)
4793 /* We work by getting just the sign bit into the low-order bit, then
4794 into the high-order bit, then sign-extend. We then XOR that value
4796 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4797 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4799 /* We must use a signed type in order to get an arithmetic right shift.
4800 However, we must also avoid introducing accidental overflows, so that
4801 a subsequent call to integer_zerop will work. Hence we must
4802 do the type conversion here. At this point, the constant is either
4803 zero or one, and the conversion to a signed type can never overflow.
4804 We could get an overflow if this conversion is done anywhere else. */
4805 if (TYPE_UNSIGNED (type))
4806 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4808 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4809 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4811 temp = const_binop (BIT_AND_EXPR, temp,
4812 fold_convert (TREE_TYPE (c), mask), 0);
4813 /* If necessary, convert the type back to match the type of C. */
4814 if (TYPE_UNSIGNED (type))
4815 temp = fold_convert (type, temp);
4817 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
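/* Purely illustrative sketch (not used by the compiler): the same trick
   unextend performs on trees, written with plain C shifts.  It assumes a
   32-bit value, 1 <= P < 32, two's complement, and arithmetic right
   shifts of signed integers.  */
#if 0
static unsigned int
unextend_example (unsigned int c, int p)
{
  /* Get just the sign bit of the P-bit value into the low-order bit.  */
  unsigned int sign = (c >> (p - 1)) & 1;
  /* Move it to the high-order bit, then arithmetic-shift it back down so
     that every bit above position P-1 is a copy of the sign bit.  */
  int ext = (int) (sign << 31) >> (31 - p);
  /* XOR with C: the extra bits end up zero exactly when C was already
     sign-extended from P bits.  */
  return c ^ (unsigned int) ext;
}
#endif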
4820 /* Find ways of folding logical expressions of LHS and RHS:
4821 Try to merge two comparisons to the same innermost item.
4822 Look for range tests like "ch >= '0' && ch <= '9'".
4823 Look for combinations of simple terms on machines with expensive branches
4824 and evaluate the RHS unconditionally.
4826 For example, if we have p->a == 2 && p->b == 4 and we can make an
4827 object large enough to span both A and B, we can do this with a comparison
4828 against the object ANDed with the appropriate mask.
4830 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4831 operations to do this with one comparison.
4833 We check for both normal comparisons and the BIT_AND_EXPRs made by
4834 this function and the one above.
4836 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4837 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4839 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4842 We return the simplified tree or 0 if no optimization is possible. */
4845 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4847 /* If this is the "or" of two comparisons, we can do something if
4848 the comparisons are NE_EXPR. If this is the "and", we can do something
4849 if the comparisons are EQ_EXPR. I.e.,
4850 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4852 WANTED_CODE is this operation code. For single bit fields, we can
4853 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4854 comparison for one-bit fields. */
4856 enum tree_code wanted_code;
4857 enum tree_code lcode, rcode;
4858 tree ll_arg, lr_arg, rl_arg, rr_arg;
4859 tree ll_inner, lr_inner, rl_inner, rr_inner;
4860 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4861 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4862 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4863 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4864 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4865 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4866 enum machine_mode lnmode, rnmode;
4867 tree ll_mask, lr_mask, rl_mask, rr_mask;
4868 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4869 tree l_const, r_const;
4870 tree lntype, rntype, result;
4871 int first_bit, end_bit;
4873 tree orig_lhs = lhs, orig_rhs = rhs;
4874 enum tree_code orig_code = code;
4876 /* Start by getting the comparison codes. Fail if anything is volatile.
4877 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4878 it were surrounded with a NE_EXPR. */
4880 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4883 lcode = TREE_CODE (lhs);
4884 rcode = TREE_CODE (rhs);
4886 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4888 lhs = build2 (NE_EXPR, truth_type, lhs,
4889 build_int_cst (TREE_TYPE (lhs), 0));
4893 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4895 rhs = build2 (NE_EXPR, truth_type, rhs,
4896 build_int_cst (TREE_TYPE (rhs), 0));
4900 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4901 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4904 ll_arg = TREE_OPERAND (lhs, 0);
4905 lr_arg = TREE_OPERAND (lhs, 1);
4906 rl_arg = TREE_OPERAND (rhs, 0);
4907 rr_arg = TREE_OPERAND (rhs, 1);
4909 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4910 if (simple_operand_p (ll_arg)
4911 && simple_operand_p (lr_arg))
4914 if (operand_equal_p (ll_arg, rl_arg, 0)
4915 && operand_equal_p (lr_arg, rr_arg, 0))
4917 result = combine_comparisons (code, lcode, rcode,
4918 truth_type, ll_arg, lr_arg);
4922 else if (operand_equal_p (ll_arg, rr_arg, 0)
4923 && operand_equal_p (lr_arg, rl_arg, 0))
4925 result = combine_comparisons (code, lcode,
4926 swap_tree_comparison (rcode),
4927 truth_type, ll_arg, lr_arg);
4933 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4934 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4936 /* If the RHS can be evaluated unconditionally and its operands are
4937 simple, it wins to evaluate the RHS unconditionally on machines
4938 with expensive branches. In this case, this isn't a comparison
4939 that can be merged. Avoid doing this if the RHS is a floating-point
4940 comparison since those can trap. */
4942 if (BRANCH_COST >= 2
4943 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4944 && simple_operand_p (rl_arg)
4945 && simple_operand_p (rr_arg))
4947 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4948 if (code == TRUTH_OR_EXPR
4949 && lcode == NE_EXPR && integer_zerop (lr_arg)
4950 && rcode == NE_EXPR && integer_zerop (rr_arg)
4951 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4952 return build2 (NE_EXPR, truth_type,
4953 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4955 build_int_cst (TREE_TYPE (ll_arg), 0));
4957 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4958 if (code == TRUTH_AND_EXPR
4959 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4960 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4961 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4962 return build2 (EQ_EXPR, truth_type,
4963 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4965 build_int_cst (TREE_TYPE (ll_arg), 0));
4967 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4969 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
4970 return build2 (code, truth_type, lhs, rhs);
4975 /* See if the comparisons can be merged. Then get all the parameters for
4978 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4979 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4983 ll_inner = decode_field_reference (ll_arg,
4984 &ll_bitsize, &ll_bitpos, &ll_mode,
4985 &ll_unsignedp, &volatilep, &ll_mask,
4987 lr_inner = decode_field_reference (lr_arg,
4988 &lr_bitsize, &lr_bitpos, &lr_mode,
4989 &lr_unsignedp, &volatilep, &lr_mask,
4991 rl_inner = decode_field_reference (rl_arg,
4992 &rl_bitsize, &rl_bitpos, &rl_mode,
4993 &rl_unsignedp, &volatilep, &rl_mask,
4995 rr_inner = decode_field_reference (rr_arg,
4996 &rr_bitsize, &rr_bitpos, &rr_mode,
4997 &rr_unsignedp, &volatilep, &rr_mask,
5000 /* The inner operation on the lhs of each comparison must be the
5001 same if we are to be able to do anything.
5002 Then see if we have constants. If not, the same must be true for
5004 if (volatilep || ll_inner == 0 || rl_inner == 0
5005 || ! operand_equal_p (ll_inner, rl_inner, 0))
5008 if (TREE_CODE (lr_arg) == INTEGER_CST
5009 && TREE_CODE (rr_arg) == INTEGER_CST)
5010 l_const = lr_arg, r_const = rr_arg;
5011 else if (lr_inner == 0 || rr_inner == 0
5012 || ! operand_equal_p (lr_inner, rr_inner, 0))
5015 l_const = r_const = 0;
5017 /* If either comparison code is not correct for our logical operation,
5018 fail. However, we can convert a one-bit comparison against zero into
5019 the opposite comparison against that bit being set in the field. */
5021 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5022 if (lcode != wanted_code)
5024 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5026 /* Make the left operand unsigned, since we are only interested
5027 in the value of one bit. Otherwise we are doing the wrong
5036 /* This is analogous to the code for l_const above. */
5037 if (rcode != wanted_code)
5039 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5048 /* After this point all optimizations will generate bit-field
5049 references, which we might not want. */
5050 if (! lang_hooks.can_use_bit_fields_p ())
5053 /* See if we can find a mode that contains both fields being compared on
5054 the left. If we can't, fail. Otherwise, update all constants and masks
5055 to be relative to a field of that size. */
5056 first_bit = MIN (ll_bitpos, rl_bitpos);
5057 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5058 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5059 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5061 if (lnmode == VOIDmode)
5064 lnbitsize = GET_MODE_BITSIZE (lnmode);
5065 lnbitpos = first_bit & ~ (lnbitsize - 1);
5066 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5067 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5069 if (BYTES_BIG_ENDIAN)
5071 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5072 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5075 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5076 size_int (xll_bitpos), 0);
5077 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5078 size_int (xrl_bitpos), 0);
5082 l_const = fold_convert (lntype, l_const);
5083 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5084 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5085 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5086 fold_build1 (BIT_NOT_EXPR,
5090 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5092 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5097 r_const = fold_convert (lntype, r_const);
5098 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5099 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5100 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5101 fold_build1 (BIT_NOT_EXPR,
5105 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5107 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5111 /* If the right sides are not constant, do the same for them. Also,
5112 disallow this optimization if a size or signedness mismatch occurs
5113 between the left and right sides. */
5116 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5117 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5118 /* Make sure the two fields on the right
5119 correspond to the left without being swapped. */
5120 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5123 first_bit = MIN (lr_bitpos, rr_bitpos);
5124 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5125 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5126 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5128 if (rnmode == VOIDmode)
5131 rnbitsize = GET_MODE_BITSIZE (rnmode);
5132 rnbitpos = first_bit & ~ (rnbitsize - 1);
5133 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5134 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5136 if (BYTES_BIG_ENDIAN)
5138 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5139 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5142 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5143 size_int (xlr_bitpos), 0);
5144 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5145 size_int (xrr_bitpos), 0);
5147 /* Make a mask that corresponds to both fields being compared.
5148 Do this for both items being compared. If the operands are the
5149 same size and the bits being compared are in the same position
5150 then we can do this by masking both and comparing the masked
5152 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5153 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5154 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5156 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5157 ll_unsignedp || rl_unsignedp);
5158 if (! all_ones_mask_p (ll_mask, lnbitsize))
5159 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5161 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5162 lr_unsignedp || rr_unsignedp);
5163 if (! all_ones_mask_p (lr_mask, rnbitsize))
5164 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5166 return build2 (wanted_code, truth_type, lhs, rhs);
5169 /* There is still another way we can do something: If both pairs of
5170 fields being compared are adjacent, we may be able to make a wider
5171 field containing them both.
5173 Note that we still must mask the lhs/rhs expressions. Furthermore,
5174 the mask must be shifted to account for the shift done by
5175 make_bit_field_ref. */
5176 if ((ll_bitsize + ll_bitpos == rl_bitpos
5177 && lr_bitsize + lr_bitpos == rr_bitpos)
5178 || (ll_bitpos == rl_bitpos + rl_bitsize
5179 && lr_bitpos == rr_bitpos + rr_bitsize))
5183 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5184 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5185 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5186 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5188 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5189 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5190 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5191 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5193 /* Convert to the smaller type before masking out unwanted bits. */
5195 if (lntype != rntype)
5197 if (lnbitsize > rnbitsize)
5199 lhs = fold_convert (rntype, lhs);
5200 ll_mask = fold_convert (rntype, ll_mask);
5203 else if (lnbitsize < rnbitsize)
5205 rhs = fold_convert (lntype, rhs);
5206 lr_mask = fold_convert (lntype, lr_mask);
5211 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5212 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5214 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5215 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5217 return build2 (wanted_code, truth_type, lhs, rhs);
5223 /* Handle the case of comparisons with constants. If there is something in
5224 common between the masks, those bits of the constants must be the same.
5225 If not, the condition is always false. Test for this to avoid generating
5226 incorrect code below. */
5227 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5228 if (! integer_zerop (result)
5229 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5230 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5232 if (wanted_code == NE_EXPR)
5234 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5235 return constant_boolean_node (true, truth_type);
5239 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5240 return constant_boolean_node (false, truth_type);
5244 /* Construct the expression we will return. First get the component
5245 reference we will make. Unless the mask is all ones across the width of
5246 that field, perform the mask operation. Then compare with the
5248 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5249 ll_unsignedp || rl_unsignedp);
5251 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5252 if (! all_ones_mask_p (ll_mask, lnbitsize))
5253 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5255 return build2 (wanted_code, truth_type, result,
5256 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
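/* Purely illustrative sketch (not part of the folder): the source-level
   effect of the constant case above on "p->a == 2 && p->b == 4".  It
   assumes a little-endian target, no padding between the fields, and
   that memcpy (from <string.h>) is available.  */
#if 0
struct two_fields { unsigned char a, b; };

static int
merged_compare (const struct two_fields *p)
{
  unsigned short word;
  memcpy (&word, p, sizeof word);   /* one load spanning both fields */
  /* Mask with the combined field mask and compare against the combined
     constant, instead of doing two separate byte compares.  */
  return (word & 0xffff) == ((4 << 8) | 2);
}
#endif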
5259 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5263 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5266 enum tree_code op_code;
5267 tree comp_const = op1;
5269 int consts_equal, consts_lt;
5272 STRIP_SIGN_NOPS (arg0);
5274 op_code = TREE_CODE (arg0);
5275 minmax_const = TREE_OPERAND (arg0, 1);
5276 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5277 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5278 inner = TREE_OPERAND (arg0, 0);
5280 /* If something does not permit us to optimize, return the original tree. */
5281 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5282 || TREE_CODE (comp_const) != INTEGER_CST
5283 || TREE_CONSTANT_OVERFLOW (comp_const)
5284 || TREE_CODE (minmax_const) != INTEGER_CST
5285 || TREE_CONSTANT_OVERFLOW (minmax_const))
5288 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5289 and GT_EXPR, doing the rest with recursive calls using logical
5293 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5295 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5298 return invert_truthvalue (tem);
5304 fold_build2 (TRUTH_ORIF_EXPR, type,
5305 optimize_minmax_comparison
5306 (EQ_EXPR, type, arg0, comp_const),
5307 optimize_minmax_comparison
5308 (GT_EXPR, type, arg0, comp_const));
5311 if (op_code == MAX_EXPR && consts_equal)
5312 /* MAX (X, 0) == 0 -> X <= 0 */
5313 return fold_build2 (LE_EXPR, type, inner, comp_const);
5315 else if (op_code == MAX_EXPR && consts_lt)
5316 /* MAX (X, 0) == 5 -> X == 5 */
5317 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5319 else if (op_code == MAX_EXPR)
5320 /* MAX (X, 0) == -1 -> false */
5321 return omit_one_operand (type, integer_zero_node, inner);
5323 else if (consts_equal)
5324 /* MIN (X, 0) == 0 -> X >= 0 */
5325 return fold_build2 (GE_EXPR, type, inner, comp_const);
5328 /* MIN (X, 0) == 5 -> false */
5329 return omit_one_operand (type, integer_zero_node, inner);
5332 /* MIN (X, 0) == -1 -> X == -1 */
5333 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5336 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5337 /* MAX (X, 0) > 0 -> X > 0
5338 MAX (X, 0) > 5 -> X > 5 */
5339 return fold_build2 (GT_EXPR, type, inner, comp_const);
5341 else if (op_code == MAX_EXPR)
5342 /* MAX (X, 0) > -1 -> true */
5343 return omit_one_operand (type, integer_one_node, inner);
5345 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5346 /* MIN (X, 0) > 0 -> false
5347 MIN (X, 0) > 5 -> false */
5348 return omit_one_operand (type, integer_zero_node, inner);
5351 /* MIN (X, 0) > -1 -> X > -1 */
5352 return fold_build2 (GT_EXPR, type, inner, comp_const);
5359 /* T is an integer expression that is being multiplied, divided, or taken a
5360 modulus (CODE says which and what kind of divide or modulus) by a
5361 constant C. See if we can eliminate that operation by folding it with
5362 other operations already in T. WIDE_TYPE, if non-null, is a type that
5363 should be used for the computation if wider than our type.
5365 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5366 (X * 2) + (Y * 4). We must, however, be assured that either the original
5367 expression would not overflow or that overflow is undefined for the type
5368 in the language in question.
5370 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5371 the machine has a multiply-accumulate insn or that this is part of an
5372 addressing calculation.
5374 If we return a non-null expression, it is an equivalent form of the
5375 original computation, but need not be in the original type. */
5378 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5380 /* To avoid exponential search depth, refuse to allow recursion past
5381 three levels. Beyond that (1) it's highly unlikely that we'll find
5382 something interesting and (2) we've probably processed it before
5383 when we built the inner expression. */
5392 ret = extract_muldiv_1 (t, c, code, wide_type);
5399 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5401 tree type = TREE_TYPE (t);
5402 enum tree_code tcode = TREE_CODE (t);
5403 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5404 > GET_MODE_SIZE (TYPE_MODE (type)))
5405 ? wide_type : type);
5407 int same_p = tcode == code;
5408 tree op0 = NULL_TREE, op1 = NULL_TREE;
5410 /* Don't deal with constants of zero here; they confuse the code below. */
5411 if (integer_zerop (c))
5414 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5415 op0 = TREE_OPERAND (t, 0);
5417 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5418 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5420 /* Note that we need not handle conditional operations here since fold
5421 already handles those cases. So just do arithmetic here. */
5425 /* For a constant, we can always simplify if we are a multiply
5426 or (for divide and modulus) if it is a multiple of our constant. */
5427 if (code == MULT_EXPR
5428 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5429 return const_binop (code, fold_convert (ctype, t),
5430 fold_convert (ctype, c), 0);
5433 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5434 /* If op0 is an expression ... */
5435 if ((COMPARISON_CLASS_P (op0)
5436 || UNARY_CLASS_P (op0)
5437 || BINARY_CLASS_P (op0)
5438 || EXPRESSION_CLASS_P (op0))
5439 /* ... and is unsigned, and its type is smaller than ctype,
5440 then we cannot pass through as widening. */
5441 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5442 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5443 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5444 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5445 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5446 /* ... or this is a truncation (t is narrower than op0),
5447 then we cannot pass through this narrowing. */
5448 || (GET_MODE_SIZE (TYPE_MODE (type))
5449 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5450 /* ... or signedness changes for division or modulus,
5451 then we cannot pass through this conversion. */
5452 || (code != MULT_EXPR
5453 && (TYPE_UNSIGNED (ctype)
5454 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5457 /* Pass the constant down and see if we can make a simplification. If
5458 we can, replace this expression with the inner simplification for
5459 possible later conversion to our or some other type. */
5460 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5461 && TREE_CODE (t2) == INTEGER_CST
5462 && ! TREE_CONSTANT_OVERFLOW (t2)
5463 && (0 != (t1 = extract_muldiv (op0, t2, code,
5465 ? ctype : NULL_TREE))))
5470 /* If widening the type changes it from signed to unsigned, then we
5471 must avoid building ABS_EXPR itself as unsigned. */
5472 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5474 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5475 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5477 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5478 return fold_convert (ctype, t1);
5484 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5485 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5488 case MIN_EXPR: case MAX_EXPR:
5489 /* If widening the type changes the signedness, then we can't perform
5490 this optimization as that changes the result. */
5491 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5494 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5495 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5496 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5498 if (tree_int_cst_sgn (c) < 0)
5499 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5501 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5502 fold_convert (ctype, t2));
5506 case LSHIFT_EXPR: case RSHIFT_EXPR:
5507 /* If the second operand is constant, this is a multiplication
5508 or floor division, by a power of two, so we can treat it that
5509 way unless the multiplier or divisor overflows. Signed
5510 left-shift overflow is implementation-defined rather than
5511 undefined in C90, so do not convert signed left shift into
5513 if (TREE_CODE (op1) == INTEGER_CST
5514 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5515 /* const_binop may not detect overflow correctly,
5516 so check for it explicitly here. */
5517 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5518 && TREE_INT_CST_HIGH (op1) == 0
5519 && 0 != (t1 = fold_convert (ctype,
5520 const_binop (LSHIFT_EXPR,
5523 && ! TREE_OVERFLOW (t1))
5524 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5525 ? MULT_EXPR : FLOOR_DIV_EXPR,
5526 ctype, fold_convert (ctype, op0), t1),
5527 c, code, wide_type);
5530 case PLUS_EXPR: case MINUS_EXPR:
5531 /* See if we can eliminate the operation on both sides. If we can, we
5532 can return a new PLUS or MINUS. If we can't, the only remaining
5533 cases where we can do anything are if the second operand is a
5535 t1 = extract_muldiv (op0, c, code, wide_type);
5536 t2 = extract_muldiv (op1, c, code, wide_type);
5537 if (t1 != 0 && t2 != 0
5538 && (code == MULT_EXPR
5539 /* If not multiplication, we can only do this if both operands
5540 are divisible by c. */
5541 || (multiple_of_p (ctype, op0, c)
5542 && multiple_of_p (ctype, op1, c))))
5543 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5544 fold_convert (ctype, t2));
5546 /* If this was a subtraction, negate OP1 and set it to be an addition.
5547 This simplifies the logic below. */
5548 if (tcode == MINUS_EXPR)
5549 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5551 if (TREE_CODE (op1) != INTEGER_CST)
5554 /* If either OP1 or C is negative, this optimization is not safe for
5555 some of the division and remainder types while for others we need
5556 to change the code. */
5557 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5559 if (code == CEIL_DIV_EXPR)
5560 code = FLOOR_DIV_EXPR;
5561 else if (code == FLOOR_DIV_EXPR)
5562 code = CEIL_DIV_EXPR;
5563 else if (code != MULT_EXPR
5564 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5568 /* If it's a multiply or a division/modulus operation of a multiple
5569 of our constant, do the operation and verify it doesn't overflow. */
5570 if (code == MULT_EXPR
5571 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5573 op1 = const_binop (code, fold_convert (ctype, op1),
5574 fold_convert (ctype, c), 0);
5575 /* We allow the constant to overflow with wrapping semantics. */
5577 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5583 /* If we have an unsigned type that is not a sizetype, we cannot widen
5584 the operation since it will change the result if the original
5585 computation overflowed. */
5586 if (TYPE_UNSIGNED (ctype)
5587 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5591 /* If we were able to eliminate our operation from the first side,
5592 apply our operation to the second side and reform the PLUS. */
5593 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5594 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5596 /* The last case is if we are a multiply. In that case, we can
5597 apply the distributive law to commute the multiply and addition
5598 if the multiplication of the constants doesn't overflow. */
5599 if (code == MULT_EXPR)
5600 return fold_build2 (tcode, ctype,
5601 fold_build2 (code, ctype,
5602 fold_convert (ctype, op0),
5603 fold_convert (ctype, c)),
5609 /* We have a special case here if we are doing something like
5610 (C * 8) % 4 since we know that's zero. */
5611 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5612 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5613 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5614 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5615 return omit_one_operand (type, integer_zero_node, op0);
5617 /* ... fall through ... */
5619 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5620 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5621 /* If we can extract our operation from the LHS, do so and return a
5622 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5623 do something only if the second operand is a constant. */
5625 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5626 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5627 fold_convert (ctype, op1));
5628 else if (tcode == MULT_EXPR && code == MULT_EXPR
5629 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5630 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5631 fold_convert (ctype, t1));
5632 else if (TREE_CODE (op1) != INTEGER_CST)
5635 /* If these are the same operation types, we can associate them
5636 assuming no overflow. */
5638 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5639 fold_convert (ctype, c), 0))
5640 && ! TREE_OVERFLOW (t1))
5641 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5643 /* If these operations "cancel" each other, we have the main
5644 optimizations of this pass, which occur when either constant is a
5645 multiple of the other, in which case we replace this with either an
5646 operation of CODE or TCODE.
5648 If we have an unsigned type that is not a sizetype, we cannot do
5649 this since it will change the result if the original computation
5651 if ((! TYPE_UNSIGNED (ctype)
5652 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5654 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5655 || (tcode == MULT_EXPR
5656 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5657 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5659 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5660 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5661 fold_convert (ctype,
5662 const_binop (TRUNC_DIV_EXPR,
5664 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5665 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5666 fold_convert (ctype,
5667 const_binop (TRUNC_DIV_EXPR,
5679 /* Return a node which has the indicated constant VALUE (either 0 or
5680 1), and is of the indicated TYPE. */
5683 constant_boolean_node (int value, tree type)
5685 if (type == integer_type_node)
5686 return value ? integer_one_node : integer_zero_node;
5687 else if (type == boolean_type_node)
5688 return value ? boolean_true_node : boolean_false_node;
5690 return build_int_cst (type, value);
5694 /* Return true if expr looks like an ARRAY_REF and set base and
5695 offset to the appropriate trees. If there is no offset,
5696 offset is set to NULL_TREE. Base will be canonicalized to
5697 something you can get the element type from using
5698 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5699 in bytes to the base. */
5702 extract_array_ref (tree expr, tree *base, tree *offset)
5704 /* One canonical form is a PLUS_EXPR with the first
5705 argument being an ADDR_EXPR with a possible NOP_EXPR
5707 if (TREE_CODE (expr) == PLUS_EXPR)
5709 tree op0 = TREE_OPERAND (expr, 0);
5710 tree inner_base, dummy1;
5711 /* Strip NOP_EXPRs here because the C frontends and/or
5712 folders may present us with (int *)&x.a + 4B.
5714 if (extract_array_ref (op0, &inner_base, &dummy1))
5717 if (dummy1 == NULL_TREE)
5718 *offset = TREE_OPERAND (expr, 1);
5720 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5721 dummy1, TREE_OPERAND (expr, 1));
5725 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5726 which we transform into an ADDR_EXPR with appropriate
5727 offset. For other arguments to the ADDR_EXPR we assume
5728 zero offset and as such do not care about the ADDR_EXPR
5729 type and strip possible nops from it. */
5730 else if (TREE_CODE (expr) == ADDR_EXPR)
5732 tree op0 = TREE_OPERAND (expr, 0);
5733 if (TREE_CODE (op0) == ARRAY_REF)
5735 tree idx = TREE_OPERAND (op0, 1);
5736 *base = TREE_OPERAND (op0, 0);
5737 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5738 array_ref_element_size (op0));
5742 /* Handle array-to-pointer decay as &a. */
5743 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5744 *base = TREE_OPERAND (expr, 0);
5747 *offset = NULL_TREE;
5751 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5752 else if (SSA_VAR_P (expr)
5753 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5756 *offset = NULL_TREE;
5764 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5765 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5766 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5767 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5768 COND is the first argument to CODE; otherwise (as in the example
5769 given here), it is the second argument. TYPE is the type of the
5770 original expression. Return NULL_TREE if no simplification is
5774 fold_binary_op_with_conditional_arg (enum tree_code code,
5775 tree type, tree op0, tree op1,
5776 tree cond, tree arg, int cond_first_p)
5778 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5779 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5780 tree test, true_value, false_value;
5781 tree lhs = NULL_TREE;
5782 tree rhs = NULL_TREE;
5784 /* This transformation is only worthwhile if we don't have to wrap
5785 arg in a SAVE_EXPR, and the operation can be simplified on at least
5786 one of the branches once it is pushed inside the COND_EXPR. */
5787 if (!TREE_CONSTANT (arg))
5790 if (TREE_CODE (cond) == COND_EXPR)
5792 test = TREE_OPERAND (cond, 0);
5793 true_value = TREE_OPERAND (cond, 1);
5794 false_value = TREE_OPERAND (cond, 2);
5795 /* If this operand throws an exception, then it does not make
5796 sense to try to perform a logical or arithmetic operation
5798 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5800 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5805 tree testtype = TREE_TYPE (cond);
5807 true_value = constant_boolean_node (true, testtype);
5808 false_value = constant_boolean_node (false, testtype);
5811 arg = fold_convert (arg_type, arg);
5814 true_value = fold_convert (cond_type, true_value);
5816 lhs = fold_build2 (code, type, true_value, arg);
5818 lhs = fold_build2 (code, type, arg, true_value);
5822 false_value = fold_convert (cond_type, false_value);
5824 rhs = fold_build2 (code, type, false_value, arg);
5826 rhs = fold_build2 (code, type, arg, false_value);
5829 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5830 return fold_convert (type, test);
5834 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5836 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5837 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5838 ADDEND is the same as X.
5840 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5841 and finite. The problematic cases are when X is zero, and its mode
5842 has signed zeros. In the case of rounding towards -infinity,
5843 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5844 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5847 fold_real_zero_addition_p (tree type, tree addend, int negate)
5849 if (!real_zerop (addend))
5852 /* Don't allow the fold with -fsignaling-nans. */
5853 if (HONOR_SNANS (TYPE_MODE (type)))
5856 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5857 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5860 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5861 if (TREE_CODE (addend) == REAL_CST
5862 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5865 /* The mode has signed zeros, and we have to honor their sign.
5866 In this situation, there is only one case we can return true for.
5867 X - 0 is the same as X unless rounding towards -infinity is
5869 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5872 /* Subroutine of fold() that checks comparisons of built-in math
5873 functions against real constants.
5875 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5876 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5877 is the type of the result and ARG0 and ARG1 are the operands of the
5878 comparison. ARG1 must be a TREE_REAL_CST.
5880 The function returns the constant folded tree if a simplification
5881 can be made, and NULL_TREE otherwise. */
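/* For example, "sqrt (x) > 3.0" can be folded to "x > 9.0", and
   "sqrt (x) < -1.0" is always false.  */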
5884 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5885 tree type, tree arg0, tree arg1)
5889 if (BUILTIN_SQRT_P (fcode))
5891 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5892 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5894 c = TREE_REAL_CST (arg1);
5895 if (REAL_VALUE_NEGATIVE (c))
5897 /* sqrt(x) < y is always false, if y is negative. */
5898 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5899 return omit_one_operand (type, integer_zero_node, arg);
5901 /* sqrt(x) > y is always true, if y is negative and we
5902 don't care about NaNs, i.e. negative values of x. */
5903 if (code == NE_EXPR || !HONOR_NANS (mode))
5904 return omit_one_operand (type, integer_one_node, arg);
5906 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5907 return fold_build2 (GE_EXPR, type, arg,
5908 build_real (TREE_TYPE (arg), dconst0));
5910 else if (code == GT_EXPR || code == GE_EXPR)
5914 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5915 real_convert (&c2, mode, &c2);
5917 if (REAL_VALUE_ISINF (c2))
5919 /* sqrt(x) > y is x == +Inf, when y is very large. */
5920 if (HONOR_INFINITIES (mode))
5921 return fold_build2 (EQ_EXPR, type, arg,
5922 build_real (TREE_TYPE (arg), c2));
5924 /* sqrt(x) > y is always false, when y is very large
5925 and we don't care about infinities. */
5926 return omit_one_operand (type, integer_zero_node, arg);
5929 /* sqrt(x) > c is the same as x > c*c. */
5930 return fold_build2 (code, type, arg,
5931 build_real (TREE_TYPE (arg), c2));
5933 else if (code == LT_EXPR || code == LE_EXPR)
5937 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5938 real_convert (&c2, mode, &c2);
5940 if (REAL_VALUE_ISINF (c2))
5942 /* sqrt(x) < y is always true, when y is a very large
5943 value and we don't care about NaNs or Infinities. */
5944 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5945 return omit_one_operand (type, integer_one_node, arg);
5947 /* sqrt(x) < y is x != +Inf when y is very large and we
5948 don't care about NaNs. */
5949 if (! HONOR_NANS (mode))
5950 return fold_build2 (NE_EXPR, type, arg,
5951 build_real (TREE_TYPE (arg), c2));
5953 /* sqrt(x) < y is x >= 0 when y is very large and we
5954 don't care about Infinities. */
5955 if (! HONOR_INFINITIES (mode))
5956 return fold_build2 (GE_EXPR, type, arg,
5957 build_real (TREE_TYPE (arg), dconst0));
5959 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5960 if (lang_hooks.decls.global_bindings_p () != 0
5961 || CONTAINS_PLACEHOLDER_P (arg))
5964 arg = save_expr (arg);
5965 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5966 fold_build2 (GE_EXPR, type, arg,
5967 build_real (TREE_TYPE (arg),
5969 fold_build2 (NE_EXPR, type, arg,
5970 build_real (TREE_TYPE (arg),
5974 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5975 if (! HONOR_NANS (mode))
5976 return fold_build2 (code, type, arg,
5977 build_real (TREE_TYPE (arg), c2));
5979 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5980 if (lang_hooks.decls.global_bindings_p () == 0
5981 && ! CONTAINS_PLACEHOLDER_P (arg))
5983 arg = save_expr (arg);
5984 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5985 fold_build2 (GE_EXPR, type, arg,
5986 build_real (TREE_TYPE (arg),
5988 fold_build2 (code, type, arg,
5989 build_real (TREE_TYPE (arg),
5998 /* Subroutine of fold() that optimizes comparisons against Infinities,
5999 either +Inf or -Inf.
6001 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6002 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6003 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6005 The function returns the constant folded tree if a simplification
6006 can be made, and NULL_TREE otherwise. */
6009 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6011 enum machine_mode mode;
6012 REAL_VALUE_TYPE max;
6016 mode = TYPE_MODE (TREE_TYPE (arg0));
6018 /* For negative infinity swap the sense of the comparison. */
6019 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6021 code = swap_tree_comparison (code);
6026 /* x > +Inf is always false, if we ignore sNaNs. */
6027 if (HONOR_SNANS (mode))
6029 return omit_one_operand (type, integer_zero_node, arg0);
6032 /* x <= +Inf is always true, if we don't care about NaNs. */
6033 if (! HONOR_NANS (mode))
6034 return omit_one_operand (type, integer_one_node, arg0);
6036 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6037 if (lang_hooks.decls.global_bindings_p () == 0
6038 && ! CONTAINS_PLACEHOLDER_P (arg0))
6040 arg0 = save_expr (arg0);
6041 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6047 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6048 real_maxval (&max, neg, mode);
6049 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6050 arg0, build_real (TREE_TYPE (arg0), max));
6053 /* x < +Inf is always equal to x <= DBL_MAX. */
6054 real_maxval (&max, neg, mode);
6055 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6056 arg0, build_real (TREE_TYPE (arg0), max));
6059 /* x != +Inf is always equal to !(x > DBL_MAX). */
6060 real_maxval (&max, neg, mode);
6061 if (! HONOR_NANS (mode))
6062 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6063 arg0, build_real (TREE_TYPE (arg0), max));
6065 /* The transformation below creates non-gimple code and thus is
6066 not appropriate if we are in gimple form. */
6070 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6071 arg0, build_real (TREE_TYPE (arg0), max));
6072 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6081 /* Subroutine of fold() that optimizes comparisons of a division by
6082 a nonzero integer constant against an integer constant, i.e.
6085 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6086 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6087 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6089 The function returns the constant folded tree if a simplification
6090 can be made, and NULL_TREE otherwise. */
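/* For example, with truncating division "x / 4 == 3" holds exactly for
   12 <= x && x <= 15, so the comparison becomes a range check on x.  */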
6093 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6095 tree prod, tmp, hi, lo;
6096 tree arg00 = TREE_OPERAND (arg0, 0);
6097 tree arg01 = TREE_OPERAND (arg0, 1);
6098 unsigned HOST_WIDE_INT lpart;
6099 HOST_WIDE_INT hpart;
6103 /* We have to do this the hard way to detect unsigned overflow.
6104 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6105 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6106 TREE_INT_CST_HIGH (arg01),
6107 TREE_INT_CST_LOW (arg1),
6108 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6109 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6110 prod = force_fit_type (prod, -1, overflow, false);
6111 neg_overflow = false;
6113 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6115 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6118 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6119 overflow = add_double (TREE_INT_CST_LOW (prod),
6120 TREE_INT_CST_HIGH (prod),
6121 TREE_INT_CST_LOW (tmp),
6122 TREE_INT_CST_HIGH (tmp),
6124 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6125 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6126 TREE_CONSTANT_OVERFLOW (prod));
6128 else if (tree_int_cst_sgn (arg01) >= 0)
6130 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6131 switch (tree_int_cst_sgn (arg1))
6134 neg_overflow = true;
6135 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6140 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6145 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6155 /* A negative divisor reverses the relational operators. */
6156 code = swap_tree_comparison (code);
6158 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6159 switch (tree_int_cst_sgn (arg1))
6162 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6167 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6172 neg_overflow = true;
6173 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6185 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6186 return omit_one_operand (type, integer_zero_node, arg00);
6187 if (TREE_OVERFLOW (hi))
6188 return fold_build2 (GE_EXPR, type, arg00, lo);
6189 if (TREE_OVERFLOW (lo))
6190 return fold_build2 (LE_EXPR, type, arg00, hi);
6191 return build_range_check (type, arg00, 1, lo, hi);
6194 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6195 return omit_one_operand (type, integer_one_node, arg00);
6196 if (TREE_OVERFLOW (hi))
6197 return fold_build2 (LT_EXPR, type, arg00, lo);
6198 if (TREE_OVERFLOW (lo))
6199 return fold_build2 (GT_EXPR, type, arg00, hi);
6200 return build_range_check (type, arg00, 0, lo, hi);
6203 if (TREE_OVERFLOW (lo))
6205 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6206 return omit_one_operand (type, tmp, arg00);
6208 return fold_build2 (LT_EXPR, type, arg00, lo);
6211 if (TREE_OVERFLOW (hi))
6213 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6214 return omit_one_operand (type, tmp, arg00);
6216 return fold_build2 (LE_EXPR, type, arg00, hi);
6219 if (TREE_OVERFLOW (hi))
6221 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6222 return omit_one_operand (type, tmp, arg00);
6224 return fold_build2 (GT_EXPR, type, arg00, hi);
6227 if (TREE_OVERFLOW (lo))
6229 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6230 return omit_one_operand (type, tmp, arg00);
6232 return fold_build2 (GE_EXPR, type, arg00, lo);
6242 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6243 equality/inequality test, then return a simplified form of the test
6244 using a sign test. Otherwise return NULL. TYPE is the desired
6248 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6251 /* If this is testing a single bit, we can optimize the test. */
6252 if ((code == NE_EXPR || code == EQ_EXPR)
6253 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6254 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6256 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6257 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
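/* For example, for a 32-bit int X, "(X & 0x80000000) != 0" tests exactly
   the sign bit and becomes "X < 0" (with X viewed as signed).  */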
6258 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6260 if (arg00 != NULL_TREE
6261 /* This is only a win if casting to a signed type is cheap,
6262 i.e. when arg00's type is not a partial mode. */
6263 && TYPE_PRECISION (TREE_TYPE (arg00))
6264 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6266 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6267 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6268 result_type, fold_convert (stype, arg00),
6269 build_int_cst (stype, 0));
6276 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6277 equality/inequality test, then return a simplified form of
6278 the test using shifts and logical operations. Otherwise return
6279 NULL. TYPE is the desired result type. */
6282 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6285 /* If this is testing a single bit, we can optimize the test. */
6286 if ((code == NE_EXPR || code == EQ_EXPR)
6287 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6288 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6290 tree inner = TREE_OPERAND (arg0, 0);
6291 tree type = TREE_TYPE (arg0);
6292 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6293 enum machine_mode operand_mode = TYPE_MODE (type);
6295 tree signed_type, unsigned_type, intermediate_type;
6298 /* First, see if we can fold the single bit test into a sign-bit
6300 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6305 /* Otherwise we have (A & C) != 0 where C is a single bit,
6306 convert that into ((A >> C2) & 1), where C2 = log2(C).
6307 Similarly for (A & C) == 0. */
6309 /* If INNER is a right shift of a constant and it plus BITNUM does
6310 not overflow, adjust BITNUM and INNER. */
6311 if (TREE_CODE (inner) == RSHIFT_EXPR
6312 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6313 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6314 && bitnum < TYPE_PRECISION (type)
6315 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6316 bitnum - TYPE_PRECISION (type)))
6318 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6319 inner = TREE_OPERAND (inner, 0);
6322 /* If we are going to be able to omit the AND below, we must do our
6323 operations as unsigned. If we must use the AND, we have a choice.
6324 Normally unsigned is faster, but for some machines signed is. */
6325 #ifdef LOAD_EXTEND_OP
6326 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6327 && !flag_syntax_only) ? 0 : 1;
6332 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6333 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6334 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6335 inner = fold_convert (intermediate_type, inner);
6338 inner = build2 (RSHIFT_EXPR, intermediate_type,
6339 inner, size_int (bitnum));
6341 if (code == EQ_EXPR)
6342 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6343 inner, integer_one_node);
6345 /* Put the AND last so it can combine with more things. */
6346 inner = build2 (BIT_AND_EXPR, intermediate_type,
6347 inner, integer_one_node);
6349 /* Make sure to return the proper type. */
6350 inner = fold_convert (result_type, inner);
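/* Illustrative example (a sketch, not part of the original source):
   for unsigned X, the test  (X & 8) != 0  is not a sign-bit test, so
   the generic path above rewrites it as  (X >> 3) & 1,  and the == 0
   form gets an extra XOR with 1 before the final BIT_AND_EXPR.  */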
6357 /* Check whether we are allowed to reorder operands arg0 and arg1,
6358 such that the evaluation of arg1 occurs before arg0. */
6361 reorder_operands_p (tree arg0, tree arg1)
6363 if (! flag_evaluation_order)
6365 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6367 return ! TREE_SIDE_EFFECTS (arg0)
6368 && ! TREE_SIDE_EFFECTS (arg1);
6371 /* Test whether it is preferable to swap two operands, ARG0 and
6372 ARG1, for example because ARG0 is an integer constant and ARG1
6373 isn't. If REORDER is true, only recommend swapping if we can
6374 evaluate the operands in reverse order. */
6377 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6379 STRIP_SIGN_NOPS (arg0);
6380 STRIP_SIGN_NOPS (arg1);
6382 if (TREE_CODE (arg1) == INTEGER_CST)
6384 if (TREE_CODE (arg0) == INTEGER_CST)
6387 if (TREE_CODE (arg1) == REAL_CST)
6389 if (TREE_CODE (arg0) == REAL_CST)
6392 if (TREE_CODE (arg1) == COMPLEX_CST)
6394 if (TREE_CODE (arg0) == COMPLEX_CST)
6397 if (TREE_CONSTANT (arg1))
6399 if (TREE_CONSTANT (arg0))
6405 if (reorder && flag_evaluation_order
6406 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6414 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6415 for commutative and comparison operators. Ensuring a canonical
6416 form allows the optimizers to find additional redundancies without
6417 having to explicitly check for both orderings. */
6418 if (TREE_CODE (arg0) == SSA_NAME
6419 && TREE_CODE (arg1) == SSA_NAME
6420 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
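/* Illustrative example (not from the original sources): for the
   comparison  5 < x,  arg0 is an INTEGER_CST and arg1 is not, so this
   predicate returns true and callers canonicalize the expression to
   x > 5,  keeping constants as the second operand.  */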
6426 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6427 ARG0 is extended to a wider type. */
6430 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6432 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6434 tree shorter_type, outer_type;
6438 if (arg0_unw == arg0)
6440 shorter_type = TREE_TYPE (arg0_unw);
6442 #ifdef HAVE_canonicalize_funcptr_for_compare
6443 /* Disable this optimization if we're casting a function pointer
6444 type on targets that require function pointer canonicalization. */
6445 if (HAVE_canonicalize_funcptr_for_compare
6446 && TREE_CODE (shorter_type) == POINTER_TYPE
6447 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6451 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6454 arg1_unw = get_unwidened (arg1, shorter_type);
6456 /* If possible, express the comparison in the shorter mode. */
6457 if ((code == EQ_EXPR || code == NE_EXPR
6458 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6459 && (TREE_TYPE (arg1_unw) == shorter_type
6460 || (TREE_CODE (arg1_unw) == INTEGER_CST
6461 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6462 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6463 && int_fits_type_p (arg1_unw, shorter_type))))
6464 return fold_build2 (code, type, arg0_unw,
6465 fold_convert (shorter_type, arg1_unw));
6467 if (TREE_CODE (arg1_unw) != INTEGER_CST
6468 || TREE_CODE (shorter_type) != INTEGER_TYPE
6469 || !int_fits_type_p (arg1_unw, shorter_type))
6472 /* If we are comparing with an integer that does not fit into the range
6473 of the shorter type, the result is known. */
6474 outer_type = TREE_TYPE (arg1_unw);
6475 min = lower_bound_in_type (outer_type, shorter_type);
6476 max = upper_bound_in_type (outer_type, shorter_type);
6478 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6480 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6487 return omit_one_operand (type, integer_zero_node, arg0);
6492 return omit_one_operand (type, integer_one_node, arg0);
6498 return omit_one_operand (type, integer_one_node, arg0);
6500 return omit_one_operand (type, integer_zero_node, arg0);
6505 return omit_one_operand (type, integer_zero_node, arg0);
6507 return omit_one_operand (type, integer_one_node, arg0);
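/* Worked example (illustrative only): for  unsigned char c,  the
   comparison  (int) c == 300  uses a constant that cannot fit in the
   unwidened type, so the bounds of 'unsigned char' computed above
   show the constant lies outside the range and the EQ_EXPR folds to
   constant zero, with c still evaluated via omit_one_operand.  */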
6516 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6517 ARG0 just the signedness is changed. */
6520 fold_sign_changed_comparison (enum tree_code code, tree type,
6521 tree arg0, tree arg1)
6523 tree arg0_inner, tmp;
6524 tree inner_type, outer_type;
6526 if (TREE_CODE (arg0) != NOP_EXPR
6527 && TREE_CODE (arg0) != CONVERT_EXPR)
6530 outer_type = TREE_TYPE (arg0);
6531 arg0_inner = TREE_OPERAND (arg0, 0);
6532 inner_type = TREE_TYPE (arg0_inner);
6534 #ifdef HAVE_canonicalize_funcptr_for_compare
6535 /* Disable this optimization if we're casting a function pointer
6536 type on targets that require function pointer canonicalization. */
6537 if (HAVE_canonicalize_funcptr_for_compare
6538 && TREE_CODE (inner_type) == POINTER_TYPE
6539 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6543 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6546 if (TREE_CODE (arg1) != INTEGER_CST
6547 && !((TREE_CODE (arg1) == NOP_EXPR
6548 || TREE_CODE (arg1) == CONVERT_EXPR)
6549 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6552 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6557 if (TREE_CODE (arg1) == INTEGER_CST)
6559 tmp = build_int_cst_wide (inner_type,
6560 TREE_INT_CST_LOW (arg1),
6561 TREE_INT_CST_HIGH (arg1));
6562 arg1 = force_fit_type (tmp, 0,
6563 TREE_OVERFLOW (arg1),
6564 TREE_CONSTANT_OVERFLOW (arg1));
6567 arg1 = fold_convert (inner_type, arg1);
6569 return fold_build2 (code, type, arg0_inner, arg1);
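/* Illustrative example (not part of the original file): for int x,
   the comparison  (unsigned int) x == 5u  only changes the signedness
   of x, so the constant is re-expressed in the inner type and the
   whole test is rebuilt as  x == 5;  ordering comparisons are skipped
   when the signedness differs, as checked above.  */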
6572 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6573 step of the array. Reconstructs s and delta in the case of s * delta
6574 being an integer constant (and thus already folded).
6575 ADDR is the address. MULT is the multiplicative expression.
6576 If the function succeeds, the new address expression is returned. Otherwise
6577 NULL_TREE is returned. */
6580 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6582 tree s, delta, step;
6583 tree ref = TREE_OPERAND (addr, 0), pref;
6587 /* Canonicalize op1 into a possibly non-constant delta
6588 and an INTEGER_CST s. */
6589 if (TREE_CODE (op1) == MULT_EXPR)
6591 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6596 if (TREE_CODE (arg0) == INTEGER_CST)
6601 else if (TREE_CODE (arg1) == INTEGER_CST)
6609 else if (TREE_CODE (op1) == INTEGER_CST)
6616 /* Treat op1 as delta * 1. */
6618 s = integer_one_node;
6621 for (;; ref = TREE_OPERAND (ref, 0))
6623 if (TREE_CODE (ref) == ARRAY_REF)
6625 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6629 step = array_ref_element_size (ref);
6630 if (TREE_CODE (step) != INTEGER_CST)
6635 if (! tree_int_cst_equal (step, s))
6640 /* Check whether delta is a multiple of step. */
6641 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6650 if (!handled_component_p (ref))
6654 /* We found a suitable array reference. So copy everything up to it,
6655 and replace the index. */
6657 pref = TREE_OPERAND (addr, 0);
6658 ret = copy_node (pref);
6663 pref = TREE_OPERAND (pref, 0);
6664 TREE_OPERAND (pos, 0) = copy_node (pref);
6665 pos = TREE_OPERAND (pos, 0);
6668 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6669 fold_convert (itype,
6670 TREE_OPERAND (pos, 1)),
6671 fold_convert (itype, delta));
6673 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
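/* Worked example (a sketch; the array name is hypothetical): for
   int a[100] with 4-byte elements, folding  &a[i] + 4 * d  finds that
   s equals the element size step, so the multiplication is moved into
   the index and the address is rebuilt as  &a[i + d].  */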
6677 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6678 means A >= Y && A != MAX, but in this case we know that
6679 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6682 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6684 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6686 if (TREE_CODE (bound) == LT_EXPR)
6687 a = TREE_OPERAND (bound, 0);
6688 else if (TREE_CODE (bound) == GT_EXPR)
6689 a = TREE_OPERAND (bound, 1);
6693 typea = TREE_TYPE (a);
6694 if (!INTEGRAL_TYPE_P (typea)
6695 && !POINTER_TYPE_P (typea))
6698 if (TREE_CODE (ineq) == LT_EXPR)
6700 a1 = TREE_OPERAND (ineq, 1);
6701 y = TREE_OPERAND (ineq, 0);
6703 else if (TREE_CODE (ineq) == GT_EXPR)
6705 a1 = TREE_OPERAND (ineq, 0);
6706 y = TREE_OPERAND (ineq, 1);
6711 if (TREE_TYPE (a1) != typea)
6714 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6715 if (!integer_onep (diff))
6718 return fold_build2 (GE_EXPR, type, a, y);
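/* Illustrative example (not from the original sources): with BOUND
   being  a < x  and INEQ being  a + 1 > y,  the difference a1 - a is
   the constant 1, so INEQ is replaced by the non-sharp form  a >= y,
   which is safe because  a < x  rules out a == MAX.  */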
6721 /* Fold a sum or difference in which at least one operand is a multiplication.
6722 Returns the folded tree or NULL if no simplification could be made. */
6725 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6727 tree arg00, arg01, arg10, arg11;
6728 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6730 /* (A * C) +- (B * C) -> (A+-B) * C.
6731 (A * C) +- A -> A * (C+-1).
6732 We are most concerned about the case where C is a constant,
6733 but other combinations show up during loop reduction. Since
6734 it is not difficult, try all four possibilities. */
6736 if (TREE_CODE (arg0) == MULT_EXPR)
6738 arg00 = TREE_OPERAND (arg0, 0);
6739 arg01 = TREE_OPERAND (arg0, 1);
6744 arg01 = fold_convert (type, integer_one_node);
6746 if (TREE_CODE (arg1) == MULT_EXPR)
6748 arg10 = TREE_OPERAND (arg1, 0);
6749 arg11 = TREE_OPERAND (arg1, 1);
6754 arg11 = fold_convert (type, integer_one_node);
6758 if (operand_equal_p (arg01, arg11, 0))
6759 same = arg01, alt0 = arg00, alt1 = arg10;
6760 else if (operand_equal_p (arg00, arg10, 0))
6761 same = arg00, alt0 = arg01, alt1 = arg11;
6762 else if (operand_equal_p (arg00, arg11, 0))
6763 same = arg00, alt0 = arg01, alt1 = arg10;
6764 else if (operand_equal_p (arg01, arg10, 0))
6765 same = arg01, alt0 = arg00, alt1 = arg11;
6767 /* No identical multiplicands; see if we can find a common
6768 power-of-two factor in non-power-of-two multiplies. This
6769 can help in multi-dimensional array access. */
6770 else if (host_integerp (arg01, 0)
6771 && host_integerp (arg11, 0))
6773 HOST_WIDE_INT int01, int11, tmp;
6776 int01 = TREE_INT_CST_LOW (arg01);
6777 int11 = TREE_INT_CST_LOW (arg11);
6779 /* Move min of absolute values to int11. */
6780 if ((int01 >= 0 ? int01 : -int01)
6781 < (int11 >= 0 ? int11 : -int11))
6783 tmp = int01, int01 = int11, int11 = tmp;
6784 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6791 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6793 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6794 build_int_cst (TREE_TYPE (arg00),
6799 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6804 return fold_build2 (MULT_EXPR, type,
6805 fold_build2 (code, type,
6806 fold_convert (type, alt0),
6807 fold_convert (type, alt1)),
6808 fold_convert (type, same));
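/* Worked examples (illustrative only):

     x * 3 + x * 5   becomes  x * (3 + 5)   via the identical-operand cases,
     a * 4 + b * 4   becomes  (a + b) * 4,

   and when no multiplicand matches exactly, e.g.  i * 12 + j * 4,  the
   common power-of-two factor 4 is pulled out so that  (i * 3 + j) * 4
   can be formed.  */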
6813 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6814 specified by EXPR into the buffer PTR of length LEN bytes.
6815 Return the number of bytes placed in the buffer, or zero
6819 native_encode_int (tree expr, unsigned char *ptr, int len)
6821 tree type = TREE_TYPE (expr);
6822 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6823 int byte, offset, word, words;
6824 unsigned char value;
6826 if (total_bytes > len)
6828 words = total_bytes / UNITS_PER_WORD;
6830 for (byte = 0; byte < total_bytes; byte++)
6832 int bitpos = byte * BITS_PER_UNIT;
6833 if (bitpos < HOST_BITS_PER_WIDE_INT)
6834 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6836 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6837 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6839 if (total_bytes > UNITS_PER_WORD)
6841 word = byte / UNITS_PER_WORD;
6842 if (WORDS_BIG_ENDIAN)
6843 word = (words - 1) - word;
6844 offset = word * UNITS_PER_WORD;
6845 if (BYTES_BIG_ENDIAN)
6846 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6848 offset += byte % UNITS_PER_WORD;
6851 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6852 ptr[offset] = value;
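/* Illustrative example (not part of the original source): encoding a
   32-bit INTEGER_CST 0x01020304 that fits in one word stores the
   bytes 04 03 02 01 into PTR when BYTES_BIG_ENDIAN is false, and
   01 02 03 04 when it is true, following the offset computation
   above.  */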
6858 /* Subroutine of native_encode_expr. Encode the REAL_CST
6859 specified by EXPR into the buffer PTR of length LEN bytes.
6860 Return the number of bytes placed in the buffer, or zero
6864 native_encode_real (tree expr, unsigned char *ptr, int len)
6866 tree type = TREE_TYPE (expr);
6867 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6868 int byte, offset, word, words;
6869 unsigned char value;
6871 /* There are always 32 bits in each long, no matter the size of
6872 the host's long. We handle floating point representations with
6876 if (total_bytes > len)
6878 words = total_bytes / UNITS_PER_WORD;
6880 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6882 for (byte = 0; byte < total_bytes; byte++)
6884 int bitpos = byte * BITS_PER_UNIT;
6885 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6887 if (total_bytes > UNITS_PER_WORD)
6889 word = byte / UNITS_PER_WORD;
6890 if (FLOAT_WORDS_BIG_ENDIAN)
6891 word = (words - 1) - word;
6892 offset = word * UNITS_PER_WORD;
6893 if (BYTES_BIG_ENDIAN)
6894 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6896 offset += byte % UNITS_PER_WORD;
6899 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6900 ptr[offset] = value;
6905 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6906 specified by EXPR into the buffer PTR of length LEN bytes.
6907 Return the number of bytes placed in the buffer, or zero
6911 native_encode_complex (tree expr, unsigned char *ptr, int len)
6916 part = TREE_REALPART (expr);
6917 rsize = native_encode_expr (part, ptr, len);
6920 part = TREE_IMAGPART (expr);
6921 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6924 return rsize + isize;
6928 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6929 specified by EXPR into the buffer PTR of length LEN bytes.
6930 Return the number of bytes placed in the buffer, or zero
6934 native_encode_vector (tree expr, unsigned char *ptr, int len)
6936 int i, size, offset, count;
6937 tree itype, elem, elements;
6940 elements = TREE_VECTOR_CST_ELTS (expr);
6941 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6942 itype = TREE_TYPE (TREE_TYPE (expr));
6943 size = GET_MODE_SIZE (TYPE_MODE (itype));
6944 for (i = 0; i < count; i++)
6948 elem = TREE_VALUE (elements);
6949 elements = TREE_CHAIN (elements);
6956 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
6961 if (offset + size > len)
6963 memset (ptr+offset, 0, size);
6971 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6972 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6973 buffer PTR of length LEN bytes. Return the number of bytes
6974 placed in the buffer, or zero upon failure. */
6977 native_encode_expr (tree expr, unsigned char *ptr, int len)
6979 switch (TREE_CODE (expr))
6982 return native_encode_int (expr, ptr, len);
6985 return native_encode_real (expr, ptr, len);
6988 return native_encode_complex (expr, ptr, len);
6991 return native_encode_vector (expr, ptr, len);
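/* Illustrative example (not from the original sources): this routine
   dispatches on the constant's tree code, so encoding the COMPLEX_CST
   1.0f + 2.0fi of type _Complex float writes the four bytes of 1.0f
   followed by the four bytes of 2.0f, via native_encode_complex and
   two native_encode_real calls.  */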
6999 /* Subroutine of native_interpret_expr. Interpret the contents of
7000 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7001 If the buffer cannot be interpreted, return NULL_TREE. */
7004 native_interpret_int (tree type, unsigned char *ptr, int len)
7006 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7007 int byte, offset, word, words;
7008 unsigned char value;
7009 unsigned int HOST_WIDE_INT lo = 0;
7010 HOST_WIDE_INT hi = 0;
7012 if (total_bytes > len)
7014 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7016 words = total_bytes / UNITS_PER_WORD;
7018 for (byte = 0; byte < total_bytes; byte++)
7020 int bitpos = byte * BITS_PER_UNIT;
7021 if (total_bytes > UNITS_PER_WORD)
7023 word = byte / UNITS_PER_WORD;
7024 if (WORDS_BIG_ENDIAN)
7025 word = (words - 1) - word;
7026 offset = word * UNITS_PER_WORD;
7027 if (BYTES_BIG_ENDIAN)
7028 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7030 offset += byte % UNITS_PER_WORD;
7033 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7034 value = ptr[offset];
7036 if (bitpos < HOST_BITS_PER_WIDE_INT)
7037 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7039 hi |= (unsigned HOST_WIDE_INT) value
7040 << (bitpos - HOST_BITS_PER_WIDE_INT);
7043 return force_fit_type (build_int_cst_wide (type, lo, hi),
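/* Illustrative example (a sketch): interpreting the bytes
   04 03 02 01 as a 32-bit integer with BYTES_BIG_ENDIAN false
   rebuilds lo = 0x01020304, mirroring native_encode_int, and the
   result is passed through force_fit_type to normalize it for the
   requested TYPE.  */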
7048 /* Subroutine of native_interpret_expr. Interpret the contents of
7049 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7050 If the buffer cannot be interpreted, return NULL_TREE. */
7053 native_interpret_real (tree type, unsigned char *ptr, int len)
7055 enum machine_mode mode = TYPE_MODE (type);
7056 int total_bytes = GET_MODE_SIZE (mode);
7057 int byte, offset, word, words;
7058 unsigned char value;
7059 /* There are always 32 bits in each long, no matter the size of
7060 the host's long. We handle floating point representations with
7065 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7066 if (total_bytes > len || total_bytes > 24)
7068 words = total_bytes / UNITS_PER_WORD;
7070 memset (tmp, 0, sizeof (tmp));
7071 for (byte = 0; byte < total_bytes; byte++)
7073 int bitpos = byte * BITS_PER_UNIT;
7074 if (total_bytes > UNITS_PER_WORD)
7076 word = byte / UNITS_PER_WORD;
7077 if (FLOAT_WORDS_BIG_ENDIAN)
7078 word = (words - 1) - word;
7079 offset = word * UNITS_PER_WORD;
7080 if (BYTES_BIG_ENDIAN)
7081 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7083 offset += byte % UNITS_PER_WORD;
7086 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7087 value = ptr[offset];
7089 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7092 real_from_target (&r, tmp, mode);
7093 return build_real (type, r);
7097 /* Subroutine of native_interpret_expr. Interpret the contents of
7098 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7099 If the buffer cannot be interpreted, return NULL_TREE. */
7102 native_interpret_complex (tree type, unsigned char *ptr, int len)
7104 tree etype, rpart, ipart;
7107 etype = TREE_TYPE (type);
7108 size = GET_MODE_SIZE (TYPE_MODE (etype));
7111 rpart = native_interpret_expr (etype, ptr, size);
7114 ipart = native_interpret_expr (etype, ptr+size, size);
7117 return build_complex (type, rpart, ipart);
7121 /* Subroutine of native_interpret_expr. Interpret the contents of
7122 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7123 If the buffer cannot be interpreted, return NULL_TREE. */
7126 native_interpret_vector (tree type, unsigned char *ptr, int len)
7128 tree etype, elem, elements;
7131 etype = TREE_TYPE (type);
7132 size = GET_MODE_SIZE (TYPE_MODE (etype));
7133 count = TYPE_VECTOR_SUBPARTS (type);
7134 if (size * count > len)
7137 elements = NULL_TREE;
7138 for (i = count - 1; i >= 0; i--)
7140 elem = native_interpret_expr (etype, ptr+(i*size), size);
7143 elements = tree_cons (NULL_TREE, elem, elements);
7145 return build_vector (type, elements);
7149 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7150 the buffer PTR of length LEN as a constant of type TYPE. For
7151 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7152 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7153 return NULL_TREE. */
7156 native_interpret_expr (tree type, unsigned char *ptr, int len)
7158 switch (TREE_CODE (type))
7163 return native_interpret_int (type, ptr, len);
7166 return native_interpret_real (type, ptr, len);
7169 return native_interpret_complex (type, ptr, len);
7172 return native_interpret_vector (type, ptr, len);
7180 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7181 TYPE at compile-time. If we're unable to perform the conversion
7182 return NULL_TREE. */
7185 fold_view_convert_expr (tree type, tree expr)
7187 /* We support up to 512-bit values (for V8DFmode). */
7188 unsigned char buffer[64];
7191 /* Check that the host and target are sane. */
7192 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7195 len = native_encode_expr (expr, buffer, sizeof (buffer));
7199 return native_interpret_expr (type, buffer, len);
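/* Illustrative example (not from the original sources): a
   VIEW_CONVERT_EXPR reinterpreting the REAL_CST 1.0f as a 32-bit
   integer type round-trips through native_encode_expr and
   native_interpret_expr, yielding the INTEGER_CST 0x3f800000 on
   targets using IEEE single precision.  */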
7203 /* Fold a unary expression of code CODE and type TYPE with operand
7204 OP0. Return the folded expression if folding is successful.
7205 Otherwise, return NULL_TREE. */
7208 fold_unary (enum tree_code code, tree type, tree op0)
7212 enum tree_code_class kind = TREE_CODE_CLASS (code);
7214 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7215 && TREE_CODE_LENGTH (code) == 1);
7220 if (code == NOP_EXPR || code == CONVERT_EXPR
7221 || code == FLOAT_EXPR || code == ABS_EXPR)
7223 /* Don't use STRIP_NOPS, because signedness of argument type
7225 STRIP_SIGN_NOPS (arg0);
7229 /* Strip any conversions that don't change the mode. This
7230 is safe for every expression, except for a comparison
7231 expression because its signedness is derived from its
7234 Note that this is done as an internal manipulation within
7235 the constant folder, in order to find the simplest
7236 representation of the arguments so that their form can be
7237 studied. In any case, the appropriate type conversions
7238 should be put back in the tree that will get out of the
7244 if (TREE_CODE_CLASS (code) == tcc_unary)
7246 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7247 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7248 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7249 else if (TREE_CODE (arg0) == COND_EXPR)
7251 tree arg01 = TREE_OPERAND (arg0, 1);
7252 tree arg02 = TREE_OPERAND (arg0, 2);
7253 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7254 arg01 = fold_build1 (code, type, arg01);
7255 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7256 arg02 = fold_build1 (code, type, arg02);
7257 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7260 /* If this was a conversion, and all we did was to move it
7261 inside the COND_EXPR, bring it back out. But leave it if
7262 it is a conversion from integer to integer and the
7263 result precision is no wider than a word since such a
7264 conversion is cheap and may be optimized away by combine,
7265 while it couldn't if it were outside the COND_EXPR. Then return
7266 so we don't get into an infinite recursion loop taking the
7267 conversion out and then back in. */
7269 if ((code == NOP_EXPR || code == CONVERT_EXPR
7270 || code == NON_LVALUE_EXPR)
7271 && TREE_CODE (tem) == COND_EXPR
7272 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7273 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7274 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7275 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7276 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7277 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7278 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7280 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7281 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7282 || flag_syntax_only))
7283 tem = build1 (code, type,
7285 TREE_TYPE (TREE_OPERAND
7286 (TREE_OPERAND (tem, 1), 0)),
7287 TREE_OPERAND (tem, 0),
7288 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7289 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7292 else if (COMPARISON_CLASS_P (arg0))
7294 if (TREE_CODE (type) == BOOLEAN_TYPE)
7296 arg0 = copy_node (arg0);
7297 TREE_TYPE (arg0) = type;
7300 else if (TREE_CODE (type) != INTEGER_TYPE)
7301 return fold_build3 (COND_EXPR, type, arg0,
7302 fold_build1 (code, type,
7304 fold_build1 (code, type,
7305 integer_zero_node));
7314 case FIX_TRUNC_EXPR:
7316 case FIX_FLOOR_EXPR:
7317 case FIX_ROUND_EXPR:
7318 if (TREE_TYPE (op0) == type)
7321 /* If we have (type) (a CMP b) and type is an integral type, return
7322 a new expression involving the new type. */
7323 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7324 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7325 TREE_OPERAND (op0, 1));
7327 /* Handle cases of two conversions in a row. */
7328 if (TREE_CODE (op0) == NOP_EXPR
7329 || TREE_CODE (op0) == CONVERT_EXPR)
7331 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7332 tree inter_type = TREE_TYPE (op0);
7333 int inside_int = INTEGRAL_TYPE_P (inside_type);
7334 int inside_ptr = POINTER_TYPE_P (inside_type);
7335 int inside_float = FLOAT_TYPE_P (inside_type);
7336 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7337 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7338 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7339 int inter_int = INTEGRAL_TYPE_P (inter_type);
7340 int inter_ptr = POINTER_TYPE_P (inter_type);
7341 int inter_float = FLOAT_TYPE_P (inter_type);
7342 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7343 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7344 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7345 int final_int = INTEGRAL_TYPE_P (type);
7346 int final_ptr = POINTER_TYPE_P (type);
7347 int final_float = FLOAT_TYPE_P (type);
7348 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7349 unsigned int final_prec = TYPE_PRECISION (type);
7350 int final_unsignedp = TYPE_UNSIGNED (type);
7352 /* In addition to the cases of two conversions in a row
7353 handled below, if we are converting something to its own
7354 type via an object of identical or wider precision, neither
7355 conversion is needed. */
7356 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7357 && (((inter_int || inter_ptr) && final_int)
7358 || (inter_float && final_float))
7359 && inter_prec >= final_prec)
7360 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7362 /* Likewise, if the intermediate and final types are either both
7363 float or both integer, we don't need the middle conversion if
7364 it is wider than the final type and doesn't change the signedness
7365 (for integers). Avoid this if the final type is a pointer
7366 since then we sometimes need the inner conversion. Likewise if
7367 the outer has a precision not equal to the size of its mode. */
7368 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7369 || (inter_float && inside_float)
7370 || (inter_vec && inside_vec))
7371 && inter_prec >= inside_prec
7372 && (inter_float || inter_vec
7373 || inter_unsignedp == inside_unsignedp)
7374 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7375 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7377 && (! final_vec || inter_prec == inside_prec))
7378 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7380 /* If we have a sign-extension of a zero-extended value, we can
7381 replace that by a single zero-extension. */
7382 if (inside_int && inter_int && final_int
7383 && inside_prec < inter_prec && inter_prec < final_prec
7384 && inside_unsignedp && !inter_unsignedp)
7385 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7387 /* Two conversions in a row are not needed unless:
7388 - some conversion is floating-point (overstrict for now), or
7389 - some conversion is a vector (overstrict for now), or
7390 - the intermediate type is narrower than both initial and
7392 - the intermediate type and innermost type differ in signedness,
7393 and the outermost type is wider than the intermediate, or
7394 - the initial type is a pointer type and the precisions of the
7395 intermediate and final types differ, or
7396 - the final type is a pointer type and the precisions of the
7397 initial and intermediate types differ.
7398 - the final type is a pointer type and the initial type not
7399 - the initial type is a pointer to an array and the final type
7401 if (! inside_float && ! inter_float && ! final_float
7402 && ! inside_vec && ! inter_vec && ! final_vec
7403 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7404 && ! (inside_int && inter_int
7405 && inter_unsignedp != inside_unsignedp
7406 && inter_prec < final_prec)
7407 && ((inter_unsignedp && inter_prec > inside_prec)
7408 == (final_unsignedp && final_prec > inter_prec))
7409 && ! (inside_ptr && inter_prec != final_prec)
7410 && ! (final_ptr && inside_prec != inter_prec)
7411 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7412 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7413 && final_ptr == inside_ptr
7415 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7416 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7417 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7420 /* Handle (T *)&A.B.C for A being of type T and B and C
7421 living at offset zero. This occurs frequently in
7422 C++ upcasting and then accessing the base. */
7423 if (TREE_CODE (op0) == ADDR_EXPR
7424 && POINTER_TYPE_P (type)
7425 && handled_component_p (TREE_OPERAND (op0, 0)))
7427 HOST_WIDE_INT bitsize, bitpos;
7429 enum machine_mode mode;
7430 int unsignedp, volatilep;
7431 tree base = TREE_OPERAND (op0, 0);
7432 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7433 &mode, &unsignedp, &volatilep, false);
7434 /* If the reference was to a (constant) zero offset, we can use
7435 the address of the base if it has the same base type
7436 as the result type. */
7437 if (! offset && bitpos == 0
7438 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7439 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7440 return fold_convert (type, build_fold_addr_expr (base));
7443 if (TREE_CODE (op0) == MODIFY_EXPR
7444 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7445 /* Detect assigning a bitfield. */
7446 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7447 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7449 /* Don't leave an assignment inside a conversion
7450 unless assigning a bitfield. */
7451 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7452 /* First do the assignment, then return converted constant. */
7453 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7454 TREE_NO_WARNING (tem) = 1;
7455 TREE_USED (tem) = 1;
7459 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7460 constant (if x has signed type, the sign bit cannot be set
7461 in c). This folds extension into the BIT_AND_EXPR. */
7462 if (INTEGRAL_TYPE_P (type)
7463 && TREE_CODE (type) != BOOLEAN_TYPE
7464 && TREE_CODE (op0) == BIT_AND_EXPR
7465 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7468 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7471 if (TYPE_UNSIGNED (TREE_TYPE (and))
7472 || (TYPE_PRECISION (type)
7473 <= TYPE_PRECISION (TREE_TYPE (and))))
7475 else if (TYPE_PRECISION (TREE_TYPE (and1))
7476 <= HOST_BITS_PER_WIDE_INT
7477 && host_integerp (and1, 1))
7479 unsigned HOST_WIDE_INT cst;
7481 cst = tree_low_cst (and1, 1);
7482 cst &= (HOST_WIDE_INT) -1
7483 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7484 change = (cst == 0);
7485 #ifdef LOAD_EXTEND_OP
7487 && !flag_syntax_only
7488 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7491 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7492 and0 = fold_convert (uns, and0);
7493 and1 = fold_convert (uns, and1);
7499 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7500 TREE_INT_CST_HIGH (and1));
7501 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7502 TREE_CONSTANT_OVERFLOW (and1));
7503 return fold_build2 (BIT_AND_EXPR, type,
7504 fold_convert (type, and0), tem);
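/* Worked example (illustrative only): for unsigned char x, the cast
   (unsigned int) (x & 0x7f) is rebuilt as  ((unsigned int) x) & 0x7f,
   moving the widening conversion inside the mask; when x has a signed
   type instead, the check above additionally requires the mask's sign
   bit to be clear.  */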
7508 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7509 T2 being pointers to types of the same size. */
7510 if (POINTER_TYPE_P (type)
7511 && BINARY_CLASS_P (arg0)
7512 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7513 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7515 tree arg00 = TREE_OPERAND (arg0, 0);
7517 tree t1 = TREE_TYPE (arg00);
7518 tree tt0 = TREE_TYPE (t0);
7519 tree tt1 = TREE_TYPE (t1);
7520 tree s0 = TYPE_SIZE (tt0);
7521 tree s1 = TYPE_SIZE (tt1);
7523 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7524 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7525 TREE_OPERAND (arg0, 1));
7528 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7529 of the same precision, and X is an integer type not narrower than
7530 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7531 if (INTEGRAL_TYPE_P (type)
7532 && TREE_CODE (op0) == BIT_NOT_EXPR
7533 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7534 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7535 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7536 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7538 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7539 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7540 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7541 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7544 tem = fold_convert_const (code, type, arg0);
7545 return tem ? tem : NULL_TREE;
7547 case VIEW_CONVERT_EXPR:
7548 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7549 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7550 return fold_view_convert_expr (type, op0);
7553 tem = fold_negate_expr (arg0);
7555 return fold_convert (type, tem);
7559 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7560 return fold_abs_const (arg0, type);
7561 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7562 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7563 /* Convert fabs((double)float) into (double)fabsf(float). */
7564 else if (TREE_CODE (arg0) == NOP_EXPR
7565 && TREE_CODE (type) == REAL_TYPE)
7567 tree targ0 = strip_float_extensions (arg0);
7569 return fold_convert (type, fold_build1 (ABS_EXPR,
7573 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7574 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7577 /* Strip sign ops from argument. */
7578 if (TREE_CODE (type) == REAL_TYPE)
7580 tem = fold_strip_sign_ops (arg0);
7582 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7587 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7588 return fold_convert (type, arg0);
7589 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7591 tree itype = TREE_TYPE (type);
7592 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7593 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7594 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7596 if (TREE_CODE (arg0) == COMPLEX_CST)
7598 tree itype = TREE_TYPE (type);
7599 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7600 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7601 return build_complex (type, rpart, negate_expr (ipart));
7603 if (TREE_CODE (arg0) == CONJ_EXPR)
7604 return fold_convert (type, TREE_OPERAND (arg0, 0));
7608 if (TREE_CODE (arg0) == INTEGER_CST)
7609 return fold_not_const (arg0, type);
7610 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7611 return TREE_OPERAND (arg0, 0);
7612 /* Convert ~ (-A) to A - 1. */
7613 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7614 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7615 build_int_cst (type, 1));
7616 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7617 else if (INTEGRAL_TYPE_P (type)
7618 && ((TREE_CODE (arg0) == MINUS_EXPR
7619 && integer_onep (TREE_OPERAND (arg0, 1)))
7620 || (TREE_CODE (arg0) == PLUS_EXPR
7621 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7622 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7623 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7624 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7625 && (tem = fold_unary (BIT_NOT_EXPR, type,
7627 TREE_OPERAND (arg0, 0)))))
7628 return fold_build2 (BIT_XOR_EXPR, type, tem,
7629 fold_convert (type, TREE_OPERAND (arg0, 1)));
7630 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7631 && (tem = fold_unary (BIT_NOT_EXPR, type,
7633 TREE_OPERAND (arg0, 1)))))
7634 return fold_build2 (BIT_XOR_EXPR, type,
7635 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
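/* Illustrative example (not part of the original source): for 32-bit
   unsigned x, ~(x ^ 5) reaches the second branch above because ~5
   folds to the constant 0xfffffffa, so the whole expression is rebuilt
   as  x ^ 0xfffffffa  without an outer BIT_NOT_EXPR.  */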
7639 case TRUTH_NOT_EXPR:
7640 /* The argument to invert_truthvalue must have Boolean type. */
7641 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7642 arg0 = fold_convert (boolean_type_node, arg0);
7644 /* Note that the operand of this must be an int
7645 and its values must be 0 or 1.
7646 ("true" is a fixed value perhaps depending on the language,
7647 but we don't handle values other than 1 correctly yet.) */
7648 tem = fold_truth_not_expr (arg0);
7651 return fold_convert (type, tem);
7654 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7655 return fold_convert (type, arg0);
7656 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7657 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7658 TREE_OPERAND (arg0, 1));
7659 if (TREE_CODE (arg0) == COMPLEX_CST)
7660 return fold_convert (type, TREE_REALPART (arg0));
7661 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7663 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7664 tem = fold_build2 (TREE_CODE (arg0), itype,
7665 fold_build1 (REALPART_EXPR, itype,
7666 TREE_OPERAND (arg0, 0)),
7667 fold_build1 (REALPART_EXPR, itype,
7668 TREE_OPERAND (arg0, 1)));
7669 return fold_convert (type, tem);
7671 if (TREE_CODE (arg0) == CONJ_EXPR)
7673 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7674 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7675 return fold_convert (type, tem);
7680 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7681 return fold_convert (type, integer_zero_node);
7682 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7683 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7684 TREE_OPERAND (arg0, 0));
7685 if (TREE_CODE (arg0) == COMPLEX_CST)
7686 return fold_convert (type, TREE_IMAGPART (arg0));
7687 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7689 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7690 tem = fold_build2 (TREE_CODE (arg0), itype,
7691 fold_build1 (IMAGPART_EXPR, itype,
7692 TREE_OPERAND (arg0, 0)),
7693 fold_build1 (IMAGPART_EXPR, itype,
7694 TREE_OPERAND (arg0, 1)));
7695 return fold_convert (type, tem);
7697 if (TREE_CODE (arg0) == CONJ_EXPR)
7699 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7700 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7701 return fold_convert (type, negate_expr (tem));
7707 } /* switch (code) */
7710 /* Fold a binary expression of code CODE and type TYPE with operands
7711 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7712 Return the folded expression if folding is successful. Otherwise,
7713 return NULL_TREE. */
7716 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7718 enum tree_code compl_code;
7720 if (code == MIN_EXPR)
7721 compl_code = MAX_EXPR;
7722 else if (code == MAX_EXPR)
7723 compl_code = MIN_EXPR;
7727 /* MIN (MAX (a, b), b) == b. */
7728 if (TREE_CODE (op0) == compl_code
7729 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7730 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7732 /* MIN (MAX (b, a), b) == b. */
7733 if (TREE_CODE (op0) == compl_code
7734 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7735 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7736 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7738 /* MIN (a, MAX (a, b)) == a. */
7739 if (TREE_CODE (op1) == compl_code
7740 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7741 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7742 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7744 /* MIN (a, MAX (b, a)) == a. */
7745 if (TREE_CODE (op1) == compl_code
7746 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7747 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7748 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
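/* Worked example (illustrative only): MIN_EXPR <MAX_EXPR <a, b>, b>
   matches the first pattern above, so the whole expression collapses
   to b, with a kept for its side effects by omit_one_operand when
   necessary.  */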
7753 /* Subroutine of fold_binary. This routine performs all of the
7754 transformations that are common to the equality/inequality
7755 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7756 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7757 fold_binary should call fold_binary instead. Fold a comparison with
7758 tree code CODE and type TYPE with operands OP0 and OP1. Return
7759 the folded comparison or NULL_TREE. */
7762 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7764 tree arg0, arg1, tem;
7769 STRIP_SIGN_NOPS (arg0);
7770 STRIP_SIGN_NOPS (arg1);
7772 tem = fold_relational_const (code, type, arg0, arg1);
7773 if (tem != NULL_TREE)
7776 /* If one arg is a real or integer constant, put it last. */
7777 if (tree_swap_operands_p (arg0, arg1, true))
7778 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7780 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7781 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7782 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7783 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7784 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7785 && !(flag_wrapv || flag_trapv))
7786 && (TREE_CODE (arg1) == INTEGER_CST
7787 && !TREE_OVERFLOW (arg1)))
7789 tree const1 = TREE_OPERAND (arg0, 1);
7791 tree variable = TREE_OPERAND (arg0, 0);
7794 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7796 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7797 TREE_TYPE (arg1), const2, const1);
7798 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7799 && (TREE_CODE (lhs) != INTEGER_CST
7800 || !TREE_OVERFLOW (lhs)))
7801 return fold_build2 (code, type, variable, lhs);
7804 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7805 same object, then we can fold this to a comparison of the two offsets in
7806 signed size type. This is possible because pointer arithmetic is
7807 restricted to remain within an object and overflow on pointer differences
7808 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7809 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7810 && !flag_wrapv && !flag_trapv)
7812 tree base0, offset0, base1, offset1;
7814 if (extract_array_ref (arg0, &base0, &offset0)
7815 && extract_array_ref (arg1, &base1, &offset1)
7816 && operand_equal_p (base0, base1, 0))
7818 tree signed_size_type_node;
7819 signed_size_type_node = signed_type_for (size_type_node);
7821 /* By converting to signed size type we cover middle-end pointer
7822 arithmetic which operates on unsigned pointer types of size
7823 type size and ARRAY_REF offsets which are properly sign or
7824 zero extended from their type in case it is narrower than
7826 if (offset0 == NULL_TREE)
7827 offset0 = build_int_cst (signed_size_type_node, 0);
7829 offset0 = fold_convert (signed_size_type_node, offset0);
7830 if (offset1 == NULL_TREE)
7831 offset1 = build_int_cst (signed_size_type_node, 0);
7833 offset1 = fold_convert (signed_size_type_node, offset1);
7835 return fold_build2 (code, type, offset0, offset1);
7839 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7841 tree targ0 = strip_float_extensions (arg0);
7842 tree targ1 = strip_float_extensions (arg1);
7843 tree newtype = TREE_TYPE (targ0);
7845 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7846 newtype = TREE_TYPE (targ1);
7848 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7849 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7850 return fold_build2 (code, type, fold_convert (newtype, targ0),
7851 fold_convert (newtype, targ1));
7853 /* (-a) CMP (-b) -> b CMP a */
7854 if (TREE_CODE (arg0) == NEGATE_EXPR
7855 && TREE_CODE (arg1) == NEGATE_EXPR)
7856 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7857 TREE_OPERAND (arg0, 0));
7859 if (TREE_CODE (arg1) == REAL_CST)
7861 REAL_VALUE_TYPE cst;
7862 cst = TREE_REAL_CST (arg1);
7864 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7865 if (TREE_CODE (arg0) == NEGATE_EXPR)
7866 return fold_build2 (swap_tree_comparison (code), type,
7867 TREE_OPERAND (arg0, 0),
7868 build_real (TREE_TYPE (arg1),
7869 REAL_VALUE_NEGATE (cst)));
7871 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7872 /* a CMP (-0) -> a CMP 0 */
7873 if (REAL_VALUE_MINUS_ZERO (cst))
7874 return fold_build2 (code, type, arg0,
7875 build_real (TREE_TYPE (arg1), dconst0));
7877 /* x != NaN is always true, other ops are always false. */
7878 if (REAL_VALUE_ISNAN (cst)
7879 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7881 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7882 return omit_one_operand (type, tem, arg0);
7885 /* Fold comparisons against infinity. */
7886 if (REAL_VALUE_ISINF (cst))
7888 tem = fold_inf_compare (code, type, arg0, arg1);
7889 if (tem != NULL_TREE)
7894 /* If this is a comparison of a real constant with a PLUS_EXPR
7895 or a MINUS_EXPR of a real constant, we can convert it into a
7896 comparison with a revised real constant as long as no overflow
7897 occurs when unsafe_math_optimizations are enabled. */
7898 if (flag_unsafe_math_optimizations
7899 && TREE_CODE (arg1) == REAL_CST
7900 && (TREE_CODE (arg0) == PLUS_EXPR
7901 || TREE_CODE (arg0) == MINUS_EXPR)
7902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7903 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7904 ? MINUS_EXPR : PLUS_EXPR,
7905 arg1, TREE_OPERAND (arg0, 1), 0))
7906 && ! TREE_CONSTANT_OVERFLOW (tem))
7907 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7909 /* Likewise, we can simplify a comparison of a real constant with
7910 a MINUS_EXPR whose first operand is also a real constant, i.e.
7911 (c1 - x) < c2 becomes x > c1-c2. */
7912 if (flag_unsafe_math_optimizations
7913 && TREE_CODE (arg1) == REAL_CST
7914 && TREE_CODE (arg0) == MINUS_EXPR
7915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7916 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7918 && ! TREE_CONSTANT_OVERFLOW (tem))
7919 return fold_build2 (swap_tree_comparison (code), type,
7920 TREE_OPERAND (arg0, 1), tem);
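/* Worked example (illustrative, requires unsafe math optimizations):
   the comparison  (10.0 - x) < 4.0  is rewritten by the block above as
   x > 6.0,  folding the two REAL_CSTs into one constant and swapping
   the direction of the comparison.  */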
7922 /* Fold comparisons against built-in math functions. */
7923 if (TREE_CODE (arg1) == REAL_CST
7924 && flag_unsafe_math_optimizations
7925 && ! flag_errno_math)
7927 enum built_in_function fcode = builtin_mathfn_code (arg0);
7929 if (fcode != END_BUILTINS)
7931 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7932 if (tem != NULL_TREE)
7938 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7939 if (TREE_CONSTANT (arg1)
7940 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7941 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7942 /* This optimization is invalid for ordered comparisons
7943 if CONST+INCR overflows or if foo+incr might overflow.
7944 This optimization is invalid for floating point due to rounding.
7945 For pointer types we assume overflow doesn't happen. */
7946 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7947 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7948 && (code == EQ_EXPR || code == NE_EXPR))))
7950 tree varop, newconst;
7952 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7954 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7955 arg1, TREE_OPERAND (arg0, 1));
7956 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7957 TREE_OPERAND (arg0, 0),
7958 TREE_OPERAND (arg0, 1));
7962 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7963 arg1, TREE_OPERAND (arg0, 1));
7964 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7965 TREE_OPERAND (arg0, 0),
7966 TREE_OPERAND (arg0, 1));
7970 /* If VAROP is a reference to a bitfield, we must mask
7971 the constant by the width of the field. */
7972 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7973 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7974 && host_integerp (DECL_SIZE (TREE_OPERAND
7975 (TREE_OPERAND (varop, 0), 1)), 1))
7977 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7978 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7979 tree folded_compare, shift;
7981 /* First check whether the comparison would come out
7982 always the same. If we don't do that we would
7983 change the meaning with the masking. */
7984 folded_compare = fold_build2 (code, type,
7985 TREE_OPERAND (varop, 0), arg1);
7986 if (TREE_CODE (folded_compare) == INTEGER_CST)
7987 return omit_one_operand (type, folded_compare, varop);
7989 shift = build_int_cst (NULL_TREE,
7990 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7991 shift = fold_convert (TREE_TYPE (varop), shift);
7992 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7994 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7998 return fold_build2 (code, type, varop, newconst);
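/* Illustrative example (a sketch): for integer i, the test
   i++ == 10  is rewritten as  ++i == 11:  the post-increment becomes
   a pre-increment and the constant absorbs the increment, which is
   only done for EQ/NE on integral types or for pointers, as checked
   above.  */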
8001 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8002 && (TREE_CODE (arg0) == NOP_EXPR
8003 || TREE_CODE (arg0) == CONVERT_EXPR))
8005 /* If we are widening one operand of an integer comparison,
8006 see if the other operand is similarly being widened. Perhaps we
8007 can do the comparison in the narrower type. */
8008 tem = fold_widened_comparison (code, type, arg0, arg1);
8012 /* Or if we are changing signedness. */
8013 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8018 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8019 constant, we can simplify it. */
8020 if (TREE_CODE (arg1) == INTEGER_CST
8021 && (TREE_CODE (arg0) == MIN_EXPR
8022 || TREE_CODE (arg0) == MAX_EXPR)
8023 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8025 tem = optimize_minmax_comparison (code, type, op0, op1);
8030 /* Simplify comparison of something with itself. (For IEEE
8031 floating-point, we can only do some of these simplifications.) */
8032 if (operand_equal_p (arg0, arg1, 0))
8037 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8038 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8039 return constant_boolean_node (1, type);
8044 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8045 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8046 return constant_boolean_node (1, type);
8047 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8050 /* For NE, we can only do this simplification if integer
8051 or we don't honor IEEE floating point NaNs. */
8052 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8053 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8055 /* ... fall through ... */
8058 return constant_boolean_node (0, type);
8064 /* If we are comparing an expression that just has comparisons
8065 of two integer values, arithmetic expressions of those comparisons,
8066 and constants, we can simplify it. There are only three cases
8067 to check: the two values can either be equal, the first can be
8068 greater, or the second can be greater. Fold the expression for
8069 those three values. Since each value must be 0 or 1, we have
8070 eight possibilities, each of which corresponds to the constant 0
8071 or 1 or one of the six possible comparisons.
8073 This handles common cases like (a > b) == 0 but also handles
8074 expressions like ((x > y) - (y > x)) > 0, which supposedly
8075 occur in macroized code. */
8077 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8079 tree cval1 = 0, cval2 = 0;
8082 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8083 /* Don't handle degenerate cases here; they should already
8084 have been handled anyway. */
8085 && cval1 != 0 && cval2 != 0
8086 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8087 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8088 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8089 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8090 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8091 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8092 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8094 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8095 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8097 /* We can't just pass T to eval_subst in case cval1 or cval2
8098 was the same as ARG1. */
8101 = fold_build2 (code, type,
8102 eval_subst (arg0, cval1, maxval,
8106 = fold_build2 (code, type,
8107 eval_subst (arg0, cval1, maxval,
8111 = fold_build2 (code, type,
8112 eval_subst (arg0, cval1, minval,
8116 /* All three of these results should be 0 or 1. Confirm they are.
8117 Then use those values to select the proper code to use. */
8119 if (TREE_CODE (high_result) == INTEGER_CST
8120 && TREE_CODE (equal_result) == INTEGER_CST
8121 && TREE_CODE (low_result) == INTEGER_CST)
8123 /* Make a 3-bit mask with the high-order bit being the
8124 value for `>', the next for '=', and the low for '<'. */
8125 switch ((integer_onep (high_result) * 4)
8126 + (integer_onep (equal_result) * 2)
8127 + integer_onep (low_result))
8131 return omit_one_operand (type, integer_zero_node, arg0);
8152 return omit_one_operand (type, integer_one_node, arg0);
8156 return save_expr (build2 (code, type, cval1, cval2));
8157 return fold_build2 (code, type, cval1, cval2);
8162 /* Fold a comparison of the address of COMPONENT_REFs with the same
8163 type and component to a comparison of the address of the base
8164 object. In short, &x->a OP &y->a to x OP y and
8165 &x->a OP &y.a to x OP &y */
8166 if (TREE_CODE (arg0) == ADDR_EXPR
8167 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8168 && TREE_CODE (arg1) == ADDR_EXPR
8169 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8171 tree cref0 = TREE_OPERAND (arg0, 0);
8172 tree cref1 = TREE_OPERAND (arg1, 0);
8173 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8175 tree op0 = TREE_OPERAND (cref0, 0);
8176 tree op1 = TREE_OPERAND (cref1, 0);
8177 return fold_build2 (code, type,
8178 build_fold_addr_expr (op0),
8179 build_fold_addr_expr (op1));
8183 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8184 into a single range test. */
8185 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8186 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8187 && TREE_CODE (arg1) == INTEGER_CST
8188 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8189 && !integer_zerop (TREE_OPERAND (arg0, 1))
8190 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8191 && !TREE_OVERFLOW (arg1))
8193 tem = fold_div_compare (code, type, arg0, arg1);
8194 if (tem != NULL_TREE)
8202 /* Subroutine of fold_binary. Optimize complex multiplications of the
8203 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8204 argument EXPR represents the expression "z" of type TYPE. */
8207 fold_mult_zconjz (tree type, tree expr)
8209 tree itype = TREE_TYPE (type);
8210 tree rpart, ipart, tem;
8212 if (TREE_CODE (expr) == COMPLEX_EXPR)
8214 rpart = TREE_OPERAND (expr, 0);
8215 ipart = TREE_OPERAND (expr, 1);
8217 else if (TREE_CODE (expr) == COMPLEX_CST)
8219 rpart = TREE_REALPART (expr);
8220 ipart = TREE_IMAGPART (expr);
8224 expr = save_expr (expr);
8225 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8226 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8229 rpart = save_expr (rpart);
8230 ipart = save_expr (ipart);
8231 tem = fold_build2 (PLUS_EXPR, itype,
8232 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8233 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8234 return fold_build2 (COMPLEX_EXPR, type, tem,
8235 fold_convert (itype, integer_zero_node));
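/* Illustrative example (not part of the original source): for
   z = a + b*i, the product  z * conj(z)  is rebuilt here as
   COMPLEX_EXPR <a*a + b*b, 0>, i.e. the squared magnitude with a zero
   imaginary part, using save_expr so each part is evaluated only
   once.  */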
8239 /* Fold a binary expression of code CODE and type TYPE with operands
8240 OP0 and OP1. Return the folded expression if folding is
8241 successful. Otherwise, return NULL_TREE. */
8244 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8246 enum tree_code_class kind = TREE_CODE_CLASS (code);
8247 tree arg0, arg1, tem;
8248 tree t1 = NULL_TREE;
8250 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8251 && TREE_CODE_LENGTH (code) == 2
8253 && op1 != NULL_TREE);
8258 /* Strip any conversions that don't change the mode. This is
8259 safe for every expression, except for a comparison expression
8260 because its signedness is derived from its operands. So, in
8261 the latter case, only strip conversions that don't change the
8264 Note that this is done as an internal manipulation within the
8265 constant folder, in order to find the simplest representation
8266 of the arguments so that their form can be studied. In any
8267 case, the appropriate type conversions should be put back in
8268 the tree that will get out of the constant folder. */
8270 if (kind == tcc_comparison)
8272 STRIP_SIGN_NOPS (arg0);
8273 STRIP_SIGN_NOPS (arg1);
8281 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8282 constant but we can't do arithmetic on them. */
8283 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8284 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8285 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8286 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8288 if (kind == tcc_binary)
8289 tem = const_binop (code, arg0, arg1, 0);
8290 else if (kind == tcc_comparison)
8291 tem = fold_relational_const (code, type, arg0, arg1);
8295 if (tem != NULL_TREE)
8297 if (TREE_TYPE (tem) != type)
8298 tem = fold_convert (type, tem);
8303 /* If this is a commutative operation, and ARG0 is a constant, move it
8304 to ARG1 to reduce the number of tests below. */
8305 if (commutative_tree_code (code)
8306 && tree_swap_operands_p (arg0, arg1, true))
8307 return fold_build2 (code, type, op1, op0);
8309 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8311 First check for cases where an arithmetic operation is applied to a
8312 compound, conditional, or comparison operation. Push the arithmetic
8313 operation inside the compound or conditional to see if any folding
8314 can then be done. Convert comparison to conditional for this purpose.
8315 This also optimizes non-constant cases that used to be done in
8318 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8319 one of the operands is a comparison and the other is a comparison, a
8320 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8321 code below would make the expression more complex. Change it to a
8322 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8323 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8325 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8326 || code == EQ_EXPR || code == NE_EXPR)
8327 && ((truth_value_p (TREE_CODE (arg0))
8328 && (truth_value_p (TREE_CODE (arg1))
8329 || (TREE_CODE (arg1) == BIT_AND_EXPR
8330 && integer_onep (TREE_OPERAND (arg1, 1)))))
8331 || (truth_value_p (TREE_CODE (arg1))
8332 && (truth_value_p (TREE_CODE (arg0))
8333 || (TREE_CODE (arg0) == BIT_AND_EXPR
8334 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8336 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8337 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8340 fold_convert (boolean_type_node, arg0),
8341 fold_convert (boolean_type_node, arg1));
8343 if (code == EQ_EXPR)
8344 tem = invert_truthvalue (tem);
8346 return fold_convert (type, tem);
8349 if (TREE_CODE_CLASS (code) == tcc_binary
8350 || TREE_CODE_CLASS (code) == tcc_comparison)
8352 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8353 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8354 fold_build2 (code, type,
8355 TREE_OPERAND (arg0, 1), op1));
8356 if (TREE_CODE (arg1) == COMPOUND_EXPR
8357 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8358 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8359 fold_build2 (code, type,
8360 op0, TREE_OPERAND (arg1, 1)));
8362 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8364 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8366 /*cond_first_p=*/1);
8367 if (tem != NULL_TREE)
8371 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8373 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8375 /*cond_first_p=*/0);
8376 if (tem != NULL_TREE)
8384 /* A + (-B) -> A - B */
8385 if (TREE_CODE (arg1) == NEGATE_EXPR)
8386 return fold_build2 (MINUS_EXPR, type,
8387 fold_convert (type, arg0),
8388 fold_convert (type, TREE_OPERAND (arg1, 0)));
8389 /* (-A) + B -> B - A */
8390 if (TREE_CODE (arg0) == NEGATE_EXPR
8391 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8392 return fold_build2 (MINUS_EXPR, type,
8393 fold_convert (type, arg1),
8394 fold_convert (type, TREE_OPERAND (arg0, 0)));
8395 /* Convert ~A + 1 to -A. */
8396 if (INTEGRAL_TYPE_P (type)
8397 && TREE_CODE (arg0) == BIT_NOT_EXPR
8398 && integer_onep (arg1))
8399 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
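/* This is the two's complement identity -A == ~A + 1, which holds
   for any integral type when arithmetic wraps modulo 2**precision.  */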
8401 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8403 if ((TREE_CODE (arg0) == MULT_EXPR
8404 || TREE_CODE (arg1) == MULT_EXPR)
8405 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8407 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8412 if (! FLOAT_TYPE_P (type))
8414 if (integer_zerop (arg1))
8415 return non_lvalue (fold_convert (type, arg0));
8417 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8418 with a constant, and the two constants have no bits in common,
8419 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
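/* For example, (X & 0xF0) + (Y & 0x0F): the masks share no bits, so
   no carries can propagate between the two halves and the addition is
   equivalent to (X & 0xF0) | (Y & 0x0F).  */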
8421 if (TREE_CODE (arg0) == BIT_AND_EXPR
8422 && TREE_CODE (arg1) == BIT_AND_EXPR
8423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8424 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8425 && integer_zerop (const_binop (BIT_AND_EXPR,
8426 TREE_OPERAND (arg0, 1),
8427 TREE_OPERAND (arg1, 1), 0)))
8429 code = BIT_IOR_EXPR;
8433 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8434 (plus (plus (mult) (mult)) (foo)) so that we can
8435 take advantage of the factoring cases below. */
8436 if (((TREE_CODE (arg0) == PLUS_EXPR
8437 || TREE_CODE (arg0) == MINUS_EXPR)
8438 && TREE_CODE (arg1) == MULT_EXPR)
8439 || ((TREE_CODE (arg1) == PLUS_EXPR
8440 || TREE_CODE (arg1) == MINUS_EXPR)
8441 && TREE_CODE (arg0) == MULT_EXPR))
8443 tree parg0, parg1, parg, marg;
8444 enum tree_code pcode;
8446 if (TREE_CODE (arg1) == MULT_EXPR)
8447 parg = arg0, marg = arg1;
8449 parg = arg1, marg = arg0;
8450 pcode = TREE_CODE (parg);
8451 parg0 = TREE_OPERAND (parg, 0);
8452 parg1 = TREE_OPERAND (parg, 1);
8456 if (TREE_CODE (parg0) == MULT_EXPR
8457 && TREE_CODE (parg1) != MULT_EXPR)
8458 return fold_build2 (pcode, type,
8459 fold_build2 (PLUS_EXPR, type,
8460 fold_convert (type, parg0),
8461 fold_convert (type, marg)),
8462 fold_convert (type, parg1));
8463 if (TREE_CODE (parg0) != MULT_EXPR
8464 && TREE_CODE (parg1) == MULT_EXPR)
8465 return fold_build2 (PLUS_EXPR, type,
8466 fold_convert (type, parg0),
8467 fold_build2 (pcode, type,
8468 fold_convert (type, marg),
8473 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8474 of the array.  The loop optimizer sometimes produces such expressions.  */
8476 if (TREE_CODE (arg0) == ADDR_EXPR)
8478 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8480 return fold_convert (type, tem);
8482 else if (TREE_CODE (arg1) == ADDR_EXPR)
8484 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8486 return fold_convert (type, tem);
8491 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8492 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8493 return non_lvalue (fold_convert (type, arg0));
8495 /* Likewise if the operands are reversed. */
8496 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8497 return non_lvalue (fold_convert (type, arg1));
8499 /* Convert X + -C into X - C. */
8500 if (TREE_CODE (arg1) == REAL_CST
8501 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8503 tem = fold_negate_const (arg1, type);
8504 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8505 return fold_build2 (MINUS_EXPR, type,
8506 fold_convert (type, arg0),
8507 fold_convert (type, tem));
8510 if (flag_unsafe_math_optimizations
8511 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8512 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8513 && (tem = distribute_real_division (code, type, arg0, arg1)))
8516 /* Convert x+x into x*2.0. */
8517 if (operand_equal_p (arg0, arg1, 0)
8518 && SCALAR_FLOAT_TYPE_P (type))
8519 return fold_build2 (MULT_EXPR, type, arg0,
8520 build_real (type, dconst2));
8522 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8523 if (flag_unsafe_math_optimizations
8524 && TREE_CODE (arg1) == PLUS_EXPR
8525 && TREE_CODE (arg0) != MULT_EXPR)
8527 tree tree10 = TREE_OPERAND (arg1, 0);
8528 tree tree11 = TREE_OPERAND (arg1, 1);
8529 if (TREE_CODE (tree11) == MULT_EXPR
8530 && TREE_CODE (tree10) == MULT_EXPR)
8533 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8534 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8537 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
8538 if (flag_unsafe_math_optimizations
8539 && TREE_CODE (arg0) == PLUS_EXPR
8540 && TREE_CODE (arg1) != MULT_EXPR)
8542 tree tree00 = TREE_OPERAND (arg0, 0);
8543 tree tree01 = TREE_OPERAND (arg0, 1);
8544 if (TREE_CODE (tree01) == MULT_EXPR
8545 && TREE_CODE (tree00) == MULT_EXPR)
8548 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8549 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8555 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8556 is a rotate of A by C1 bits. */
8557 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8558 is a rotate of A by B bits. */
8560 enum tree_code code0, code1;
8561 code0 = TREE_CODE (arg0);
8562 code1 = TREE_CODE (arg1);
8563 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8564 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8565 && operand_equal_p (TREE_OPERAND (arg0, 0),
8566 TREE_OPERAND (arg1, 0), 0)
8567 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8569 tree tree01, tree11;
8570 enum tree_code code01, code11;
8572 tree01 = TREE_OPERAND (arg0, 1);
8573 tree11 = TREE_OPERAND (arg1, 1);
8574 STRIP_NOPS (tree01);
8575 STRIP_NOPS (tree11);
8576 code01 = TREE_CODE (tree01);
8577 code11 = TREE_CODE (tree11);
8578 if (code01 == INTEGER_CST
8579 && code11 == INTEGER_CST
8580 && TREE_INT_CST_HIGH (tree01) == 0
8581 && TREE_INT_CST_HIGH (tree11) == 0
8582 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8583 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8584 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8585 code0 == LSHIFT_EXPR ? tree01 : tree11);
8586 else if (code11 == MINUS_EXPR)
8588 tree tree110, tree111;
8589 tree110 = TREE_OPERAND (tree11, 0);
8590 tree111 = TREE_OPERAND (tree11, 1);
8591 STRIP_NOPS (tree110);
8592 STRIP_NOPS (tree111);
8593 if (TREE_CODE (tree110) == INTEGER_CST
8594 && 0 == compare_tree_int (tree110,
8596 (TREE_TYPE (TREE_OPERAND
8598 && operand_equal_p (tree01, tree111, 0))
8599 return build2 ((code0 == LSHIFT_EXPR
8602 type, TREE_OPERAND (arg0, 0), tree01);
8604 else if (code01 == MINUS_EXPR)
8606 tree tree010, tree011;
8607 tree010 = TREE_OPERAND (tree01, 0);
8608 tree011 = TREE_OPERAND (tree01, 1);
8609 STRIP_NOPS (tree010);
8610 STRIP_NOPS (tree011);
8611 if (TREE_CODE (tree010) == INTEGER_CST
8612 && 0 == compare_tree_int (tree010,
8614 (TREE_TYPE (TREE_OPERAND
8616 && operand_equal_p (tree11, tree011, 0))
8617 return build2 ((code0 != LSHIFT_EXPR
8620 type, TREE_OPERAND (arg0, 0), tree11);
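/* For example, with a 32-bit unsigned A, both (A << 3) + (A >> 29)
   and (A << B) + (A >> (32 - B)) are recognized here and rewritten as
   a left rotate of A.  */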
8626 /* In most languages, we can't associate operations on floats through
8627 parentheses. Rather than remember where the parentheses were, we
8628 don't associate floats at all, unless the user has specified
8629 -funsafe-math-optimizations. */
8631 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8633 tree var0, con0, lit0, minus_lit0;
8634 tree var1, con1, lit1, minus_lit1;
8636 /* Split both trees into variables, constants, and literals. Then
8637 associate each group together, the constants with literals,
8638 then the result with variables. This increases the chances of
8639 literals being recombined later and of generating relocatable
8640 expressions for the sum of a constant and literal. */
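/* For example, (X + 1) + (Y + 2) is split into the variable parts X
   and Y and the literals 1 and 2, and is re-associated as (X + Y) + 3
   so that the constant part is folded.  */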
8641 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8642 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8643 code == MINUS_EXPR);
8645 /* Only do something if we found more than two objects. Otherwise,
8646 nothing has changed and we risk infinite recursion. */
8647 if (2 < ((var0 != 0) + (var1 != 0)
8648 + (con0 != 0) + (con1 != 0)
8649 + (lit0 != 0) + (lit1 != 0)
8650 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8652 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8653 if (code == MINUS_EXPR)
8656 var0 = associate_trees (var0, var1, code, type);
8657 con0 = associate_trees (con0, con1, code, type);
8658 lit0 = associate_trees (lit0, lit1, code, type);
8659 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8661 /* Preserve the MINUS_EXPR if the negative part of the literal is
8662 greater than the positive part. Otherwise, the multiplicative
8663 folding code (i.e. extract_muldiv) may be fooled if
8664 unsigned constants are subtracted, as in the following
8665 example: ((X*2 + 4) - 8U)/2. */
8666 if (minus_lit0 && lit0)
8668 if (TREE_CODE (lit0) == INTEGER_CST
8669 && TREE_CODE (minus_lit0) == INTEGER_CST
8670 && tree_int_cst_lt (lit0, minus_lit0))
8672 minus_lit0 = associate_trees (minus_lit0, lit0,
8678 lit0 = associate_trees (lit0, minus_lit0,
8686 return fold_convert (type,
8687 associate_trees (var0, minus_lit0,
8691 con0 = associate_trees (con0, minus_lit0,
8693 return fold_convert (type,
8694 associate_trees (var0, con0,
8699 con0 = associate_trees (con0, lit0, code, type);
8700 return fold_convert (type, associate_trees (var0, con0,
8708 /* A - (-B) -> A + B */
8709 if (TREE_CODE (arg1) == NEGATE_EXPR)
8710 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8711 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8712 if (TREE_CODE (arg0) == NEGATE_EXPR
8713 && (FLOAT_TYPE_P (type)
8714 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8715 && negate_expr_p (arg1)
8716 && reorder_operands_p (arg0, arg1))
8717 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8718 TREE_OPERAND (arg0, 0));
8719 /* Convert -A - 1 to ~A. */
8720 if (INTEGRAL_TYPE_P (type)
8721 && TREE_CODE (arg0) == NEGATE_EXPR
8722 && integer_onep (arg1))
8723 return fold_build1 (BIT_NOT_EXPR, type,
8724 fold_convert (type, TREE_OPERAND (arg0, 0)));
8726 /* Convert -1 - A to ~A. */
8727 if (INTEGRAL_TYPE_P (type)
8728 && integer_all_onesp (arg0))
8729 return fold_build1 (BIT_NOT_EXPR, type, arg1);
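/* Both folds above are the two's complement identity ~A == -A - 1,
   read in each direction: -A - 1 becomes ~A, and -1 - A becomes ~A.  */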
8731 if (! FLOAT_TYPE_P (type))
8733 if (integer_zerop (arg0))
8734 return negate_expr (fold_convert (type, arg1));
8735 if (integer_zerop (arg1))
8736 return non_lvalue (fold_convert (type, arg0));
8738 /* Fold A - (A & B) into ~B & A. */
8739 if (!TREE_SIDE_EFFECTS (arg0)
8740 && TREE_CODE (arg1) == BIT_AND_EXPR)
8742 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8743 return fold_build2 (BIT_AND_EXPR, type,
8744 fold_build1 (BIT_NOT_EXPR, type,
8745 TREE_OPERAND (arg1, 0)),
8747 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8748 return fold_build2 (BIT_AND_EXPR, type,
8749 fold_build1 (BIT_NOT_EXPR, type,
8750 TREE_OPERAND (arg1, 1)),
8754 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8755 any power of 2 minus 1. */
8756 if (TREE_CODE (arg0) == BIT_AND_EXPR
8757 && TREE_CODE (arg1) == BIT_AND_EXPR
8758 && operand_equal_p (TREE_OPERAND (arg0, 0),
8759 TREE_OPERAND (arg1, 0), 0))
8761 tree mask0 = TREE_OPERAND (arg0, 1);
8762 tree mask1 = TREE_OPERAND (arg1, 1);
8763 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8765 if (operand_equal_p (tem, mask1, 0))
8767 tem = fold_build2 (BIT_XOR_EXPR, type,
8768 TREE_OPERAND (arg0, 0), mask1);
8769 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8774 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8775 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8776 return non_lvalue (fold_convert (type, arg0));
8778 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8779 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8780 (-ARG1 + ARG0) reduces to -ARG1. */
8781 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8782 return negate_expr (fold_convert (type, arg1));
8784 /* Fold &x - &x. This can happen from &x.foo - &x.
8785 This is unsafe for certain floats even in non-IEEE formats.
8786 In IEEE, it is unsafe because it gives the wrong result for NaNs.
8787 Also note that operand_equal_p is always false if an operand is volatile.  */
8790 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8791 && operand_equal_p (arg0, arg1, 0))
8792 return fold_convert (type, integer_zero_node);
8794 /* A - B -> A + (-B) if B is easily negatable. */
8795 if (negate_expr_p (arg1)
8796 && ((FLOAT_TYPE_P (type)
8797 /* Avoid this transformation if B is a positive REAL_CST. */
8798 && (TREE_CODE (arg1) != REAL_CST
8799 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8800 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8801 return fold_build2 (PLUS_EXPR, type,
8802 fold_convert (type, arg0),
8803 fold_convert (type, negate_expr (arg1)));
8805 /* Try folding difference of addresses. */
8809 if ((TREE_CODE (arg0) == ADDR_EXPR
8810 || TREE_CODE (arg1) == ADDR_EXPR)
8811 && ptr_difference_const (arg0, arg1, &diff))
8812 return build_int_cst_type (type, diff);
8815 /* Fold &a[i] - &a[j] to i-j. */
8816 if (TREE_CODE (arg0) == ADDR_EXPR
8817 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8818 && TREE_CODE (arg1) == ADDR_EXPR
8819 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8821 tree aref0 = TREE_OPERAND (arg0, 0);
8822 tree aref1 = TREE_OPERAND (arg1, 0);
8823 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8824 TREE_OPERAND (aref1, 0), 0))
8826 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8827 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8828 tree esz = array_ref_element_size (aref0);
8829 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8830 return fold_build2 (MULT_EXPR, type, diff,
8831 fold_convert (type, esz));
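/* Note that the result built here is (i - j) scaled by the array's
   element size, not the bare index difference that the short-hand in
   the comment above suggests.  */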
8836 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8837 of the array.  The loop optimizer sometimes produces such expressions.  */
8839 if (TREE_CODE (arg0) == ADDR_EXPR)
8841 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8843 return fold_convert (type, tem);
8846 if (flag_unsafe_math_optimizations
8847 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8848 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8849 && (tem = distribute_real_division (code, type, arg0, arg1)))
8852 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8854 if ((TREE_CODE (arg0) == MULT_EXPR
8855 || TREE_CODE (arg1) == MULT_EXPR)
8856 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8858 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8866 /* (-A) * (-B) -> A * B */
8867 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8868 return fold_build2 (MULT_EXPR, type,
8869 TREE_OPERAND (arg0, 0),
8870 negate_expr (arg1));
8871 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8872 return fold_build2 (MULT_EXPR, type,
8874 TREE_OPERAND (arg1, 0));
8876 if (! FLOAT_TYPE_P (type))
8878 if (integer_zerop (arg1))
8879 return omit_one_operand (type, arg1, arg0);
8880 if (integer_onep (arg1))
8881 return non_lvalue (fold_convert (type, arg0));
8882 /* Transform x * -1 into -x. */
8883 if (integer_all_onesp (arg1))
8884 return fold_convert (type, negate_expr (arg0));
8886 /* (a * (1 << b)) is (a << b) */
8887 if (TREE_CODE (arg1) == LSHIFT_EXPR
8888 && integer_onep (TREE_OPERAND (arg1, 0)))
8889 return fold_build2 (LSHIFT_EXPR, type, arg0,
8890 TREE_OPERAND (arg1, 1));
8891 if (TREE_CODE (arg0) == LSHIFT_EXPR
8892 && integer_onep (TREE_OPERAND (arg0, 0)))
8893 return fold_build2 (LSHIFT_EXPR, type, arg1,
8894 TREE_OPERAND (arg0, 1));
8896 if (TREE_CODE (arg1) == INTEGER_CST
8897 && 0 != (tem = extract_muldiv (op0,
8898 fold_convert (type, arg1),
8900 return fold_convert (type, tem);
8902 /* Optimize z * conj(z) for integer complex numbers. */
8903 if (TREE_CODE (arg0) == CONJ_EXPR
8904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8905 return fold_mult_zconjz (type, arg1);
8906 if (TREE_CODE (arg1) == CONJ_EXPR
8907 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8908 return fold_mult_zconjz (type, arg0);
8912 /* Maybe fold x * 0 to 0. The expressions aren't the same
8913 when x is NaN, since x * 0 is also NaN. Nor are they the
8914 same in modes with signed zeros, since multiplying a
8915 negative value by 0 gives -0, not +0. */
8916 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8917 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8918 && real_zerop (arg1))
8919 return omit_one_operand (type, arg1, arg0);
8920 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8921 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8922 && real_onep (arg1))
8923 return non_lvalue (fold_convert (type, arg0));
8925 /* Transform x * -1.0 into -x. */
8926 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8927 && real_minus_onep (arg1))
8928 return fold_convert (type, negate_expr (arg0));
8930 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8931 if (flag_unsafe_math_optimizations
8932 && TREE_CODE (arg0) == RDIV_EXPR
8933 && TREE_CODE (arg1) == REAL_CST
8934 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8936 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8939 return fold_build2 (RDIV_EXPR, type, tem,
8940 TREE_OPERAND (arg0, 1));
8943 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8944 if (operand_equal_p (arg0, arg1, 0))
8946 tree tem = fold_strip_sign_ops (arg0);
8947 if (tem != NULL_TREE)
8949 tem = fold_convert (type, tem);
8950 return fold_build2 (MULT_EXPR, type, tem, tem);
8954 /* Optimize z * conj(z) for floating point complex numbers.
8955 Guarded by flag_unsafe_math_optimizations as non-finite
8956 imaginary components don't produce scalar results. */
8957 if (flag_unsafe_math_optimizations
8958 && TREE_CODE (arg0) == CONJ_EXPR
8959 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8960 return fold_mult_zconjz (type, arg1);
8961 if (flag_unsafe_math_optimizations
8962 && TREE_CODE (arg1) == CONJ_EXPR
8963 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8964 return fold_mult_zconjz (type, arg0);
8966 if (flag_unsafe_math_optimizations)
8968 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8969 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8971 /* Optimizations of root(...)*root(...). */
8972 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8974 tree rootfn, arg, arglist;
8975 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8976 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8978 /* Optimize sqrt(x)*sqrt(x) as x. */
8979 if (BUILTIN_SQRT_P (fcode0)
8980 && operand_equal_p (arg00, arg10, 0)
8981 && ! HONOR_SNANS (TYPE_MODE (type)))
8984 /* Optimize root(x)*root(y) as root(x*y). */
8985 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8986 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8987 arglist = build_tree_list (NULL_TREE, arg);
8988 return build_function_call_expr (rootfn, arglist);
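/* For example, sqrt(x)*sqrt(x) becomes plain x, and sqrt(x)*sqrt(y)
   becomes sqrt(x*y).  These are guarded by -funsafe-math-optimizations
   because the rewritten forms can differ from the originals for
   negative or NaN arguments.  */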
8991 /* Optimize expN(x)*expN(y) as expN(x+y). */
8992 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8994 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8995 tree arg = fold_build2 (PLUS_EXPR, type,
8996 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8997 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8998 tree arglist = build_tree_list (NULL_TREE, arg);
8999 return build_function_call_expr (expfn, arglist);
9002 /* Optimizations of pow(...)*pow(...). */
9003 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9004 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9005 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9007 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9008 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9010 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9011 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9014 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9015 if (operand_equal_p (arg01, arg11, 0))
9017 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9018 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9019 tree arglist = tree_cons (NULL_TREE, arg,
9020 build_tree_list (NULL_TREE,
9022 return build_function_call_expr (powfn, arglist);
9025 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9026 if (operand_equal_p (arg00, arg10, 0))
9028 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9029 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9030 tree arglist = tree_cons (NULL_TREE, arg00,
9031 build_tree_list (NULL_TREE,
9033 return build_function_call_expr (powfn, arglist);
9037 /* Optimize tan(x)*cos(x) as sin(x). */
9038 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9039 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9040 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9041 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9042 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9043 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9044 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9045 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9047 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9049 if (sinfn != NULL_TREE)
9050 return build_function_call_expr (sinfn,
9051 TREE_OPERAND (arg0, 1));
9054 /* Optimize x*pow(x,c) as pow(x,c+1). */
9055 if (fcode1 == BUILT_IN_POW
9056 || fcode1 == BUILT_IN_POWF
9057 || fcode1 == BUILT_IN_POWL)
9059 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9060 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9062 if (TREE_CODE (arg11) == REAL_CST
9063 && ! TREE_CONSTANT_OVERFLOW (arg11)
9064 && operand_equal_p (arg0, arg10, 0))
9066 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9070 c = TREE_REAL_CST (arg11);
9071 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9072 arg = build_real (type, c);
9073 arglist = build_tree_list (NULL_TREE, arg);
9074 arglist = tree_cons (NULL_TREE, arg0, arglist);
9075 return build_function_call_expr (powfn, arglist);
9079 /* Optimize pow(x,c)*x as pow(x,c+1). */
9080 if (fcode0 == BUILT_IN_POW
9081 || fcode0 == BUILT_IN_POWF
9082 || fcode0 == BUILT_IN_POWL)
9084 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9085 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9087 if (TREE_CODE (arg01) == REAL_CST
9088 && ! TREE_CONSTANT_OVERFLOW (arg01)
9089 && operand_equal_p (arg1, arg00, 0))
9091 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9095 c = TREE_REAL_CST (arg01);
9096 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9097 arg = build_real (type, c);
9098 arglist = build_tree_list (NULL_TREE, arg);
9099 arglist = tree_cons (NULL_TREE, arg1, arglist);
9100 return build_function_call_expr (powfn, arglist);
9104 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9106 && operand_equal_p (arg0, arg1, 0))
9108 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9112 tree arg = build_real (type, dconst2);
9113 tree arglist = build_tree_list (NULL_TREE, arg);
9114 arglist = tree_cons (NULL_TREE, arg0, arglist);
9115 return build_function_call_expr (powfn, arglist);
9124 if (integer_all_onesp (arg1))
9125 return omit_one_operand (type, arg1, arg0);
9126 if (integer_zerop (arg1))
9127 return non_lvalue (fold_convert (type, arg0));
9128 if (operand_equal_p (arg0, arg1, 0))
9129 return non_lvalue (fold_convert (type, arg0));
9132 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9135 t1 = build_int_cst (type, -1);
9136 t1 = force_fit_type (t1, 0, false, false);
9137 return omit_one_operand (type, t1, arg1);
9141 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9142 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9144 t1 = build_int_cst (type, -1);
9145 t1 = force_fit_type (t1, 0, false, false);
9146 return omit_one_operand (type, t1, arg0);
9149 /* Canonicalize (X & C1) | C2. */
9150 if (TREE_CODE (arg0) == BIT_AND_EXPR
9151 && TREE_CODE (arg1) == INTEGER_CST
9152 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9154 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9155 int width = TYPE_PRECISION (type);
9156 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9157 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9158 hi2 = TREE_INT_CST_HIGH (arg1);
9159 lo2 = TREE_INT_CST_LOW (arg1);
9161 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9162 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9163 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9165 if (width > HOST_BITS_PER_WIDE_INT)
9167 mhi = (unsigned HOST_WIDE_INT) -1
9168 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9174 mlo = (unsigned HOST_WIDE_INT) -1
9175 >> (HOST_BITS_PER_WIDE_INT - width);
9178 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9179 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9180 return fold_build2 (BIT_IOR_EXPR, type,
9181 TREE_OPERAND (arg0, 0), arg1);
9183 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
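/* For example, (X & 0x0F) | 0x03 is canonicalized to (X & 0x0C) | 0x03:
   the bits of C1 that are already set in C2 contribute nothing.  */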
9186 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9187 return fold_build2 (BIT_IOR_EXPR, type,
9188 fold_build2 (BIT_AND_EXPR, type,
9189 TREE_OPERAND (arg0, 0),
9190 build_int_cst_wide (type,
9196 /* (X & Y) | Y is (X, Y). */
9197 if (TREE_CODE (arg0) == BIT_AND_EXPR
9198 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9199 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9200 /* (X & Y) | X is (Y, X). */
9201 if (TREE_CODE (arg0) == BIT_AND_EXPR
9202 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9203 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9204 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9205 /* X | (X & Y) is (Y, X). */
9206 if (TREE_CODE (arg1) == BIT_AND_EXPR
9207 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9208 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9209 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9210 /* X | (Y & X) is (Y, X). */
9211 if (TREE_CODE (arg1) == BIT_AND_EXPR
9212 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9213 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9214 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9216 t1 = distribute_bit_expr (code, type, arg0, arg1);
9217 if (t1 != NULL_TREE)
9220 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9222 This results in more efficient code for machines without a NAND
9223 instruction. Combine will canonicalize to the first form
9224 which will allow use of NAND instructions provided by the
9225 backend if they exist. */
9226 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9227 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9229 return fold_build1 (BIT_NOT_EXPR, type,
9230 build2 (BIT_AND_EXPR, type,
9231 TREE_OPERAND (arg0, 0),
9232 TREE_OPERAND (arg1, 0)));
9235 /* See if this can be simplified into a rotate first. If that
9236 is unsuccessful continue in the association code. */
9240 if (integer_zerop (arg1))
9241 return non_lvalue (fold_convert (type, arg0));
9242 if (integer_all_onesp (arg1))
9243 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9244 if (operand_equal_p (arg0, arg1, 0))
9245 return omit_one_operand (type, integer_zero_node, arg0);
9248 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9249 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9251 t1 = build_int_cst (type, -1);
9252 t1 = force_fit_type (t1, 0, false, false);
9253 return omit_one_operand (type, t1, arg1);
9257 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9260 t1 = build_int_cst (type, -1);
9261 t1 = force_fit_type (t1, 0, false, false);
9262 return omit_one_operand (type, t1, arg0);
9265 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9266 with a constant, and the two constants have no bits in common,
9267 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
9269 if (TREE_CODE (arg0) == BIT_AND_EXPR
9270 && TREE_CODE (arg1) == BIT_AND_EXPR
9271 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9272 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9273 && integer_zerop (const_binop (BIT_AND_EXPR,
9274 TREE_OPERAND (arg0, 1),
9275 TREE_OPERAND (arg1, 1), 0)))
9277 code = BIT_IOR_EXPR;
9281 /* (X | Y) ^ X -> Y & ~X.  */
9282 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9283 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9285 tree t2 = TREE_OPERAND (arg0, 1);
9286 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9288 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9289 fold_convert (type, t1));
9293 /* (Y | X) ^ X -> Y & ~X.  */
9294 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9295 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9297 tree t2 = TREE_OPERAND (arg0, 0);
9298 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9300 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9301 fold_convert (type, t1));
9305 /* X ^ (X | Y) -> Y & ~X.  */
9306 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9307 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9309 tree t2 = TREE_OPERAND (arg1, 1);
9310 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9312 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9313 fold_convert (type, t1));
9317 /* X ^ (Y | X) -> Y & ~X.  */
9318 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9319 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9321 tree t2 = TREE_OPERAND (arg1, 0);
9322 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9324 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9325 fold_convert (type, t1));
9329 /* Convert ~X ^ ~Y to X ^ Y. */
9330 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9331 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9332 return fold_build2 (code, type,
9333 fold_convert (type, TREE_OPERAND (arg0, 0)),
9334 fold_convert (type, TREE_OPERAND (arg1, 0)));
9336 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9337 if (TREE_CODE (arg0) == BIT_AND_EXPR
9338 && integer_onep (TREE_OPERAND (arg0, 1))
9339 && integer_onep (arg1))
9340 return fold_build2 (EQ_EXPR, type, arg0,
9341 build_int_cst (TREE_TYPE (arg0), 0));
9343 /* Fold (X & Y) ^ Y as ~X & Y. */
9344 if (TREE_CODE (arg0) == BIT_AND_EXPR
9345 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9347 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9348 return fold_build2 (BIT_AND_EXPR, type,
9349 fold_build1 (BIT_NOT_EXPR, type, tem),
9350 fold_convert (type, arg1));
9352 /* Fold (X & Y) ^ X as ~Y & X. */
9353 if (TREE_CODE (arg0) == BIT_AND_EXPR
9354 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9355 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9357 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9358 return fold_build2 (BIT_AND_EXPR, type,
9359 fold_build1 (BIT_NOT_EXPR, type, tem),
9360 fold_convert (type, arg1));
9362 /* Fold X ^ (X & Y) as X & ~Y. */
9363 if (TREE_CODE (arg1) == BIT_AND_EXPR
9364 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9366 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9367 return fold_build2 (BIT_AND_EXPR, type,
9368 fold_convert (type, arg0),
9369 fold_build1 (BIT_NOT_EXPR, type, tem));
9371 /* Fold X ^ (Y & X) as ~Y & X. */
9372 if (TREE_CODE (arg1) == BIT_AND_EXPR
9373 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9374 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9376 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9377 return fold_build2 (BIT_AND_EXPR, type,
9378 fold_build1 (BIT_NOT_EXPR, type, tem),
9379 fold_convert (type, arg0));
9382 /* See if this can be simplified into a rotate first. If that
9383 is unsuccessful continue in the association code. */
9387 if (integer_all_onesp (arg1))
9388 return non_lvalue (fold_convert (type, arg0));
9389 if (integer_zerop (arg1))
9390 return omit_one_operand (type, arg1, arg0);
9391 if (operand_equal_p (arg0, arg1, 0))
9392 return non_lvalue (fold_convert (type, arg0));
9394 /* ~X & X is always zero. */
9395 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9396 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9397 return omit_one_operand (type, integer_zero_node, arg1);
9399 /* X & ~X is always zero. */
9400 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9402 return omit_one_operand (type, integer_zero_node, arg0);
9404 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9405 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9406 && TREE_CODE (arg1) == INTEGER_CST
9407 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9408 return fold_build2 (BIT_IOR_EXPR, type,
9409 fold_build2 (BIT_AND_EXPR, type,
9410 TREE_OPERAND (arg0, 0), arg1),
9411 fold_build2 (BIT_AND_EXPR, type,
9412 TREE_OPERAND (arg0, 1), arg1));
9414 /* (X | Y) & Y is (X, Y). */
9415 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9416 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9417 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9418 /* (X | Y) & X is (Y, X). */
9419 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9421 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9422 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9423 /* X & (X | Y) is (Y, X). */
9424 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9425 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9426 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9427 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9428 /* X & (Y | X) is (Y, X). */
9429 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9431 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9432 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9434 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9435 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9436 && integer_onep (TREE_OPERAND (arg0, 1))
9437 && integer_onep (arg1))
9439 tem = TREE_OPERAND (arg0, 0);
9440 return fold_build2 (EQ_EXPR, type,
9441 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9442 build_int_cst (TREE_TYPE (tem), 1)),
9443 build_int_cst (TREE_TYPE (tem), 0));
9445 /* Fold ~X & 1 as (X & 1) == 0. */
9446 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9447 && integer_onep (arg1))
9449 tem = TREE_OPERAND (arg0, 0);
9450 return fold_build2 (EQ_EXPR, type,
9451 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9452 build_int_cst (TREE_TYPE (tem), 1)),
9453 build_int_cst (TREE_TYPE (tem), 0));
9456 /* Fold (X ^ Y) & Y as ~X & Y. */
9457 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9458 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9460 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9461 return fold_build2 (BIT_AND_EXPR, type,
9462 fold_build1 (BIT_NOT_EXPR, type, tem),
9463 fold_convert (type, arg1));
9465 /* Fold (X ^ Y) & X as ~Y & X. */
9466 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9467 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9468 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9470 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9471 return fold_build2 (BIT_AND_EXPR, type,
9472 fold_build1 (BIT_NOT_EXPR, type, tem),
9473 fold_convert (type, arg1));
9475 /* Fold X & (X ^ Y) as X & ~Y. */
9476 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9479 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9480 return fold_build2 (BIT_AND_EXPR, type,
9481 fold_convert (type, arg0),
9482 fold_build1 (BIT_NOT_EXPR, type, tem));
9484 /* Fold X & (Y ^ X) as ~Y & X. */
9485 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9486 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9487 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9489 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9490 return fold_build2 (BIT_AND_EXPR, type,
9491 fold_build1 (BIT_NOT_EXPR, type, tem),
9492 fold_convert (type, arg0));
9495 t1 = distribute_bit_expr (code, type, arg0, arg1);
9496 if (t1 != NULL_TREE)
9498 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9499 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9500 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9503 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9505 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9506 && (~TREE_INT_CST_LOW (arg1)
9507 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9508 return fold_convert (type, TREE_OPERAND (arg0, 0));
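/* The widening conversion from unsigned char already zero-extends, so
   all bits above the low eight are known to be clear and the mask 0377
   removes nothing; e.g. ((int) c & 0xff) is simply (int) c.  */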
9511 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9513 This results in more efficient code for machines without a NOR
9514 instruction. Combine will canonicalize to the first form
9515 which will allow use of NOR instructions provided by the
9516 backend if they exist. */
9517 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9518 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9520 return fold_build1 (BIT_NOT_EXPR, type,
9521 build2 (BIT_IOR_EXPR, type,
9522 TREE_OPERAND (arg0, 0),
9523 TREE_OPERAND (arg1, 0)));
9529 /* Don't touch a floating-point divide by zero unless the mode
9530 of the constant can represent infinity. */
9531 if (TREE_CODE (arg1) == REAL_CST
9532 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9533 && real_zerop (arg1))
9536 /* Optimize A / A to 1.0 if we don't care about
9537 NaNs or Infinities. Skip the transformation
9538 for non-real operands. */
9539 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9540 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9541 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9542 && operand_equal_p (arg0, arg1, 0))
9544 tree r = build_real (TREE_TYPE (arg0), dconst1);
9546 return omit_two_operands (type, r, arg0, arg1);
9549 /* The complex version of the above A / A optimization. */
9550 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9551 && operand_equal_p (arg0, arg1, 0))
9553 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9554 if (! HONOR_NANS (TYPE_MODE (elem_type))
9555 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9557 tree r = build_real (elem_type, dconst1);
9558 /* omit_two_operands will call fold_convert for us. */
9559 return omit_two_operands (type, r, arg0, arg1);
9563 /* (-A) / (-B) -> A / B */
9564 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9565 return fold_build2 (RDIV_EXPR, type,
9566 TREE_OPERAND (arg0, 0),
9567 negate_expr (arg1));
9568 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9569 return fold_build2 (RDIV_EXPR, type,
9571 TREE_OPERAND (arg1, 0));
9573 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9574 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9575 && real_onep (arg1))
9576 return non_lvalue (fold_convert (type, arg0));
9578 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9579 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9580 && real_minus_onep (arg1))
9581 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9583 /* If ARG1 is a constant, we can convert this to a multiply by the
9584 reciprocal. This does not have the same rounding properties,
9585 so only do this if -funsafe-math-optimizations. We can actually
9586 always safely do it if ARG1 is a power of two, but it's hard to
9587 tell if it is or not in a portable manner. */
9588 if (TREE_CODE (arg1) == REAL_CST)
9590 if (flag_unsafe_math_optimizations
9591 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9593 return fold_build2 (MULT_EXPR, type, arg0, tem);
9594 /* Find the reciprocal if optimizing and the result is exact. */
9598 r = TREE_REAL_CST (arg1);
9599 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9601 tem = build_real (type, r);
9602 return fold_build2 (MULT_EXPR, type,
9603 fold_convert (type, arg0), tem);
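/* For example, X / 2.0 can become X * 0.5 even without
   -funsafe-math-optimizations, because 0.5 is the exact reciprocal of
   2.0, whereas X / 3.0 is rewritten as a multiply by the rounded
   reciprocal only when unsafe math optimizations are enabled.  */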
9607 /* Convert A/B/C to A/(B*C). */
9608 if (flag_unsafe_math_optimizations
9609 && TREE_CODE (arg0) == RDIV_EXPR)
9610 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9611 fold_build2 (MULT_EXPR, type,
9612 TREE_OPERAND (arg0, 1), arg1));
9614 /* Convert A/(B/C) to (A/B)*C. */
9615 if (flag_unsafe_math_optimizations
9616 && TREE_CODE (arg1) == RDIV_EXPR)
9617 return fold_build2 (MULT_EXPR, type,
9618 fold_build2 (RDIV_EXPR, type, arg0,
9619 TREE_OPERAND (arg1, 0)),
9620 TREE_OPERAND (arg1, 1));
9622 /* Convert C1/(X*C2) into (C1/C2)/X. */
9623 if (flag_unsafe_math_optimizations
9624 && TREE_CODE (arg1) == MULT_EXPR
9625 && TREE_CODE (arg0) == REAL_CST
9626 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9628 tree tem = const_binop (RDIV_EXPR, arg0,
9629 TREE_OPERAND (arg1, 1), 0);
9631 return fold_build2 (RDIV_EXPR, type, tem,
9632 TREE_OPERAND (arg1, 0));
9635 if (flag_unsafe_math_optimizations)
9637 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9638 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9640 /* Optimize sin(x)/cos(x) as tan(x). */
9641 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9642 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9643 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9644 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9645 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9647 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9649 if (tanfn != NULL_TREE)
9650 return build_function_call_expr (tanfn,
9651 TREE_OPERAND (arg0, 1));
9654 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9655 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9656 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9657 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9658 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9659 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9661 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9663 if (tanfn != NULL_TREE)
9665 tree tmp = TREE_OPERAND (arg0, 1);
9666 tmp = build_function_call_expr (tanfn, tmp);
9667 return fold_build2 (RDIV_EXPR, type,
9668 build_real (type, dconst1), tmp);
9672 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9673 NaNs or Infinities. */
9674 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9675 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9676 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9678 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9679 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9681 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9682 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9683 && operand_equal_p (arg00, arg01, 0))
9685 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9687 if (cosfn != NULL_TREE)
9688 return build_function_call_expr (cosfn,
9689 TREE_OPERAND (arg0, 1));
9693 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9694 NaNs or Infinities. */
9695 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9696 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9697 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9699 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9700 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9702 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9703 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9704 && operand_equal_p (arg00, arg01, 0))
9706 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9708 if (cosfn != NULL_TREE)
9710 tree tmp = TREE_OPERAND (arg0, 1);
9711 tmp = build_function_call_expr (cosfn, tmp);
9712 return fold_build2 (RDIV_EXPR, type,
9713 build_real (type, dconst1),
9719 /* Optimize pow(x,c)/x as pow(x,c-1). */
9720 if (fcode0 == BUILT_IN_POW
9721 || fcode0 == BUILT_IN_POWF
9722 || fcode0 == BUILT_IN_POWL)
9724 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9725 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9726 if (TREE_CODE (arg01) == REAL_CST
9727 && ! TREE_CONSTANT_OVERFLOW (arg01)
9728 && operand_equal_p (arg1, arg00, 0))
9730 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9734 c = TREE_REAL_CST (arg01);
9735 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9736 arg = build_real (type, c);
9737 arglist = build_tree_list (NULL_TREE, arg);
9738 arglist = tree_cons (NULL_TREE, arg1, arglist);
9739 return build_function_call_expr (powfn, arglist);
9743 /* Optimize x/expN(y) into x*expN(-y). */
9744 if (BUILTIN_EXPONENT_P (fcode1))
9746 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9747 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9748 tree arglist = build_tree_list (NULL_TREE,
9749 fold_convert (type, arg));
9750 arg1 = build_function_call_expr (expfn, arglist);
9751 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9754 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9755 if (fcode1 == BUILT_IN_POW
9756 || fcode1 == BUILT_IN_POWF
9757 || fcode1 == BUILT_IN_POWL)
9759 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9760 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9761 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9762 tree neg11 = fold_convert (type, negate_expr (arg11));
9763 tree arglist = tree_cons(NULL_TREE, arg10,
9764 build_tree_list (NULL_TREE, neg11));
9765 arg1 = build_function_call_expr (powfn, arglist);
9766 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9771 case TRUNC_DIV_EXPR:
9772 case FLOOR_DIV_EXPR:
9773 /* Simplify A / (B << N) where A and B are positive and B is
9774 a power of 2, to A >> (N + log2(B)). */
9775 if (TREE_CODE (arg1) == LSHIFT_EXPR
9776 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9778 tree sval = TREE_OPERAND (arg1, 0);
9779 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9781 tree sh_cnt = TREE_OPERAND (arg1, 1);
9782 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9784 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9785 sh_cnt, build_int_cst (NULL_TREE, pow2));
9786 return fold_build2 (RSHIFT_EXPR, type,
9787 fold_convert (type, arg0), sh_cnt);
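/* For example, with unsigned A, A / (4 << N) becomes A >> (N + 2),
   since log2(4) == 2.  */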
9792 case ROUND_DIV_EXPR:
9794 case EXACT_DIV_EXPR:
9795 if (integer_onep (arg1))
9796 return non_lvalue (fold_convert (type, arg0));
9797 if (integer_zerop (arg1))
9800 if (!TYPE_UNSIGNED (type)
9801 && TREE_CODE (arg1) == INTEGER_CST
9802 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9803 && TREE_INT_CST_HIGH (arg1) == -1)
9804 return fold_convert (type, negate_expr (arg0));
9806 /* Convert -A / -B to A / B when the type is signed and overflow is
9808 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9809 && TREE_CODE (arg0) == NEGATE_EXPR
9810 && negate_expr_p (arg1))
9811 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9812 negate_expr (arg1));
9813 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9814 && TREE_CODE (arg1) == NEGATE_EXPR
9815 && negate_expr_p (arg0))
9816 return fold_build2 (code, type, negate_expr (arg0),
9817 TREE_OPERAND (arg1, 0));
9819 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9820 operation, EXACT_DIV_EXPR.
9822 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9823 At one time others generated faster code; it's not clear whether they do
9824 after the last round of changes to the DIV code in expmed.c.  */
9825 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9826 && multiple_of_p (type, arg0, arg1))
9827 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9829 if (TREE_CODE (arg1) == INTEGER_CST
9830 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9831 return fold_convert (type, tem);
9836 case FLOOR_MOD_EXPR:
9837 case ROUND_MOD_EXPR:
9838 case TRUNC_MOD_EXPR:
9839 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
9841 if (integer_onep (arg1))
9842 return omit_one_operand (type, integer_zero_node, arg0);
9844 /* X % 0, return X % 0 unchanged so that we can get the
9845 proper warnings and errors. */
9846 if (integer_zerop (arg1))
9849 /* 0 % X is always zero, but be sure to preserve any side
9850 effects in X. Place this after checking for X == 0. */
9851 if (integer_zerop (arg0))
9852 return omit_one_operand (type, integer_zero_node, arg1);
9854 /* X % -1 is zero. */
9855 if (!TYPE_UNSIGNED (type)
9856 && TREE_CODE (arg1) == INTEGER_CST
9857 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9858 && TREE_INT_CST_HIGH (arg1) == -1)
9859 return omit_one_operand (type, integer_zero_node, arg0);
9861 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9862 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9863 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9864 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9867 /* Also optimize A % (C << N) where C is a power of 2,
9868 to A & ((C << N) - 1). */
9869 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9870 c = TREE_OPERAND (arg1, 0);
9872 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9874 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9875 arg1, integer_one_node);
9876 return fold_build2 (BIT_AND_EXPR, type,
9877 fold_convert (type, arg0),
9878 fold_convert (type, mask));
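/* For example, with unsigned X, X % 8 becomes X & 7, and
   X % (4 << N) becomes X & ((4 << N) - 1).  */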
9882 /* X % -C is the same as X % C. */
9883 if (code == TRUNC_MOD_EXPR
9884 && !TYPE_UNSIGNED (type)
9885 && TREE_CODE (arg1) == INTEGER_CST
9886 && !TREE_CONSTANT_OVERFLOW (arg1)
9887 && TREE_INT_CST_HIGH (arg1) < 0
9889 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9890 && !sign_bit_p (arg1, arg1))
9891 return fold_build2 (code, type, fold_convert (type, arg0),
9892 fold_convert (type, negate_expr (arg1)));
9894 /* X % -Y is the same as X % Y. */
9895 if (code == TRUNC_MOD_EXPR
9896 && !TYPE_UNSIGNED (type)
9897 && TREE_CODE (arg1) == NEGATE_EXPR
9899 return fold_build2 (code, type, fold_convert (type, arg0),
9900 fold_convert (type, TREE_OPERAND (arg1, 0)));
9902 if (TREE_CODE (arg1) == INTEGER_CST
9903 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9904 return fold_convert (type, tem);
9910 if (integer_all_onesp (arg0))
9911 return omit_one_operand (type, arg0, arg1);
9915 /* Optimize -1 >> x for arithmetic right shifts. */
9916 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9917 return omit_one_operand (type, arg0, arg1);
9918 /* ... fall through ... */
9922 if (integer_zerop (arg1))
9923 return non_lvalue (fold_convert (type, arg0));
9924 if (integer_zerop (arg0))
9925 return omit_one_operand (type, arg0, arg1);
9927 /* Since negative shift count is not well-defined,
9928 don't try to compute it in the compiler. */
9929 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9932 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9933 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9934 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9935 && host_integerp (TREE_OPERAND (arg0, 1), false)
9936 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9938 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9939 + TREE_INT_CST_LOW (arg1));
9941 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9942 being well defined. */
9943 if (low >= TYPE_PRECISION (type))
9945 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9946 low = low % TYPE_PRECISION (type);
9947 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9948 return build_int_cst (type, 0);
9950 low = TYPE_PRECISION (type) - 1;
9953 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9954 build_int_cst (type, low));
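/* For example, (X << 20) << 20 in a 32-bit unsigned type folds to 0,
   since the combined count of 40 is at least the precision; for
   rotates the combined count is instead reduced modulo the precision.  */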
9957 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9958 into x & ((unsigned)-1 >> c) for unsigned types. */
9959 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9960 || (TYPE_UNSIGNED (type)
9961 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9962 && host_integerp (arg1, false)
9963 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9964 && host_integerp (TREE_OPERAND (arg0, 1), false)
9965 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9967 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9968 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9974 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9976 lshift = build_int_cst (type, -1);
9977 lshift = int_const_binop (code, lshift, arg1, 0);
9979 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
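/* For example, for a 32-bit type, (X >> 4) << 4 becomes X & -16, and
   for unsigned X, (X << 4) >> 4 becomes X & 0x0fffffff.  */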
9983 /* Rewrite an LROTATE_EXPR by a constant into an
9984 RROTATE_EXPR by a new constant. */
9985 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9987 tree tem = build_int_cst (NULL_TREE,
9988 GET_MODE_BITSIZE (TYPE_MODE (type)));
9989 tem = fold_convert (TREE_TYPE (arg1), tem);
9990 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9991 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
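/* For example, for a 32-bit type, a left rotate by 3 becomes a right
   rotate by 29, so later transformations only need to handle a single
   rotate direction for constant counts.  */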
9994 /* If we have a rotate of a bit operation with the rotate count and
9995 the second operand of the bit operation both constant,
9996 permute the two operations. */
9997 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9998 && (TREE_CODE (arg0) == BIT_AND_EXPR
9999 || TREE_CODE (arg0) == BIT_IOR_EXPR
10000 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10001 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10002 return fold_build2 (TREE_CODE (arg0), type,
10003 fold_build2 (code, type,
10004 TREE_OPERAND (arg0, 0), arg1),
10005 fold_build2 (code, type,
10006 TREE_OPERAND (arg0, 1), arg1));
10008 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
10010 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10011 && TREE_CODE (arg0) == RROTATE_EXPR
10012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10013 && TREE_INT_CST_HIGH (arg1) == 0
10014 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10015 && ((TREE_INT_CST_LOW (arg1)
10016 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10017 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10018 return TREE_OPERAND (arg0, 0);
10023 if (operand_equal_p (arg0, arg1, 0))
10024 return omit_one_operand (type, arg0, arg1);
10025 if (INTEGRAL_TYPE_P (type)
10026 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10027 return omit_one_operand (type, arg1, arg0);
10028 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10034 if (operand_equal_p (arg0, arg1, 0))
10035 return omit_one_operand (type, arg0, arg1);
10036 if (INTEGRAL_TYPE_P (type)
10037 && TYPE_MAX_VALUE (type)
10038 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10039 return omit_one_operand (type, arg1, arg0);
10040 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10045 case TRUTH_ANDIF_EXPR:
10046 /* Note that the operands of this must be ints
10047 and their values must be 0 or 1.
10048 ("true" is a fixed value perhaps depending on the language.) */
10049 /* If first arg is constant zero, return it. */
10050 if (integer_zerop (arg0))
10051 return fold_convert (type, arg0);
10052 case TRUTH_AND_EXPR:
10053 /* If either arg is constant true, drop it. */
10054 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10055 return non_lvalue (fold_convert (type, arg1));
10056 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10057 /* Preserve sequence points. */
10058 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10059 return non_lvalue (fold_convert (type, arg0));
10060 /* If second arg is constant zero, result is zero, but first arg
10061 must be evaluated. */
10062 if (integer_zerop (arg1))
10063 return omit_one_operand (type, arg1, arg0);
10064 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10065 case will be handled here. */
10066 if (integer_zerop (arg0))
10067 return omit_one_operand (type, arg0, arg1);
10069 /* !X && X is always false. */
10070 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10071 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10072 return omit_one_operand (type, integer_zero_node, arg1);
10073 /* X && !X is always false. */
10074 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10075 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10076 return omit_one_operand (type, integer_zero_node, arg0);
10078 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10079 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
10082 if (!TREE_SIDE_EFFECTS (arg0)
10083 && !TREE_SIDE_EFFECTS (arg1))
10085 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10086 if (tem && !operand_equal_p (tem, arg0, 0))
10087 return fold_build2 (code, type, tem, arg1);
10089 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10090 if (tem && !operand_equal_p (tem, arg1, 0))
10091 return fold_build2 (code, type, arg0, tem);
10095 /* We only do these simplifications if we are optimizing. */
10099 /* Check for things like (A || B) && (A || C). We can convert this
10100 to A || (B && C). Note that either operator can be any of the four
10101 truth and/or operations and the transformation will still be
10102 valid. Also note that we only care about order for the
10103 ANDIF and ORIF operators. If B contains side effects, this
10104 might change the truth-value of A. */
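	  /* For example (illustrative): (a || b) && (a || c) folds to
	     a || (b && c), and (a && b) || (a && c) folds to a && (b || c),
	     provided the shared operand matches and the dropped copy has no
	     side effects.  */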
10105 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10106 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10107 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10108 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10109 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10110 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10112 tree a00 = TREE_OPERAND (arg0, 0);
10113 tree a01 = TREE_OPERAND (arg0, 1);
10114 tree a10 = TREE_OPERAND (arg1, 0);
10115 tree a11 = TREE_OPERAND (arg1, 1);
10116 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10117 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10118 && (code == TRUTH_AND_EXPR
10119 || code == TRUTH_OR_EXPR));
10121 if (operand_equal_p (a00, a10, 0))
10122 return fold_build2 (TREE_CODE (arg0), type, a00,
10123 fold_build2 (code, type, a01, a11));
10124 else if (commutative && operand_equal_p (a00, a11, 0))
10125 return fold_build2 (TREE_CODE (arg0), type, a00,
10126 fold_build2 (code, type, a01, a10));
10127 else if (commutative && operand_equal_p (a01, a10, 0))
10128 return fold_build2 (TREE_CODE (arg0), type, a01,
10129 fold_build2 (code, type, a00, a11));
10131 /* This case is tricky because we must either have commutative
10132 operators or else A10 must not have side-effects. */
10134 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10135 && operand_equal_p (a01, a11, 0))
10136 return fold_build2 (TREE_CODE (arg0), type,
10137 fold_build2 (code, type, a00, a10),
10141 /* See if we can build a range comparison. */
10142 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10145 /* Check for the possibility of merging component references. If our
10146 lhs is another similar operation, try to merge its rhs with our
10147 rhs. Then try to merge our lhs and rhs. */
10148 if (TREE_CODE (arg0) == code
10149 && 0 != (tem = fold_truthop (code, type,
10150 TREE_OPERAND (arg0, 1), arg1)))
10151 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10153 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10158 case TRUTH_ORIF_EXPR:
10159 /* Note that the operands of this must be ints
10160 and their values must be 0 or true.
10161 ("true" is a fixed value perhaps depending on the language.) */
10162 /* If first arg is constant true, return it. */
10163 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10164 return fold_convert (type, arg0);
10165 case TRUTH_OR_EXPR:
10166 /* If either arg is constant zero, drop it. */
10167 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10168 return non_lvalue (fold_convert (type, arg1));
10169 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10170 /* Preserve sequence points. */
10171 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10172 return non_lvalue (fold_convert (type, arg0));
10173 /* If second arg is constant true, result is true, but we must
10174 evaluate first arg. */
10175 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10176 return omit_one_operand (type, arg1, arg0);
10177 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
10179 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10180 return omit_one_operand (type, arg0, arg1);
10182 /* !X || X is always true. */
10183 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10185 return omit_one_operand (type, integer_one_node, arg1);
10186 /* X || !X is always true. */
10187 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10188 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10189 return omit_one_operand (type, integer_one_node, arg0);
10193 case TRUTH_XOR_EXPR:
10194 /* If the second arg is constant zero, drop it. */
10195 if (integer_zerop (arg1))
10196 return non_lvalue (fold_convert (type, arg0));
10197 /* If the second arg is constant true, this is a logical inversion. */
10198 if (integer_onep (arg1))
10200 /* Only call invert_truthvalue if operand is a truth value. */
10201 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10202 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10204 tem = invert_truthvalue (arg0);
10205 return non_lvalue (fold_convert (type, tem));
10207 /* Identical arguments cancel to zero. */
10208 if (operand_equal_p (arg0, arg1, 0))
10209 return omit_one_operand (type, integer_zero_node, arg0);
10211 /* !X ^ X is always true. */
10212 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10214 return omit_one_operand (type, integer_one_node, arg1);
10216 /* X ^ !X is always true. */
10217 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10218 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10219 return omit_one_operand (type, integer_one_node, arg0);
10225 tem = fold_comparison (code, type, op0, op1);
10226 if (tem != NULL_TREE)
10229 /* bool_var != 0 becomes bool_var. */
10230 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10231 && code == NE_EXPR)
10232 return non_lvalue (fold_convert (type, arg0));
10234 /* bool_var == 1 becomes bool_var. */
10235 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10236 && code == EQ_EXPR)
10237 return non_lvalue (fold_convert (type, arg0));
10239 /* bool_var != 1 becomes !bool_var. */
10240 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10241 && code == NE_EXPR)
10242 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10244 /* bool_var == 0 becomes !bool_var. */
10245 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10246 && code == EQ_EXPR)
10247 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10249 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10250 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10251 && TREE_CODE (arg1) == INTEGER_CST)
10252 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10253 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10256 /* If this is an equality comparison of the address of a non-weak
10257 object against zero, then we know the result. */
10258 if (TREE_CODE (arg0) == ADDR_EXPR
10259 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10260 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10261 && integer_zerop (arg1))
10262 return constant_boolean_node (code != EQ_EXPR, type);
10264 /* If this is an equality comparison of the address of two non-weak,
10265 unaliased symbols neither of which are extern (since we do not
10266 have access to attributes for externs), then we know the result. */
10267 if (TREE_CODE (arg0) == ADDR_EXPR
10268 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10269 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10270 && ! lookup_attribute ("alias",
10271 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10272 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10273 && TREE_CODE (arg1) == ADDR_EXPR
10274 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10275 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10276 && ! lookup_attribute ("alias",
10277 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10278 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10280 /* We know that we're looking at the address of two
10281 non-weak, unaliased, static _DECL nodes.
10283 It is both wasteful and incorrect to call operand_equal_p
10284 to compare the two ADDR_EXPR nodes. It is wasteful in that
10285 all we need to do is test pointer equality for the arguments
10286 to the two ADDR_EXPR nodes. It is incorrect to use
10287 operand_equal_p as that function is NOT equivalent to a
10288 C equality test. It can in fact return false for two
10289 objects which would test as equal using the C equality operator.  */
10291 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10292 return constant_boolean_node (equal
10293 ? code == EQ_EXPR : code != EQ_EXPR,
10297 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10298 a MINUS_EXPR of a constant, we can convert it into a comparison with
10299 a revised constant as long as no overflow occurs. */
10300 if (TREE_CODE (arg1) == INTEGER_CST
10301 && (TREE_CODE (arg0) == PLUS_EXPR
10302 || TREE_CODE (arg0) == MINUS_EXPR)
10303 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10304 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10305 ? MINUS_EXPR : PLUS_EXPR,
10306 arg1, TREE_OPERAND (arg0, 1), 0))
10307 && ! TREE_CONSTANT_OVERFLOW (tem))
10308 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10310 /* Similarly for a NEGATE_EXPR. */
10311 if (TREE_CODE (arg0) == NEGATE_EXPR
10312 && TREE_CODE (arg1) == INTEGER_CST
10313 && 0 != (tem = negate_expr (arg1))
10314 && TREE_CODE (tem) == INTEGER_CST
10315 && ! TREE_CONSTANT_OVERFLOW (tem))
10316 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10318 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10319 for !=. Don't do this for ordered comparisons due to overflow. */
10320 if (TREE_CODE (arg0) == MINUS_EXPR
10321 && integer_zerop (arg1))
10322 return fold_build2 (code, type,
10323 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10325 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10326 if (TREE_CODE (arg0) == ABS_EXPR
10327 && (integer_zerop (arg1) || real_zerop (arg1)))
10328 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10330 /* If this is an EQ or NE comparison with zero and ARG0 is
10331 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10332 two operations, but the latter can be done in one less insn
10333 on machines that have only two-operand insns or on which a
10334 constant cannot be the first operand. */
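	  /* For example (illustrative): ((1 << n) & flags) != 0 folds to
	     ((flags >> n) & 1) != 0, which needs one instruction fewer on
	     such machines.  */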
10335 if (TREE_CODE (arg0) == BIT_AND_EXPR
10336 && integer_zerop (arg1))
10338 tree arg00 = TREE_OPERAND (arg0, 0);
10339 tree arg01 = TREE_OPERAND (arg0, 1);
10340 if (TREE_CODE (arg00) == LSHIFT_EXPR
10341 && integer_onep (TREE_OPERAND (arg00, 0)))
10343 fold_build2 (code, type,
10344 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10345 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10346 arg01, TREE_OPERAND (arg00, 1)),
10347 fold_convert (TREE_TYPE (arg0),
10348 integer_one_node)),
10350 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10351 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10353 fold_build2 (code, type,
10354 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10355 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10356 arg00, TREE_OPERAND (arg01, 1)),
10357 fold_convert (TREE_TYPE (arg0),
10358 integer_one_node)),
10362 /* If this is an NE or EQ comparison of zero against the result of a
10363 signed MOD operation whose second operand is a power of 2, make
10364 the MOD operation unsigned since it is simpler and equivalent. */
10365 if (integer_zerop (arg1)
10366 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10367 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10368 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10369 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10370 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10371 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10373 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10374 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10375 fold_convert (newtype,
10376 TREE_OPERAND (arg0, 0)),
10377 fold_convert (newtype,
10378 TREE_OPERAND (arg0, 1)));
10380 return fold_build2 (code, type, newmod,
10381 fold_convert (newtype, arg1));
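	  /* For example (illustrative): for a signed int x, (x % 4) == 0
	     folds to ((unsigned int) x % 4U) == 0; the test against zero
	     gives the same answer either way, and the unsigned modulus is
	     cheaper to expand.  */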
10384 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10385 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
10387 if (TREE_CODE (arg0) == BIT_AND_EXPR
10388 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10389 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10391 && integer_pow2p (TREE_OPERAND (arg0, 1))
10392 && integer_zerop (arg1))
10394 tree itype = TREE_TYPE (arg0);
10395 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10396 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10398 /* Check for a valid shift count. */
10399 if (TREE_INT_CST_HIGH (arg001) == 0
10400 && TREE_INT_CST_LOW (arg001) < prec)
10402 tree arg01 = TREE_OPERAND (arg0, 1);
10403 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10404 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10405 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10406 can be rewritten as (X & (C2 << C1)) != 0. */
10407 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10409 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10410 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10411 return fold_build2 (code, type, tem, arg1);
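	      /* For example (illustrative, 32-bit int x): ((x >> 3) & 4) != 0
		 folds to (x & 32) != 0, because 4 << 3 still fits in the
		 precision of the type.  */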
10413 /* Otherwise, for signed (arithmetic) shifts,
10414 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10415 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10416 else if (!TYPE_UNSIGNED (itype))
10417 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10418 arg000, build_int_cst (itype, 0));
10419 /* Otherwise, for unsigned (logical) shifts,
10420 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10421 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10423 return omit_one_operand (type,
10424 code == EQ_EXPR ? integer_one_node
10425 : integer_zero_node,
10430 /* If this is an NE comparison of zero with an AND of one, remove the
10431 comparison since the AND will give the correct value. */
10432 if (code == NE_EXPR
10433 && integer_zerop (arg1)
10434 && TREE_CODE (arg0) == BIT_AND_EXPR
10435 && integer_onep (TREE_OPERAND (arg0, 1)))
10436 return fold_convert (type, arg0);
10438 /* If we have (A & C) == C where C is a power of 2, convert this into
10439 (A & C) != 0. Similarly for NE_EXPR. */
10440 if (TREE_CODE (arg0) == BIT_AND_EXPR
10441 && integer_pow2p (TREE_OPERAND (arg0, 1))
10442 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10443 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10444 arg0, fold_convert (TREE_TYPE (arg0),
10445 integer_zero_node));
10447 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10448 bit, then fold the expression into A < 0 or A >= 0. */
10449 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10453 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10454 Similarly for NE_EXPR. */
10455 if (TREE_CODE (arg0) == BIT_AND_EXPR
10456 && TREE_CODE (arg1) == INTEGER_CST
10457 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10459 tree notc = fold_build1 (BIT_NOT_EXPR,
10460 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10461 TREE_OPERAND (arg0, 1));
10462 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10464 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10465 if (integer_nonzerop (dandnotc))
10466 return omit_one_operand (type, rslt, arg0);
10469 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10470 Similarly for NE_EXPR. */
10471 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10472 && TREE_CODE (arg1) == INTEGER_CST
10473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10475 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10476 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10477 TREE_OPERAND (arg0, 1), notd);
10478 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10479 if (integer_nonzerop (candnotd))
10480 return omit_one_operand (type, rslt, arg0);
10483 /* If this is a comparison of a field, we may be able to simplify it. */
10484 if (((TREE_CODE (arg0) == COMPONENT_REF
10485 && lang_hooks.can_use_bit_fields_p ())
10486 || TREE_CODE (arg0) == BIT_FIELD_REF)
10487 /* Handle the constant case even without -O
10488 to make sure the warnings are given. */
10489 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10491 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10496 /* Optimize comparisons of strlen vs zero to a compare of the
10497 first character of the string vs zero. To wit,
10498 strlen(ptr) == 0 => *ptr == 0
10499 strlen(ptr) != 0 => *ptr != 0
10500 Other cases should reduce to one of these two (or a constant)
10501 due to the return value of strlen being unsigned. */
10502 if (TREE_CODE (arg0) == CALL_EXPR
10503 && integer_zerop (arg1))
10505 tree fndecl = get_callee_fndecl (arg0);
10509 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10510 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10511 && (arglist = TREE_OPERAND (arg0, 1))
10512 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10513 && ! TREE_CHAIN (arglist))
10515 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10516 return fold_build2 (code, type, iref,
10517 build_int_cst (TREE_TYPE (iref), 0));
10521 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10522 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10523 if (TREE_CODE (arg0) == RSHIFT_EXPR
10524 && integer_zerop (arg1)
10525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10527 tree arg00 = TREE_OPERAND (arg0, 0);
10528 tree arg01 = TREE_OPERAND (arg0, 1);
10529 tree itype = TREE_TYPE (arg00);
10530 if (TREE_INT_CST_HIGH (arg01) == 0
10531 && TREE_INT_CST_LOW (arg01)
10532 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10534 if (TYPE_UNSIGNED (itype))
10536 itype = lang_hooks.types.signed_type (itype);
10537 arg00 = fold_convert (itype, arg00);
10539 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10540 type, arg00, build_int_cst (itype, 0));
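	  /* For example (illustrative): for a 32-bit unsigned x,
	     (x >> 31) != 0 folds to (int) x < 0, and (x >> 31) == 0 folds
	     to (int) x >= 0.  */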
10544 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10545 if (integer_zerop (arg1)
10546 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10547 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10548 TREE_OPERAND (arg0, 1));
10550 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10551 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10552 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10553 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10554 build_int_cst (TREE_TYPE (arg1), 0));
10555 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10556 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10557 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10558 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10559 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10560 build_int_cst (TREE_TYPE (arg1), 0));
10562 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10563 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10564 && TREE_CODE (arg1) == INTEGER_CST
10565 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10566 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10567 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10568 TREE_OPERAND (arg0, 1), arg1));
10570 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10571 (X & C) == 0 when C is a single bit. */
10572 if (TREE_CODE (arg0) == BIT_AND_EXPR
10573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10574 && integer_zerop (arg1)
10575 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10577 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10578 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10579 TREE_OPERAND (arg0, 1));
10580 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10584 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10585 constant C is a power of two, i.e. a single bit. */
10586 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10587 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10588 && integer_zerop (arg1)
10589 && integer_pow2p (TREE_OPERAND (arg0, 1))
10590 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10591 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10593 tree arg00 = TREE_OPERAND (arg0, 0);
10594 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10595 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10598 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10599 when C is a power of two, i.e. a single bit.  */
10600 if (TREE_CODE (arg0) == BIT_AND_EXPR
10601 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10602 && integer_zerop (arg1)
10603 && integer_pow2p (TREE_OPERAND (arg0, 1))
10604 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10605 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10607 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10608 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10609 arg000, TREE_OPERAND (arg0, 1));
10610 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10611 tem, build_int_cst (TREE_TYPE (tem), 0));
10614 if (integer_zerop (arg1)
10615 && tree_expr_nonzero_p (arg0))
10617 tree res = constant_boolean_node (code == NE_EXPR, type);
10618 return omit_one_operand (type, res, arg0);
10626 tem = fold_comparison (code, type, op0, op1);
10627 if (tem != NULL_TREE)
10630 /* Transform comparisons of the form X +- C CMP X. */
10631 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10632 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10633 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10634 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10635 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10636 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10637 && !(flag_wrapv || flag_trapv))))
10639 tree arg01 = TREE_OPERAND (arg0, 1);
10640 enum tree_code code0 = TREE_CODE (arg0);
10643 if (TREE_CODE (arg01) == REAL_CST)
10644 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10646 is_positive = tree_int_cst_sgn (arg01);
10648 /* (X - c) > X becomes false. */
10649 if (code == GT_EXPR
10650 && ((code0 == MINUS_EXPR && is_positive >= 0)
10651 || (code0 == PLUS_EXPR && is_positive <= 0)))
10652 return constant_boolean_node (0, type);
10654 /* Likewise (X + c) < X becomes false. */
10655 if (code == LT_EXPR
10656 && ((code0 == PLUS_EXPR && is_positive >= 0)
10657 || (code0 == MINUS_EXPR && is_positive <= 0)))
10658 return constant_boolean_node (0, type);
10660 /* Convert (X - c) <= X to true. */
10661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10663 && ((code0 == MINUS_EXPR && is_positive >= 0)
10664 || (code0 == PLUS_EXPR && is_positive <= 0)))
10665 return constant_boolean_node (1, type);
10667 /* Convert (X + c) >= X to true. */
10668 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10670 && ((code0 == PLUS_EXPR && is_positive >= 0)
10671 || (code0 == MINUS_EXPR && is_positive <= 0)))
10672 return constant_boolean_node (1, type);
10674 if (TREE_CODE (arg01) == INTEGER_CST)
10676 /* Convert X + c > X and X - c < X to true for integers. */
10677 if (code == GT_EXPR
10678 && ((code0 == PLUS_EXPR && is_positive > 0)
10679 || (code0 == MINUS_EXPR && is_positive < 0)))
10680 return constant_boolean_node (1, type);
10682 if (code == LT_EXPR
10683 && ((code0 == MINUS_EXPR && is_positive > 0)
10684 || (code0 == PLUS_EXPR && is_positive < 0)))
10685 return constant_boolean_node (1, type);
10687 /* Convert X + c <= X and X - c >= X to false for integers. */
10688 if (code == LE_EXPR
10689 && ((code0 == PLUS_EXPR && is_positive > 0)
10690 || (code0 == MINUS_EXPR && is_positive < 0)))
10691 return constant_boolean_node (0, type);
10693 if (code == GE_EXPR
10694 && ((code0 == MINUS_EXPR && is_positive > 0)
10695 || (code0 == PLUS_EXPR && is_positive < 0)))
10696 return constant_boolean_node (0, type);
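	      /* For example (illustrative, signed int x without -fwrapv):
		 x + 1 > x folds to 1, x - 1 < x folds to 1, and
		 x + 1 <= x folds to 0, because the additions cannot wrap.  */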
10700 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10701 This transformation affects the cases which are handled in later
10702 optimizations involving comparisons with non-negative constants. */
10703 if (TREE_CODE (arg1) == INTEGER_CST
10704 && TREE_CODE (arg0) != INTEGER_CST
10705 && tree_int_cst_sgn (arg1) > 0)
10707 if (code == GE_EXPR)
10709 arg1 = const_binop (MINUS_EXPR, arg1,
10710 build_int_cst (TREE_TYPE (arg1), 1), 0);
10711 return fold_build2 (GT_EXPR, type, arg0,
10712 fold_convert (TREE_TYPE (arg0), arg1));
10714 if (code == LT_EXPR)
10716 arg1 = const_binop (MINUS_EXPR, arg1,
10717 build_int_cst (TREE_TYPE (arg1), 1), 0);
10718 return fold_build2 (LE_EXPR, type, arg0,
10719 fold_convert (TREE_TYPE (arg0), arg1));
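	  /* For example (illustrative): x >= 5 is rewritten as x > 4 and
	     x < 5 as x <= 4, so later code only needs to handle the GT/LE
	     forms against the adjusted constant.  */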
10723 /* Comparisons with the highest or lowest possible integer of
10724 the specified size will have known values. */
10726 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10728 if (TREE_CODE (arg1) == INTEGER_CST
10729 && ! TREE_CONSTANT_OVERFLOW (arg1)
10730 && width <= 2 * HOST_BITS_PER_WIDE_INT
10731 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10732 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10734 HOST_WIDE_INT signed_max_hi;
10735 unsigned HOST_WIDE_INT signed_max_lo;
10736 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10738 if (width <= HOST_BITS_PER_WIDE_INT)
10740 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10745 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10747 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10753 max_lo = signed_max_lo;
10754 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10760 width -= HOST_BITS_PER_WIDE_INT;
10761 signed_max_lo = -1;
10762 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10767 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10769 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10774 max_hi = signed_max_hi;
10775 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10779 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10780 && TREE_INT_CST_LOW (arg1) == max_lo)
10784 return omit_one_operand (type, integer_zero_node, arg0);
10787 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10790 return omit_one_operand (type, integer_one_node, arg0);
10793 return fold_build2 (NE_EXPR, type, arg0, arg1);
10795 /* The GE_EXPR and LT_EXPR cases above are not normally
10796 reached because of previous transformations. */
10801 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10803 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10807 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10808 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10810 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10811 return fold_build2 (NE_EXPR, type, arg0, arg1);
10815 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10817 && TREE_INT_CST_LOW (arg1) == min_lo)
10821 return omit_one_operand (type, integer_zero_node, arg0);
10824 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10827 return omit_one_operand (type, integer_one_node, arg0);
10830 return fold_build2 (NE_EXPR, type, op0, op1);
10835 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10837 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10841 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10842 return fold_build2 (NE_EXPR, type, arg0, arg1);
10844 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10845 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10850 else if (!in_gimple_form
10851 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10852 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10853 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10854 /* signed_type does not work on pointer types. */
10855 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10857 /* The following case also applies to X < signed_max+1
10858 and X >= signed_max+1 because of previous transformations.  */
10859 if (code == LE_EXPR || code == GT_EXPR)
10862 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10863 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10864 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10865 type, fold_convert (st0, arg0),
10866 build_int_cst (st1, 0));
10872 /* If we are comparing an ABS_EXPR with a constant, we can
10873 convert all the cases into explicit comparisons, but they may
10874 well not be faster than doing the ABS and one comparison.
10875 But ABS (X) <= C is a range comparison, which becomes a subtraction
10876 and a comparison, and is probably faster. */
10877 if (code == LE_EXPR
10878 && TREE_CODE (arg1) == INTEGER_CST
10879 && TREE_CODE (arg0) == ABS_EXPR
10880 && ! TREE_SIDE_EFFECTS (arg0)
10881 && (0 != (tem = negate_expr (arg1)))
10882 && TREE_CODE (tem) == INTEGER_CST
10883 && ! TREE_CONSTANT_OVERFLOW (tem))
10884 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10885 build2 (GE_EXPR, type,
10886 TREE_OPERAND (arg0, 0), tem),
10887 build2 (LE_EXPR, type,
10888 TREE_OPERAND (arg0, 0), arg1));
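      /* For example (illustrative): abs (x) <= 10 folds to
	 x >= -10 && x <= 10, the range form described above.  */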
10890 /* Convert ABS_EXPR<x> >= 0 to true. */
10891 if (code == GE_EXPR
10892 && tree_expr_nonnegative_p (arg0)
10893 && (integer_zerop (arg1)
10894 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10895 && real_zerop (arg1))))
10896 return omit_one_operand (type, integer_one_node, arg0);
10898 /* Convert ABS_EXPR<x> < 0 to false. */
10899 if (code == LT_EXPR
10900 && tree_expr_nonnegative_p (arg0)
10901 && (integer_zerop (arg1) || real_zerop (arg1)))
10902 return omit_one_operand (type, integer_zero_node, arg0);
10904 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10905 and similarly for >= into !=. */
10906 if ((code == LT_EXPR || code == GE_EXPR)
10907 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10908 && TREE_CODE (arg1) == LSHIFT_EXPR
10909 && integer_onep (TREE_OPERAND (arg1, 0)))
10910 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10911 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10912 TREE_OPERAND (arg1, 1)),
10913 build_int_cst (TREE_TYPE (arg0), 0));
10915 if ((code == LT_EXPR || code == GE_EXPR)
10916 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10917 && (TREE_CODE (arg1) == NOP_EXPR
10918 || TREE_CODE (arg1) == CONVERT_EXPR)
10919 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10920 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10922 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10923 fold_convert (TREE_TYPE (arg0),
10924 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10925 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10927 build_int_cst (TREE_TYPE (arg0), 0));
10931 case UNORDERED_EXPR:
10939 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10941 t1 = fold_relational_const (code, type, arg0, arg1);
10942 if (t1 != NULL_TREE)
10946 /* If the first operand is NaN, the result is constant. */
10947 if (TREE_CODE (arg0) == REAL_CST
10948 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10949 && (code != LTGT_EXPR || ! flag_trapping_math))
10951 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10952 ? integer_zero_node
10953 : integer_one_node;
10954 return omit_one_operand (type, t1, arg1);
10957 /* If the second operand is NaN, the result is constant. */
10958 if (TREE_CODE (arg1) == REAL_CST
10959 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10960 && (code != LTGT_EXPR || ! flag_trapping_math))
10962 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10963 ? integer_zero_node
10964 : integer_one_node;
10965 return omit_one_operand (type, t1, arg0);
10968 /* Simplify unordered comparison of something with itself. */
10969 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10970 && operand_equal_p (arg0, arg1, 0))
10971 return constant_boolean_node (1, type);
10973 if (code == LTGT_EXPR
10974 && !flag_trapping_math
10975 && operand_equal_p (arg0, arg1, 0))
10976 return constant_boolean_node (0, type);
10978 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10980 tree targ0 = strip_float_extensions (arg0);
10981 tree targ1 = strip_float_extensions (arg1);
10982 tree newtype = TREE_TYPE (targ0);
10984 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10985 newtype = TREE_TYPE (targ1);
10987 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10988 return fold_build2 (code, type, fold_convert (newtype, targ0),
10989 fold_convert (newtype, targ1));
10994 case COMPOUND_EXPR:
10995 /* When pedantic, a compound expression can be neither an lvalue
10996 nor an integer constant expression. */
10997 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10999 /* Don't let (0, 0) be a null pointer constant.  */
11000 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11001 : fold_convert (type, arg1);
11002 return pedantic_non_lvalue (tem);
11005 if ((TREE_CODE (arg0) == REAL_CST
11006 && TREE_CODE (arg1) == REAL_CST)
11007 || (TREE_CODE (arg0) == INTEGER_CST
11008 && TREE_CODE (arg1) == INTEGER_CST))
11009 return build_complex (type, arg0, arg1);
11013 /* An ASSERT_EXPR should never be passed to fold_binary. */
11014 gcc_unreachable ();
11018 } /* switch (code) */
11021 /* Callback for walk_tree, looking for LABEL_EXPR.
11022 Returns *TP if it is a LABEL_EXPR; otherwise it returns NULL_TREE.
11023 Do not check the sub-tree of GOTO_EXPR. */
11026 contains_label_1 (tree *tp,
11027 int *walk_subtrees,
11028 void *data ATTRIBUTE_UNUSED)
11030 switch (TREE_CODE (*tp))
11035 *walk_subtrees = 0;
11042 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11043 accessible from outside the sub-tree. Returns NULL_TREE if no
11044 addressable label is found. */
11047 contains_label_p (tree st)
11049 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11052 /* Fold a ternary expression of code CODE and type TYPE with operands
11053 OP0, OP1, and OP2. Return the folded expression if folding is
11054 successful. Otherwise, return NULL_TREE. */
11057 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11060 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11061 enum tree_code_class kind = TREE_CODE_CLASS (code);
11063 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11064 && TREE_CODE_LENGTH (code) == 3);
11066 /* Strip any conversions that don't change the mode. This is safe
11067 for every expression, except for a comparison expression because
11068 its signedness is derived from its operands. So, in the latter
11069 case, only strip conversions that don't change the signedness.
11071 Note that this is done as an internal manipulation within the
11072 constant folder, in order to find the simplest representation of
11073 the arguments so that their form can be studied. In any case,
11074 the appropriate type conversions should be put back in the tree
11075 that will get out of the constant folder. */
11090 case COMPONENT_REF:
11091 if (TREE_CODE (arg0) == CONSTRUCTOR
11092 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11094 unsigned HOST_WIDE_INT idx;
11096 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11103 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11104 so all simple results must be passed through pedantic_non_lvalue. */
11105 if (TREE_CODE (arg0) == INTEGER_CST)
11107 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11108 tem = integer_zerop (arg0) ? op2 : op1;
11109 /* Only optimize constant conditions when the selected branch
11110 has the same type as the COND_EXPR. This avoids optimizing
11111 away "c ? x : throw", where the throw has a void type.
11112 Avoid throwing away the operand that contains a label.  */
11113 if ((!TREE_SIDE_EFFECTS (unused_op)
11114 || !contains_label_p (unused_op))
11115 && (! VOID_TYPE_P (TREE_TYPE (tem))
11116 || VOID_TYPE_P (type)))
11117 return pedantic_non_lvalue (tem);
11120 if (operand_equal_p (arg1, op2, 0))
11121 return pedantic_omit_one_operand (type, arg1, arg0);
11123 /* If we have A op B ? A : C, we may be able to convert this to a
11124 simpler expression, depending on the operation and the values
11125 of B and C. Signed zeros prevent all of these transformations,
11126 for reasons given above each one.
11128 Also try swapping the arguments and inverting the conditional. */
11129 if (COMPARISON_CLASS_P (arg0)
11130 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11131 arg1, TREE_OPERAND (arg0, 1))
11132 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11134 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11139 if (COMPARISON_CLASS_P (arg0)
11140 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11142 TREE_OPERAND (arg0, 1))
11143 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11145 tem = fold_truth_not_expr (arg0);
11146 if (tem && COMPARISON_CLASS_P (tem))
11148 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11154 /* If the second operand is simpler than the third, swap them
11155 since that produces better jump optimization results. */
11156 if (truth_value_p (TREE_CODE (arg0))
11157 && tree_swap_operands_p (op1, op2, false))
11159 /* See if this can be inverted. If it can't, possibly because
11160 it was a floating-point inequality comparison, don't do anything.  */
11162 tem = fold_truth_not_expr (arg0);
11164 return fold_build3 (code, type, tem, op2, op1);
11167 /* Convert A ? 1 : 0 to simply A. */
11168 if (integer_onep (op1)
11169 && integer_zerop (op2)
11170 /* If we try to convert OP0 to our type, the
11171 call to fold will try to move the conversion inside
11172 a COND, which will recurse. In that case, the COND_EXPR
11173 is probably the best choice, so leave it alone. */
11174 && type == TREE_TYPE (arg0))
11175 return pedantic_non_lvalue (arg0);
11177 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11178 over COND_EXPR in cases such as floating point comparisons. */
11179 if (integer_zerop (op1)
11180 && integer_onep (op2)
11181 && truth_value_p (TREE_CODE (arg0)))
11182 return pedantic_non_lvalue (fold_convert (type,
11183 invert_truthvalue (arg0)));
11185 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11186 if (TREE_CODE (arg0) == LT_EXPR
11187 && integer_zerop (TREE_OPERAND (arg0, 1))
11188 && integer_zerop (op2)
11189 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11190 return fold_convert (type,
11191 fold_build2 (BIT_AND_EXPR,
11192 TREE_TYPE (tem), tem,
11193 fold_convert (TREE_TYPE (tem), arg1)));
11195 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11196 already handled above. */
11197 if (TREE_CODE (arg0) == BIT_AND_EXPR
11198 && integer_onep (TREE_OPERAND (arg0, 1))
11199 && integer_zerop (op2)
11200 && integer_pow2p (arg1))
11202 tree tem = TREE_OPERAND (arg0, 0);
11204 if (TREE_CODE (tem) == RSHIFT_EXPR
11205 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11206 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11207 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11208 return fold_build2 (BIT_AND_EXPR, type,
11209 TREE_OPERAND (tem, 0), arg1);
11212 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11213 is probably obsolete because the first operand should be a
11214 truth value (that's why we have the two cases above), but let's
11215 leave it in until we can confirm this for all front-ends. */
11216 if (integer_zerop (op2)
11217 && TREE_CODE (arg0) == NE_EXPR
11218 && integer_zerop (TREE_OPERAND (arg0, 1))
11219 && integer_pow2p (arg1)
11220 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11221 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11222 arg1, OEP_ONLY_CONST))
11223 return pedantic_non_lvalue (fold_convert (type,
11224 TREE_OPERAND (arg0, 0)));
11226 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11227 if (integer_zerop (op2)
11228 && truth_value_p (TREE_CODE (arg0))
11229 && truth_value_p (TREE_CODE (arg1)))
11230 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11231 fold_convert (type, arg0),
11234 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11235 if (integer_onep (op2)
11236 && truth_value_p (TREE_CODE (arg0))
11237 && truth_value_p (TREE_CODE (arg1)))
11239 /* Only perform transformation if ARG0 is easily inverted. */
11240 tem = fold_truth_not_expr (arg0);
11242 return fold_build2 (TRUTH_ORIF_EXPR, type,
11243 fold_convert (type, tem),
11247 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11248 if (integer_zerop (arg1)
11249 && truth_value_p (TREE_CODE (arg0))
11250 && truth_value_p (TREE_CODE (op2)))
11252 /* Only perform transformation if ARG0 is easily inverted. */
11253 tem = fold_truth_not_expr (arg0);
11255 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11256 fold_convert (type, tem),
11260 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11261 if (integer_onep (arg1)
11262 && truth_value_p (TREE_CODE (arg0))
11263 && truth_value_p (TREE_CODE (op2)))
11264 return fold_build2 (TRUTH_ORIF_EXPR, type,
11265 fold_convert (type, arg0),
11271 /* Check for a built-in function. */
11272 if (TREE_CODE (op0) == ADDR_EXPR
11273 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11274 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11275 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11278 case BIT_FIELD_REF:
11279 if (TREE_CODE (arg0) == VECTOR_CST
11280 && type == TREE_TYPE (TREE_TYPE (arg0))
11281 && host_integerp (arg1, 1)
11282 && host_integerp (op2, 1))
11284 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11285 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11288 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11289 && (idx % width) == 0
11290 && (idx = idx / width)
11291 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11293 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11294 while (idx-- > 0 && elements)
11295 elements = TREE_CHAIN (elements);
11297 return TREE_VALUE (elements);
11299 return fold_convert (type, integer_zero_node);
11306 } /* switch (code) */
11309 /* Perform constant folding and related simplification of EXPR.
11310 The related simplifications include x*1 => x, x*0 => 0, etc.,
11311 and application of the associative law.
11312 NOP_EXPR conversions may be removed freely (as long as we
11313 are careful not to change the type of the overall expression).
11314 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11315 but we can constant-fold them if they have constant operands. */
11317 #ifdef ENABLE_FOLD_CHECKING
11318 # define fold(x) fold_1 (x)
11319 static tree fold_1 (tree);
11325 const tree t = expr;
11326 enum tree_code code = TREE_CODE (t);
11327 enum tree_code_class kind = TREE_CODE_CLASS (code);
11330 /* Return right away if a constant. */
11331 if (kind == tcc_constant)
11334 if (IS_EXPR_CODE_CLASS (kind))
11336 tree type = TREE_TYPE (t);
11337 tree op0, op1, op2;
11339 switch (TREE_CODE_LENGTH (code))
11342 op0 = TREE_OPERAND (t, 0);
11343 tem = fold_unary (code, type, op0);
11344 return tem ? tem : expr;
11346 op0 = TREE_OPERAND (t, 0);
11347 op1 = TREE_OPERAND (t, 1);
11348 tem = fold_binary (code, type, op0, op1);
11349 return tem ? tem : expr;
11351 op0 = TREE_OPERAND (t, 0);
11352 op1 = TREE_OPERAND (t, 1);
11353 op2 = TREE_OPERAND (t, 2);
11354 tem = fold_ternary (code, type, op0, op1, op2);
11355 return tem ? tem : expr;
11364 return fold (DECL_INITIAL (t));
11368 } /* switch (code) */
11371 #ifdef ENABLE_FOLD_CHECKING
11374 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11375 static void fold_check_failed (tree, tree);
11376 void print_fold_checksum (tree);
11378 /* When --enable-checking=fold, compute a digest of expr before
11379 and after the actual fold call, to verify that fold did not
11380 accidentally change the original expr.  */
11386 struct md5_ctx ctx;
11387 unsigned char checksum_before[16], checksum_after[16];
11390 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11391 md5_init_ctx (&ctx);
11392 fold_checksum_tree (expr, &ctx, ht);
11393 md5_finish_ctx (&ctx, checksum_before);
11396 ret = fold_1 (expr);
11398 md5_init_ctx (&ctx);
11399 fold_checksum_tree (expr, &ctx, ht);
11400 md5_finish_ctx (&ctx, checksum_after);
11403 if (memcmp (checksum_before, checksum_after, 16))
11404 fold_check_failed (expr, ret);
11410 print_fold_checksum (tree expr)
11412 struct md5_ctx ctx;
11413 unsigned char checksum[16], cnt;
11416 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11417 md5_init_ctx (&ctx);
11418 fold_checksum_tree (expr, &ctx, ht);
11419 md5_finish_ctx (&ctx, checksum);
11421 for (cnt = 0; cnt < 16; ++cnt)
11422 fprintf (stderr, "%02x", checksum[cnt]);
11423 putc ('\n', stderr);
11427 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11429 internal_error ("fold check: original tree changed by fold");
11433 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11436 enum tree_code code;
11437 struct tree_function_decl buf;
11442 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11443 <= sizeof (struct tree_function_decl))
11444 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11447 slot = htab_find_slot (ht, expr, INSERT);
11451 code = TREE_CODE (expr);
11452 if (TREE_CODE_CLASS (code) == tcc_declaration
11453 && DECL_ASSEMBLER_NAME_SET_P (expr))
11455 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11456 memcpy ((char *) &buf, expr, tree_size (expr));
11457 expr = (tree) &buf;
11458 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11460 else if (TREE_CODE_CLASS (code) == tcc_type
11461 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11462 || TYPE_CACHED_VALUES_P (expr)
11463 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11465 /* Allow these fields to be modified. */
11466 memcpy ((char *) &buf, expr, tree_size (expr));
11467 expr = (tree) &buf;
11468 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11469 TYPE_POINTER_TO (expr) = NULL;
11470 TYPE_REFERENCE_TO (expr) = NULL;
11471 if (TYPE_CACHED_VALUES_P (expr))
11473 TYPE_CACHED_VALUES_P (expr) = 0;
11474 TYPE_CACHED_VALUES (expr) = NULL;
11477 md5_process_bytes (expr, tree_size (expr), ctx);
11478 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11479 if (TREE_CODE_CLASS (code) != tcc_type
11480 && TREE_CODE_CLASS (code) != tcc_declaration
11481 && code != TREE_LIST)
11482 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11483 switch (TREE_CODE_CLASS (code))
11489 md5_process_bytes (TREE_STRING_POINTER (expr),
11490 TREE_STRING_LENGTH (expr), ctx);
11493 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11494 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11497 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11503 case tcc_exceptional:
11507 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11508 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11509 expr = TREE_CHAIN (expr);
11510 goto recursive_label;
11513 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11514 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11520 case tcc_expression:
11521 case tcc_reference:
11522 case tcc_comparison:
11525 case tcc_statement:
11526 len = TREE_CODE_LENGTH (code);
11527 for (i = 0; i < len; ++i)
11528 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11530 case tcc_declaration:
11531 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11532 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11533 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11535 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11536 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11537 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11538 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11539 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11541 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11542 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11544 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11546 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11547 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11548 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11552 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11553 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11554 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11555 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11556 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11557 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11558 if (INTEGRAL_TYPE_P (expr)
11559 || SCALAR_FLOAT_TYPE_P (expr))
11561 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11562 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11564 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11565 if (TREE_CODE (expr) == RECORD_TYPE
11566 || TREE_CODE (expr) == UNION_TYPE
11567 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11568 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11569 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11578 /* Fold a unary tree expression with code CODE of type TYPE with an
11579 operand OP0. Return a folded expression if successful. Otherwise,
11580 return a tree expression with code CODE of type TYPE with an operand OP0.  */
11584 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11587 #ifdef ENABLE_FOLD_CHECKING
11588 unsigned char checksum_before[16], checksum_after[16];
11589 struct md5_ctx ctx;
11592 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11593 md5_init_ctx (&ctx);
11594 fold_checksum_tree (op0, &ctx, ht);
11595 md5_finish_ctx (&ctx, checksum_before);
11599 tem = fold_unary (code, type, op0);
11601 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11603 #ifdef ENABLE_FOLD_CHECKING
11604 md5_init_ctx (&ctx);
11605 fold_checksum_tree (op0, &ctx, ht);
11606 md5_finish_ctx (&ctx, checksum_after);
11609 if (memcmp (checksum_before, checksum_after, 16))
11610 fold_check_failed (op0, tem);
11615 /* Fold a binary tree expression with code CODE of type TYPE with
11616 operands OP0 and OP1. Return a folded expression if successful.
11617 Otherwise, return a tree expression with code CODE of type TYPE
11618 with operands OP0 and OP1. */
11621 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11625 #ifdef ENABLE_FOLD_CHECKING
11626 unsigned char checksum_before_op0[16],
11627 checksum_before_op1[16],
11628 checksum_after_op0[16],
11629 checksum_after_op1[16];
11630 struct md5_ctx ctx;
11633 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11634 md5_init_ctx (&ctx);
11635 fold_checksum_tree (op0, &ctx, ht);
11636 md5_finish_ctx (&ctx, checksum_before_op0);
11639 md5_init_ctx (&ctx);
11640 fold_checksum_tree (op1, &ctx, ht);
11641 md5_finish_ctx (&ctx, checksum_before_op1);
11645 tem = fold_binary (code, type, op0, op1);
11647 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11649 #ifdef ENABLE_FOLD_CHECKING
11650 md5_init_ctx (&ctx);
11651 fold_checksum_tree (op0, &ctx, ht);
11652 md5_finish_ctx (&ctx, checksum_after_op0);
11655 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11656 fold_check_failed (op0, tem);
11658 md5_init_ctx (&ctx);
11659 fold_checksum_tree (op1, &ctx, ht);
11660 md5_finish_ctx (&ctx, checksum_after_op1);
11663 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11664 fold_check_failed (op1, tem);
11669 /* Fold a ternary tree expression with code CODE of type TYPE with
11670 operands OP0, OP1, and OP2. Return a folded expression if
11671 successful. Otherwise, return a tree expression with code CODE of
11672 type TYPE with operands OP0, OP1, and OP2. */
11675 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11679 #ifdef ENABLE_FOLD_CHECKING
11680 unsigned char checksum_before_op0[16],
11681 checksum_before_op1[16],
11682 checksum_before_op2[16],
11683 checksum_after_op0[16],
11684 checksum_after_op1[16],
11685 checksum_after_op2[16];
11686 struct md5_ctx ctx;
11689 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11690 md5_init_ctx (&ctx);
11691 fold_checksum_tree (op0, &ctx, ht);
11692 md5_finish_ctx (&ctx, checksum_before_op0);
11695 md5_init_ctx (&ctx);
11696 fold_checksum_tree (op1, &ctx, ht);
11697 md5_finish_ctx (&ctx, checksum_before_op1);
11700 md5_init_ctx (&ctx);
11701 fold_checksum_tree (op2, &ctx, ht);
11702 md5_finish_ctx (&ctx, checksum_before_op2);
11706 tem = fold_ternary (code, type, op0, op1, op2);
11708 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11710 #ifdef ENABLE_FOLD_CHECKING
11711 md5_init_ctx (&ctx);
11712 fold_checksum_tree (op0, &ctx, ht);
11713 md5_finish_ctx (&ctx, checksum_after_op0);
11716 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11717 fold_check_failed (op0, tem);
11719 md5_init_ctx (&ctx);
11720 fold_checksum_tree (op1, &ctx, ht);
11721 md5_finish_ctx (&ctx, checksum_after_op1);
11724 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11725 fold_check_failed (op1, tem);
11727 md5_init_ctx (&ctx);
11728 fold_checksum_tree (op2, &ctx, ht);
11729 md5_finish_ctx (&ctx, checksum_after_op2);
11732 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11733 fold_check_failed (op2, tem);
11738 /* Perform constant folding and related simplification of initializer
11739 expression EXPR. These behave identically to "fold_buildN" but ignore
11740 potential run-time traps and exceptions that fold must preserve. */
11742 #define START_FOLD_INIT \
11743 int saved_signaling_nans = flag_signaling_nans;\
11744 int saved_trapping_math = flag_trapping_math;\
11745 int saved_rounding_math = flag_rounding_math;\
11746 int saved_trapv = flag_trapv;\
11747 int saved_folding_initializer = folding_initializer;\
11748 flag_signaling_nans = 0;\
11749 flag_trapping_math = 0;\
11750 flag_rounding_math = 0;\
11752 folding_initializer = 1;
11754 #define END_FOLD_INIT \
11755 flag_signaling_nans = saved_signaling_nans;\
11756 flag_trapping_math = saved_trapping_math;\
11757 flag_rounding_math = saved_rounding_math;\
11758 flag_trapv = saved_trapv;\
11759 folding_initializer = saved_folding_initializer;
11762 fold_build1_initializer (enum tree_code code, tree type, tree op)
11767 result = fold_build1 (code, type, op);
11774 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11779 result = fold_build2 (code, type, op0, op1);
11786 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11792 result = fold_build3 (code, type, op0, op1, op2);
11798 #undef START_FOLD_INIT
11799 #undef END_FOLD_INIT
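/* Illustrative note: the *_initializer variants above are meant for folding
   static initializers such as "double d = 1.0 / 3.0;", where the operation
   must be evaluated at compile time even though flags like -frounding-math
   or -ftrapping-math would otherwise make fold keep it for run time.  */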
11801 /* Determine if first argument is a multiple of second argument. Return 0 if
11802 it is not, or if we cannot easily determine that it is.
11804 An example of the sort of thing we care about (at this point; this routine
11805 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11806 fold cases do now) is discovering that
11808 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11814 is a multiple of SAVE_EXPR (J * 8), when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11816 This code also handles discovering that
11818 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11820 is a multiple of 8 so we don't have to worry about dealing with a
11821 possible remainder.
11823 Note that we *look* inside a SAVE_EXPR only to determine how it was
11824 calculated; it is not safe for fold to do much of anything else with the
11825 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11826 at run time. For example, the latter example above *cannot* be implemented
11827 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11828 evaluation time of the original SAVE_EXPR is not necessarily the same as at
11829 the time the new expression is evaluated. The only optimization of this
11830 sort that would be valid is changing
11832 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11836 divided by 8 to SAVE_EXPR (I) * SAVE_EXPR (J)
11838 (where the same SAVE_EXPR (J) is used in the original and the
11839 transformed version). */
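/* Illustrative sketch, not part of the original source: with TYPE being
   sizetype, a TOP tree representing (i * 8 + 16) and BOTTOM the constant 8,
   the MULT_EXPR case below accepts i * 8 (its constant operand equals
   BOTTOM) and the PLUS_EXPR case accepts 16 (16 % 8 == 0), so

     multiple_of_p (sizetype, top, bottom)

   returns 1; a TOP of (i * 8 + 4) would instead return 0 because
   4 % 8 != 0.  */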
11842 multiple_of_p (tree type, tree top, tree bottom)
11844 if (operand_equal_p (top, bottom, 0))
11847 if (TREE_CODE (type) != INTEGER_TYPE)
11850 switch (TREE_CODE (top))
11853 /* Bitwise and provides a power of two multiple. If the mask is
11854 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11855 if (!integer_pow2p (bottom))
11860 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11861 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11865 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11866 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
11869 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11873 op1 = TREE_OPERAND (top, 1);
11874 /* const_binop may not detect overflow correctly,
11875 so check for it explicitly here. */
11876 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11877 > TREE_INT_CST_LOW (op1)
11878 && TREE_INT_CST_HIGH (op1) == 0
11879 && 0 != (t1 = fold_convert (type,
11880 const_binop (LSHIFT_EXPR,
11883 && ! TREE_OVERFLOW (t1))
11884 return multiple_of_p (type, t1, bottom);
11889 /* Can't handle conversions from non-integral or wider integral type. */
11890 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11891 || (TYPE_PRECISION (type)
11892 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11895 /* ... fall through ... */
11898 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
11901 if (TREE_CODE (bottom) != INTEGER_CST
11902 || (TYPE_UNSIGNED (type)
11903 && (tree_int_cst_sgn (top) < 0
11904 || tree_int_cst_sgn (bottom) < 0)))
11906 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11914 /* Return true if `t' is known to be non-negative. */
11917 tree_expr_nonnegative_p (tree t)
11919 if (t == error_mark_node)
11922 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11925 switch (TREE_CODE (t))
11928 /* Query VRP to see if it has recorded any information about
11929 the range of this object. */
11930 return ssa_name_nonnegative_p (t);
11933 /* We can't return 1 if flag_wrapv is set because
11934 ABS_EXPR<INT_MIN> = INT_MIN. */
11935 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11940 return tree_int_cst_sgn (t) >= 0;
11943 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
11946 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11947 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11948 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11950 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11951 both unsigned and at least 2 bits shorter than the result. */
11952 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11953 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11954 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11956 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11957 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11958 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11959 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11961 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11962 TYPE_PRECISION (inner2)) + 1;
11963 return prec < TYPE_PRECISION (TREE_TYPE (t));
11969 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11971 /* x * x for floating point x is always non-negative. */
11972 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11974 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11975 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11978 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11979 both unsigned and the sum of their precisions is less than that of the result. */
11980 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11981 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11982 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11984 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11985 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11986 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11987 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11988 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11989 < TYPE_PRECISION (TREE_TYPE (t));
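/* Worked example (editorial, assuming 8-bit unsigned chars and a 32-bit
   int result): for (int) x + (int) y the PLUS_EXPR rule above needs
   MAX (8, 8) + 1 = 9 < 32, and for (int) x * (int) y the MULT_EXPR rule
   needs 8 + 8 = 16 < 32, so both the sum and the product of zero-extended
   unsigned chars are known non-negative here.  */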
11995 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11996 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12002 case TRUNC_DIV_EXPR:
12003 case CEIL_DIV_EXPR:
12004 case FLOOR_DIV_EXPR:
12005 case ROUND_DIV_EXPR:
12006 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12007 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12009 case TRUNC_MOD_EXPR:
12010 case CEIL_MOD_EXPR:
12011 case FLOOR_MOD_EXPR:
12012 case ROUND_MOD_EXPR:
12014 case NON_LVALUE_EXPR:
12016 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12018 case COMPOUND_EXPR:
12020 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12023 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12026 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12027 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12031 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12032 tree outer_type = TREE_TYPE (t);
12034 if (TREE_CODE (outer_type) == REAL_TYPE)
12036 if (TREE_CODE (inner_type) == REAL_TYPE)
12037 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12038 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12040 if (TYPE_UNSIGNED (inner_type))
12042 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12045 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12047 if (TREE_CODE (inner_type) == REAL_TYPE)
12048 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12049 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12050 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12051 && TYPE_UNSIGNED (inner_type);
12058 tree temp = TARGET_EXPR_SLOT (t);
12059 t = TARGET_EXPR_INITIAL (t);
12061 /* If the initializer is non-void, then it's a normal expression
12062 that will be assigned to the slot. */
12063 if (!VOID_TYPE_P (t))
12064 return tree_expr_nonnegative_p (t);
12066 /* Otherwise, the initializer sets the slot in some way. One common
12067 way is an assignment statement at the end of the initializer. */
12070 if (TREE_CODE (t) == BIND_EXPR)
12071 t = expr_last (BIND_EXPR_BODY (t));
12072 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12073 || TREE_CODE (t) == TRY_CATCH_EXPR)
12074 t = expr_last (TREE_OPERAND (t, 0));
12075 else if (TREE_CODE (t) == STATEMENT_LIST)
12080 if (TREE_CODE (t) == MODIFY_EXPR
12081 && TREE_OPERAND (t, 0) == temp)
12082 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12089 tree fndecl = get_callee_fndecl (t);
12090 tree arglist = TREE_OPERAND (t, 1);
12091 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12092 switch (DECL_FUNCTION_CODE (fndecl))
12094 CASE_FLT_FN (BUILT_IN_ACOS):
12095 CASE_FLT_FN (BUILT_IN_ACOSH):
12096 CASE_FLT_FN (BUILT_IN_CABS):
12097 CASE_FLT_FN (BUILT_IN_COSH):
12098 CASE_FLT_FN (BUILT_IN_ERFC):
12099 CASE_FLT_FN (BUILT_IN_EXP):
12100 CASE_FLT_FN (BUILT_IN_EXP10):
12101 CASE_FLT_FN (BUILT_IN_EXP2):
12102 CASE_FLT_FN (BUILT_IN_FABS):
12103 CASE_FLT_FN (BUILT_IN_FDIM):
12104 CASE_FLT_FN (BUILT_IN_HYPOT):
12105 CASE_FLT_FN (BUILT_IN_POW10):
12106 CASE_INT_FN (BUILT_IN_FFS):
12107 CASE_INT_FN (BUILT_IN_PARITY):
12108 CASE_INT_FN (BUILT_IN_POPCOUNT):
12112 CASE_FLT_FN (BUILT_IN_SQRT):
12113 /* sqrt(-0.0) is -0.0. */
12114 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12116 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12118 CASE_FLT_FN (BUILT_IN_ASINH):
12119 CASE_FLT_FN (BUILT_IN_ATAN):
12120 CASE_FLT_FN (BUILT_IN_ATANH):
12121 CASE_FLT_FN (BUILT_IN_CBRT):
12122 CASE_FLT_FN (BUILT_IN_CEIL):
12123 CASE_FLT_FN (BUILT_IN_ERF):
12124 CASE_FLT_FN (BUILT_IN_EXPM1):
12125 CASE_FLT_FN (BUILT_IN_FLOOR):
12126 CASE_FLT_FN (BUILT_IN_FMOD):
12127 CASE_FLT_FN (BUILT_IN_FREXP):
12128 CASE_FLT_FN (BUILT_IN_LCEIL):
12129 CASE_FLT_FN (BUILT_IN_LDEXP):
12130 CASE_FLT_FN (BUILT_IN_LFLOOR):
12131 CASE_FLT_FN (BUILT_IN_LLCEIL):
12132 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12133 CASE_FLT_FN (BUILT_IN_LLRINT):
12134 CASE_FLT_FN (BUILT_IN_LLROUND):
12135 CASE_FLT_FN (BUILT_IN_LRINT):
12136 CASE_FLT_FN (BUILT_IN_LROUND):
12137 CASE_FLT_FN (BUILT_IN_MODF):
12138 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12139 CASE_FLT_FN (BUILT_IN_POW):
12140 CASE_FLT_FN (BUILT_IN_RINT):
12141 CASE_FLT_FN (BUILT_IN_ROUND):
12142 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12143 CASE_FLT_FN (BUILT_IN_SINH):
12144 CASE_FLT_FN (BUILT_IN_TANH):
12145 CASE_FLT_FN (BUILT_IN_TRUNC):
12146 /* True if the 1st argument is nonnegative. */
12147 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12149 CASE_FLT_FN (BUILT_IN_FMAX):
12150 /* True if the 1st OR 2nd arguments are nonnegative. */
12151 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12152 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12154 CASE_FLT_FN (BUILT_IN_FMIN):
12155 /* True if the 1st AND 2nd arguments are nonnegative. */
12156 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12157 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12159 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12160 /* True if the 2nd argument is nonnegative. */
12161 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12168 /* ... fall through ... */
12171 if (truth_value_p (TREE_CODE (t)))
12172 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12176 /* We don't know the sign of `t', so be conservative and return false. */
12180 /* Return true when T is an address and is known to be nonzero.
12181 For floating point we further ensure that T is not denormal.
12182 Similar logic is present in nonzero_address in rtlanal.h. */
12185 tree_expr_nonzero_p (tree t)
12187 tree type = TREE_TYPE (t);
12189 /* Doing something useful for floating point would need more work. */
12190 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12193 switch (TREE_CODE (t))
12196 /* Query VRP to see if it has recorded any information about
12197 the range of this object. */
12198 return ssa_name_nonzero_p (t);
12201 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12204 /* We used to test for !integer_zerop here. This does not work correctly
12205 if TREE_CONSTANT_OVERFLOW (t). */
12206 return (TREE_INT_CST_LOW (t) != 0
12207 || TREE_INT_CST_HIGH (t) != 0);
12210 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12212 /* In the presence of negative values it is hard
12213 to say anything definite. */
12214 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12215 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12217 /* One of the operands must be positive and the other non-negative. */
12218 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12219 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
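/* Editorial example: for signed arithmetic without -fwrapv, i + 1 is known
   nonzero once VRP has shown i to be non-negative, since both operands are
   then non-negative and the constant 1 is itself nonzero.  */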
12224 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12226 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12227 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12233 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12234 tree outer_type = TREE_TYPE (t);
12236 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12237 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12243 tree base = get_base_address (TREE_OPERAND (t, 0));
12248 /* Weak declarations may link to NULL. */
12249 if (VAR_OR_FUNCTION_DECL_P (base))
12250 return !DECL_WEAK (base);
12252 /* Constants are never weak. */
12253 if (CONSTANT_CLASS_P (base))
12260 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12261 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12264 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12265 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12268 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12270 /* When both operands are nonzero, MAX must be too. */
12271 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12274 /* MAX where operand 0 is positive is positive. */
12275 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12277 /* MAX where operand 1 is positive is positive. */
12278 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12279 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12283 case COMPOUND_EXPR:
12286 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12289 case NON_LVALUE_EXPR:
12290 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12293 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12294 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12297 return alloca_call_p (t);
12305 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12306 attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1.
12309 If the expression could be simplified to a constant, then return
12310 the constant. If the expression cannot be simplified to a
12311 constant, then return NULL_TREE. */
12314 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12316 tree tem = fold_binary (code, type, op0, op1);
12317 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12320 /* Given the components of a unary expression CODE, TYPE and OP0,
12321 attempt to fold the expression to a constant without modifying TYPE or OP0.
12324 If the expression could be simplified to a constant, then return
12325 the constant. If the expression cannot be simplified to a
12326 constant, then return NULL_TREE. */
12329 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12331 tree tem = fold_unary (code, type, op0);
12332 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12335 /* If EXP represents referencing an element in a constant string
12336 (either via pointer arithmetic or array indexing), return the
12337 tree representing the value accessed, otherwise return NULL. */
12340 fold_read_from_constant_string (tree exp)
12342 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12344 tree exp1 = TREE_OPERAND (exp, 0);
12348 if (TREE_CODE (exp) == INDIRECT_REF)
12349 string = string_constant (exp1, &index);
12352 tree low_bound = array_ref_low_bound (exp);
12353 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12355 /* Optimize the special-case of a zero lower bound.
12357 We convert the low_bound to sizetype to avoid some problems
12358 with constant folding. (E.g. suppose the lower bound is 1,
12359 and its mode is QI. Without the conversion, (ARRAY
12360 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12361 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12362 if (! integer_zerop (low_bound))
12363 index = size_diffop (index, fold_convert (sizetype, low_bound));
12369 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12370 && TREE_CODE (string) == STRING_CST
12371 && TREE_CODE (index) == INTEGER_CST
12372 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12373 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12375 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12376 return fold_convert (TREE_TYPE (exp),
12377 build_int_cst (NULL_TREE,
12378 (TREE_STRING_POINTER (string)
12379 [TREE_INT_CST_LOW (index)])));
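/* Editorial example: an access such as "hello"[1] satisfies all of the
   checks above (constant index within the string length, single-byte
   integer element type), so it folds to the character constant 'e'.  */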
12384 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12385 an integer constant or real constant.
12387 TYPE is the type of the result. */
12390 fold_negate_const (tree arg0, tree type)
12392 tree t = NULL_TREE;
12394 switch (TREE_CODE (arg0))
12398 unsigned HOST_WIDE_INT low;
12399 HOST_WIDE_INT high;
12400 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12401 TREE_INT_CST_HIGH (arg0),
12403 t = build_int_cst_wide (type, low, high);
12404 t = force_fit_type (t, 1,
12405 (overflow | TREE_OVERFLOW (arg0))
12406 && !TYPE_UNSIGNED (type),
12407 TREE_CONSTANT_OVERFLOW (arg0));
12412 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12416 gcc_unreachable ();
12422 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12423 an integer constant or real constant.
12425 TYPE is the type of the result. */
12428 fold_abs_const (tree arg0, tree type)
12430 tree t = NULL_TREE;
12432 switch (TREE_CODE (arg0))
12435 /* If the value is unsigned, then the absolute value is
12436 the same as the ordinary value. */
12437 if (TYPE_UNSIGNED (type))
12439 /* Similarly, if the value is non-negative. */
12440 else if (INT_CST_LT (integer_minus_one_node, arg0))
12442 /* If the value is negative, then the absolute value is its negation. */
12446 unsigned HOST_WIDE_INT low;
12447 HOST_WIDE_INT high;
12448 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12449 TREE_INT_CST_HIGH (arg0),
12451 t = build_int_cst_wide (type, low, high);
12452 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12453 TREE_CONSTANT_OVERFLOW (arg0));
12458 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12459 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12465 gcc_unreachable ();
12471 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12472 constant. TYPE is the type of the result. */
12475 fold_not_const (tree arg0, tree type)
12477 tree t = NULL_TREE;
12479 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12481 t = build_int_cst_wide (type,
12482 ~ TREE_INT_CST_LOW (arg0),
12483 ~ TREE_INT_CST_HIGH (arg0));
12484 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12485 TREE_CONSTANT_OVERFLOW (arg0));
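/* Editorial example: for an 8-bit unsigned type and ARG0 == 0x0f, both
   words of the constant are inverted and force_fit_type then truncates the
   result to the type's precision, yielding 0xf0.  */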
12490 /* Given CODE, a relational operator, the target type TYPE, and two
12491 constant operands OP0 and OP1, return the result of the
12492 relational operation. If the result is not a compile time
12493 constant, then return NULL_TREE. */
12496 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12498 int result, invert;
12500 /* From here on, the only cases we handle are when the result is
12501 known to be a constant. */
12503 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12505 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12506 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12508 /* Handle the cases where either operand is a NaN. */
12509 if (real_isnan (c0) || real_isnan (c1))
12519 case UNORDERED_EXPR:
12533 if (flag_trapping_math)
12539 gcc_unreachable ();
12542 return constant_boolean_node (result, type);
12545 return constant_boolean_node (real_compare (code, c0, c1), type);
12548 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12550 To compute GT, swap the arguments and do LT.
12551 To compute GE, do LT and invert the result.
12552 To compute LE, swap the arguments, do LT and invert the result.
12553 To compute NE, do EQ and invert the result.
12555 Therefore, the code below must handle only EQ and LT. */
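/* Editorial example: 3 > 2 is computed by swapping to 2 < 3 (true), and
   3 >= 2 by computing 3 < 2 (false) and inverting the result.  */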
12557 if (code == LE_EXPR || code == GT_EXPR)
12562 code = swap_tree_comparison (code);
12565 /* Note that it is safe to invert for real values here because we
12566 have already handled the one case where it matters. */
12569 if (code == NE_EXPR || code == GE_EXPR)
12572 code = invert_tree_comparison (code, false);
12575 /* Compute a result for LT or EQ if args permit;
12576 otherwise return NULL_TREE. */
12577 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12579 if (code == EQ_EXPR)
12580 result = tree_int_cst_equal (op0, op1);
12581 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12582 result = INT_CST_LT_UNSIGNED (op0, op1);
12584 result = INT_CST_LT (op0, op1);
12591 return constant_boolean_node (result, type);
12594 /* Build an expression for a cleanup point containing EXPR with type TYPE.
12595 Don't build a cleanup point expression for EXPR which doesn't have side effects. */
12599 fold_build_cleanup_point_expr (tree type, tree expr)
12601 /* If the expression does not have side effects then we don't have to wrap
12602 it with a cleanup point expression. */
12603 if (!TREE_SIDE_EFFECTS (expr))
12606 /* If the expression is a return, check whether the expression inside the
12607 return, or the right-hand side of the MODIFY_EXPR inside the return, has
12608 no side effects. If either one has no side effects, we don't need to
12609 wrap the expression in a cleanup point expression. Note we don't check the
12610 left-hand side of the MODIFY_EXPR because it should always be a return decl. */
12611 if (TREE_CODE (expr) == RETURN_EXPR)
12613 tree op = TREE_OPERAND (expr, 0);
12614 if (!op || !TREE_SIDE_EFFECTS (op))
12616 op = TREE_OPERAND (op, 1);
12617 if (!TREE_SIDE_EFFECTS (op))
12621 return build1 (CLEANUP_POINT_EXPR, type, expr);
12624 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12625 avoid confusing the gimplify process. */
12628 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12630 /* The size of the object is not relevant when talking about its address. */
12631 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12632 t = TREE_OPERAND (t, 0);
12634 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12635 if (TREE_CODE (t) == INDIRECT_REF
12636 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12638 t = TREE_OPERAND (t, 0);
12639 if (TREE_TYPE (t) != ptrtype)
12640 t = build1 (NOP_EXPR, ptrtype, t);
12646 while (handled_component_p (base))
12647 base = TREE_OPERAND (base, 0);
12649 TREE_ADDRESSABLE (base) = 1;
12651 t = build1 (ADDR_EXPR, ptrtype, t);
12658 build_fold_addr_expr (tree t)
12660 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12663 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12664 of an indirection through OP0, or NULL_TREE if no simplification is
12668 fold_indirect_ref_1 (tree type, tree op0)
12674 subtype = TREE_TYPE (sub);
12675 if (!POINTER_TYPE_P (subtype))
12678 if (TREE_CODE (sub) == ADDR_EXPR)
12680 tree op = TREE_OPERAND (sub, 0);
12681 tree optype = TREE_TYPE (op);
12682 /* *&p => p; make sure to handle *&"str"[cst] here. */
12683 if (type == optype)
12685 tree fop = fold_read_from_constant_string (op);
12691 /* *(foo *)&fooarray => fooarray[0] */
12692 else if (TREE_CODE (optype) == ARRAY_TYPE
12693 && type == TREE_TYPE (optype))
12695 tree type_domain = TYPE_DOMAIN (optype);
12696 tree min_val = size_zero_node;
12697 if (type_domain && TYPE_MIN_VALUE (type_domain))
12698 min_val = TYPE_MIN_VALUE (type_domain);
12699 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12701 /* *(foo *)&complexfoo => __real__ complexfoo */
12702 else if (TREE_CODE (optype) == COMPLEX_TYPE
12703 && type == TREE_TYPE (optype))
12704 return fold_build1 (REALPART_EXPR, type, op);
12707 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12708 if (TREE_CODE (sub) == PLUS_EXPR
12709 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12711 tree op00 = TREE_OPERAND (sub, 0);
12712 tree op01 = TREE_OPERAND (sub, 1);
12716 op00type = TREE_TYPE (op00);
12717 if (TREE_CODE (op00) == ADDR_EXPR
12718 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
12719 && type == TREE_TYPE (TREE_TYPE (op00type)))
12721 tree size = TYPE_SIZE_UNIT (type);
12722 if (tree_int_cst_equal (size, op01))
12723 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
12727 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
12728 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
12729 && type == TREE_TYPE (TREE_TYPE (subtype)))
12732 tree min_val = size_zero_node;
12733 sub = build_fold_indirect_ref (sub);
12734 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
12735 if (type_domain && TYPE_MIN_VALUE (type_domain))
12736 min_val = TYPE_MIN_VALUE (type_domain);
12737 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
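/* Editorial sketch of the transformations above in C terms, assuming
   "double d[4];", "_Complex double c;" and "double (*p)[4];":

     *(double *) &d      becomes  d[0]
     *(double *) &c      becomes  __real__ c
     ((double *) &c)[1]  becomes  __imag__ c   (offset == sizeof (double))
     *(double *) p       becomes  (*p)[0]
*/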
12743 /* Builds an expression for an indirection through T, simplifying some cases. */
12747 build_fold_indirect_ref (tree t)
12749 tree type = TREE_TYPE (TREE_TYPE (t));
12750 tree sub = fold_indirect_ref_1 (type, t);
12755 return build1 (INDIRECT_REF, type, t);
12758 /* Given an INDIRECT_REF T, return either T or a simplified version. */
12761 fold_indirect_ref (tree t)
12763 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
12771 /* Strip non-trapping, non-side-effecting tree nodes from an expression
12772 whose result is ignored. The type of the returned tree need not be
12773 the same as that of the original expression. */
12776 fold_ignored_result (tree t)
12778 if (!TREE_SIDE_EFFECTS (t))
12779 return integer_zero_node;
12782 switch (TREE_CODE_CLASS (TREE_CODE (t)))
12785 t = TREE_OPERAND (t, 0);
12789 case tcc_comparison:
12790 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12791 t = TREE_OPERAND (t, 0);
12792 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
12793 t = TREE_OPERAND (t, 1);
12798 case tcc_expression:
12799 switch (TREE_CODE (t))
12801 case COMPOUND_EXPR:
12802 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12804 t = TREE_OPERAND (t, 0);
12808 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
12809 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
12811 t = TREE_OPERAND (t, 0);
12824 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
12825 This can only be applied to objects of a sizetype. */
12828 round_up (tree value, int divisor)
12830 tree div = NULL_TREE;
12832 gcc_assert (divisor > 0);
12836 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12837 have to do anything. Only do this when we are not given a const,
12838 because in that case, this check is more expensive than just doing it. */
12840 if (TREE_CODE (value) != INTEGER_CST)
12842 div = build_int_cst (TREE_TYPE (value), divisor);
12844 if (multiple_of_p (TREE_TYPE (value), value, div))
12848 /* If divisor is a power of two, simplify this to bit manipulation. */
12849 if (divisor == (divisor & -divisor))
12853 t = build_int_cst (TREE_TYPE (value), divisor - 1);
12854 value = size_binop (PLUS_EXPR, value, t);
12855 t = build_int_cst (TREE_TYPE (value), -divisor);
12856 value = size_binop (BIT_AND_EXPR, value, t);
12861 div = build_int_cst (TREE_TYPE (value), divisor);
12862 value = size_binop (CEIL_DIV_EXPR, value, div);
12863 value = size_binop (MULT_EXPR, value, div);
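/* Editorial sketch of the two strategies above on plain host integers
   rather than trees, assuming a positive divisor as the assertion does
   (the first branch is the power-of-two path, the second the ceiling
   division path):

     unsigned int
     round_up_example (unsigned int value, unsigned int divisor)
     {
       if ((divisor & (divisor - 1)) == 0)
	 return (value + divisor - 1) & -divisor;
       return (value + divisor - 1) / divisor * divisor;
     }

   e.g. round_up_example (13, 8) == 16 and round_up_example (14, 6) == 18.  */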
12869 /* Likewise, but round down. */
12872 round_down (tree value, int divisor)
12874 tree div = NULL_TREE;
12876 gcc_assert (divisor > 0);
12880 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12881 have to do anything. Only do this when we are not given a const,
12882 because in that case, this check is more expensive than just doing it. */
12884 if (TREE_CODE (value) != INTEGER_CST)
12886 div = build_int_cst (TREE_TYPE (value), divisor);
12888 if (multiple_of_p (TREE_TYPE (value), value, div))
12892 /* If divisor is a power of two, simplify this to bit manipulation. */
12893 if (divisor == (divisor & -divisor))
12897 t = build_int_cst (TREE_TYPE (value), -divisor);
12898 value = size_binop (BIT_AND_EXPR, value, t);
12903 div = build_int_cst (TREE_TYPE (value), divisor);
12904 value = size_binop (FLOOR_DIV_EXPR, value, div);
12905 value = size_binop (MULT_EXPR, value, div);
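/* Editorial example: round_down with a divisor of 8 reduces to value & -8
   on the power-of-two path, so 13 becomes 8; with a divisor of 6 the
   general path gives 13 / 6 * 6 == 12.  */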
12911 /* Returns the pointer to the base of the object addressed by EXP and
12912 extracts the information about the offset of the access, storing it
12913 in PBITPOS and POFFSET. */
12916 split_address_to_core_and_offset (tree exp,
12917 HOST_WIDE_INT *pbitpos, tree *poffset)
12920 enum machine_mode mode;
12921 int unsignedp, volatilep;
12922 HOST_WIDE_INT bitsize;
12924 if (TREE_CODE (exp) == ADDR_EXPR)
12926 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12927 poffset, &mode, &unsignedp, &volatilep,
12929 core = build_fold_addr_expr (core);
12935 *poffset = NULL_TREE;
12941 /* Returns true if the addresses of E1 and E2 differ by a constant, false
12942 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12945 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12948 HOST_WIDE_INT bitpos1, bitpos2;
12949 tree toffset1, toffset2, tdiff, type;
12951 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12952 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12954 if (bitpos1 % BITS_PER_UNIT != 0
12955 || bitpos2 % BITS_PER_UNIT != 0
12956 || !operand_equal_p (core1, core2, 0))
12959 if (toffset1 && toffset2)
12961 type = TREE_TYPE (toffset1);
12962 if (type != TREE_TYPE (toffset2))
12963 toffset2 = fold_convert (type, toffset2);
12965 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12966 if (!cst_and_fits_in_hwi (tdiff))
12969 *diff = int_cst_value (tdiff);
12971 else if (toffset1 || toffset2)
12973 /* If only one of the offsets is non-constant, the difference cannot be a constant. */
12980 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
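/* Editorial example: for "int a[10];", the addresses &a[3] and &a[1] share
   the core &a and differ only in constant bit positions, so the function
   stores 2 * sizeof (int) (typically 8) in *DIFF and returns true.  */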
12984 /* Simplify the floating point expression EXP when the sign of the
12985 result is not significant. Return NULL_TREE if no simplification was possible. */
12989 fold_strip_sign_ops (tree exp)
12993 switch (TREE_CODE (exp))
12997 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12998 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13002 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13004 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13005 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13006 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13007 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13008 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13009 arg1 ? arg1 : TREE_OPERAND (exp, 1));